1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
23 /*@@ This file should be rewritten to use an arbitrary precision
24 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
25 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
26 @@ The routines that translate from the ap rep should
27 @@ warn if precision et al. is lost.
28 @@ This would also make life easier when this technology is used
29 @@ for cross-compilers. */
31 /* The entry points in this file are fold, size_int_wide, size_binop
32 and force_fit_type_double.
34 fold takes a tree as argument and returns a simplified tree.
36 size_binop takes a tree code for an arithmetic operation
37 and two operands that are trees, and produces a tree for the
38 result, assuming the type comes from `sizetype'.
40 size_int takes an integer value, and creates a tree constant
41 with type from `sizetype'.
43 force_fit_type_double takes a constant, an overflowable flag and a
44 prior overflow indicator. It forces the value to fit the type and
47 Note: Since the folders get called on non-gimple code as well as
48 gimple code, we need to handle GIMPLE tuples as well as their
49 corresponding tree equivalents. */
53 #include "coretypes.h"
65 #include "langhooks.h"
68 /* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
70 int folding_initializer = 0;
72 /* The following constants represent a bit based encoding of GCC's
73 comparison operators. This encoding simplifies transformations
74 on relational comparison operators, such as AND and OR. */
75 enum comparison_code {
94 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
95 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
96 static bool negate_mathfn_p (enum built_in_function);
97 static bool negate_expr_p (tree);
98 static tree negate_expr (tree);
99 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
100 static tree associate_trees (tree, tree, enum tree_code, tree);
101 static tree const_binop (enum tree_code, tree, tree, int);
102 static enum comparison_code comparison_to_compcode (enum tree_code);
103 static enum tree_code compcode_to_comparison (enum comparison_code);
104 static tree combine_comparisons (enum tree_code, enum tree_code,
105 enum tree_code, tree, tree, tree);
106 static int truth_value_p (enum tree_code);
107 static int operand_equal_for_comparison_p (tree, tree, tree);
108 static int twoval_comparison_p (tree, tree *, tree *, int *);
109 static tree eval_subst (tree, tree, tree, tree, tree);
110 static tree pedantic_omit_one_operand (tree, tree, tree);
111 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
112 static tree make_bit_field_ref (tree, tree, int, int, int);
113 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
114 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
115 enum machine_mode *, int *, int *,
117 static int all_ones_mask_p (tree, int);
118 static tree sign_bit_p (tree, tree);
119 static int simple_operand_p (tree);
120 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
121 static tree range_predecessor (tree);
122 static tree range_successor (tree);
123 static tree make_range (tree, int *, tree *, tree *, bool *);
124 static tree build_range_check (tree, tree, int, tree, tree);
125 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
127 static tree fold_range_test (enum tree_code, tree, tree, tree);
128 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
129 static tree unextend (tree, int, int, tree);
130 static tree fold_truthop (enum tree_code, tree, tree, tree);
131 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
132 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
133 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
134 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
137 static bool fold_real_zero_addition_p (tree, tree, int);
138 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
140 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (tree, tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static int native_encode_expr (tree, unsigned char *, int);
147 static tree native_interpret_expr (tree, unsigned char *, int);
150 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
151 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
152 and SUM1. Then this yields nonzero if overflow occurred during the
155 Overflow occurs if A and B have the same sign, but A and SUM differ in
156 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
   sign bit.  */
/* Nonzero iff adding A and B (which produced SUM) overflowed in signed
   two's-complement arithmetic: A and B agree in sign but SUM differs.  */
158 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
160 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
161 We do that by representing the two-word integer in 4 words, with only
162 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
163 number. The value of the word is LOWPART + HIGHPART * BASE. */
/* NOTE(review): the header line `#define LOWPART(x) \' appears to have
   been elided from this extract; the expression on the next line is its
   continuation (compare the parallel HIGHPART definition below).  */
166 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
167 #define HIGHPART(x) \
168 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
/* Radix of the half-word digit representation used by encode/decode.  */
169 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
171 /* Unpack a two-word integer into 4 words.
172 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
173 WORDS points to the array of HOST_WIDE_INTs. */
/* Split the double-word integer LOW/HI into four half-word "digits",
   least significant first, stored as nonnegative values in WORDS[0..3]
   (see the LOWPART/HIGHPART/BASE scheme above).
   NOTE(review): the return-type line and braces of this definition
   appear to have been elided from this extract.  */
176 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
178 words[0] = LOWPART (low);
179 words[1] = HIGHPART (low);
180 words[2] = LOWPART (hi);
181 words[3] = HIGHPART (hi);
184 /* Pack an array of 4 words into a two-word integer.
185 WORDS points to the array of words.
186 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
/* Inverse of encode: reassemble four half-word digits in WORDS into the
   double-word integer *LOW/*HI.
   NOTE(review): the return-type line, the second parameter line
   (`HOST_WIDE_INT *hi') and braces appear elided from this extract.  */
189 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
192 *low = words[0] + words[1] * BASE;
193 *hi = words[2] + words[3] * BASE;
196 /* Force the double-word integer L1, H1 to be within the range of the
197 integer type TYPE. Stores the properly truncated and sign-extended
198 double-word integer in *LV, *HV. Returns true if the operation
199 overflows, that is, argument and result are different. */
/* Truncate/sign-extend the double-word value L1/H1 to the precision of
   TYPE, storing the result through *LV/*HV (per the block comment
   above) and returning nonzero iff the value changed, i.e. it did not
   fit.
   NOTE(review): braces and several statements of this function appear
   to have been elided from this extract; the comments below describe
   only the visible lines.  */
202 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
203 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
/* Remember the incoming value so overflow can be detected by simple
   comparison after masking and extension.  */
205 unsigned HOST_WIDE_INT low0 = l1;
206 HOST_WIDE_INT high0 = h1;
208 int sign_extended_type;
/* Pointer and offset types get special handling; the branch body is
   not visible in this extract.  */
210 if (POINTER_TYPE_P (type)
211 || TREE_CODE (type) == OFFSET_TYPE)
214 prec = TYPE_PRECISION (type);
216 /* Size types *are* sign extended. */
217 sign_extended_type = (!TYPE_UNSIGNED (type)
218 || (TREE_CODE (type) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (type)));
221 /* First clear all bits that are beyond the type's precision. */
222 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
224 else if (prec > HOST_BITS_PER_WIDE_INT)
225 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
229 if (prec < HOST_BITS_PER_WIDE_INT)
230 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
233 /* Then do sign extension if necessary. */
234 if (!sign_extended_type)
235 /* No sign extension */;
236 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
237 /* Correct width already. */;
238 else if (prec > HOST_BITS_PER_WIDE_INT)
240 /* Sign extend top half? */
241 if (h1 & ((unsigned HOST_WIDE_INT)1
242 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
245 else if (prec == HOST_BITS_PER_WIDE_INT)
247 if ((HOST_WIDE_INT)l1 < 0)
252 /* Sign extend bottom half? */
253 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
256 l1 |= (HOST_WIDE_INT)(-1) << prec;
263 /* If the value didn't fit, signal overflow. */
264 return l1 != low0 || h1 != high0;
267 /* We force the double-int HIGH:LOW to the range of the type TYPE by
268 sign or zero extending it.
269 OVERFLOWABLE indicates if we are interested
270 in overflow of the value, when >0 we are only interested in signed
271 overflow, for <0 we are interested in any overflow. OVERFLOWED
272 indicates whether overflow has already occurred. CONST_OVERFLOWED
273 indicates whether constant overflow has already occurred. We force
274 T's value to be within range of T's type (by setting to 0 or 1 all
275 the bits outside the type's range). We set TREE_OVERFLOWED if,
276 OVERFLOWED is nonzero,
277 or OVERFLOWABLE is >0 and signed overflow occurs
278 or OVERFLOWABLE is <0 and any overflow occurs
279 We return a new tree node for the extended double-int. The node
280 is shared if no overflow flags are set. */
/* Fit LOW/HIGH to TYPE (via fit_double_type) and return an INTEGER_CST
   for the result; see the block comment above for the OVERFLOWABLE /
   OVERFLOWED policy.  Builds an unshared node with TREE_OVERFLOW set
   when the policy demands it, otherwise a shared node.
   NOTE(review): braces, at least one parameter line (the `overflowed'
   flag) and part of the overflow-policy condition appear elided from
   this extract.  */
283 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
284 HOST_WIDE_INT high, int overflowable,
287 int sign_extended_type;
290 /* Size types *are* sign extended. */
291 sign_extended_type = (!TYPE_UNSIGNED (type)
292 || (TREE_CODE (type) == INTEGER_TYPE
293 && TYPE_IS_SIZETYPE (type)));
295 overflow = fit_double_type (low, high, &low, &high, type);
297 /* If we need to set overflow flags, return a new unshared node. */
298 if (overflowed || overflow)
/* NOTE(review): the first half of this condition (presumably the
   OVERFLOWABLE < 0 case) is not visible in this extract.  */
302 || (overflowable > 0 && sign_extended_type))
304 tree t = make_node (INTEGER_CST);
305 TREE_INT_CST_LOW (t) = low;
306 TREE_INT_CST_HIGH (t) = high;
307 TREE_TYPE (t) = type;
308 TREE_OVERFLOW (t) = 1;
313 /* Else build a shared node. */
314 return build_int_cst_wide (type, low, high);
317 /* Add two doubleword integers with doubleword result.
318 Return nonzero if the operation overflows according to UNSIGNED_P.
319 Each argument is given as two `HOST_WIDE_INT' pieces.
320 One argument is L1 and H1; the other, L2 and H2.
321 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Add the double-word integers L1/H1 and L2/H2 into *LV/*HV; return
   nonzero on overflow.  The low words are added first and `(l < l1)'
   supplies the carry into the high words.  Unsigned overflow is a
   carry out of the high word; signed overflow uses OVERFLOW_SUM_SIGN.
   NOTE(review): the unsigned_p parameter line, braces, assignments to
   *lv/*hv and the if/else around the two returns appear elided from
   this extract.  */
324 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
325 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
326 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
329 unsigned HOST_WIDE_INT l;
333 h = h1 + h2 + (l < l1);
339 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
341 return OVERFLOW_SUM_SIGN (h1, h2, h);
344 /* Negate a doubleword integer with doubleword result.
345 Return nonzero if the operation overflows, assuming it's signed.
346 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
347 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Negate the double-word integer L1/H1 into *LV/*HV; return nonzero if
   the (signed) negation overflowed, i.e. the operand was the most
   negative value, detected when both the input and the result keep the
   sign bit set.
   NOTE(review): the function body between the parameter list and the
   visible return is elided from this extract.  */
350 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
351 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
357 return (*hv & h1) < 0;
367 /* Multiply two doubleword integers with doubleword result.
368 Return nonzero if the operation overflows according to UNSIGNED_P.
369 Each argument is given as two `HOST_WIDE_INT' pieces.
370 One argument is L1 and H1; the other, L2 and H2.
371 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Multiply L1/H1 by L2/H2 into *LV/*HV via half-word digit arithmetic
   (encode/decode); return nonzero on overflow.  The full 4x4-digit
   product is accumulated in PROD[0..7]; the top four digits are the
   part that does not fit the double-word result.
   NOTE(review): the unsigned_p parameter line, braces, loop indices
   and the if/else scaffolding around the returns appear elided from
   this extract.  */
374 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
375 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
376 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
379 HOST_WIDE_INT arg1[4];
380 HOST_WIDE_INT arg2[4];
381 HOST_WIDE_INT prod[4 * 2];
382 unsigned HOST_WIDE_INT carry;
384 unsigned HOST_WIDE_INT toplow, neglow;
385 HOST_WIDE_INT tophigh, neghigh;
387 encode (arg1, l1, h1);
388 encode (arg2, l2, h2);
390 memset (prod, 0, sizeof prod);
392 for (i = 0; i < 4; i++)
395 for (j = 0; j < 4; j++)
398 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
399 carry += arg1[i] * arg2[j];
400 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
402 prod[k] = LOWPART (carry);
403 carry = HIGHPART (carry);
/* Low half of the product is the result; the next four digits are the
   discarded top half used only for overflow detection.  */
408 decode (prod, lv, hv);
409 decode (prod + 4, &toplow, &tophigh);
411 /* Unsigned overflow is immediate. */
413 return (toplow | tophigh) != 0;
415 /* Check for signed overflow by calculating the signed representation of the
416 top half of the result; it should agree with the low half's sign bit. */
/* Correct the top half for negative operands: subtract the other
   operand once for each operand that was negative.  */
419 neg_double (l2, h2, &neglow, &neghigh);
420 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
424 neg_double (l1, h1, &neglow, &neghigh);
425 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
/* The corrected top half must be all zeros (nonnegative result) or all
   ones (negative result); anything else is overflow.  */
427 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
430 /* Shift the doubleword integer in L1, H1 left by COUNT places
431 keeping only PREC bits of result.
432 Shift right if COUNT is negative.
433 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
434 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Shift L1/H1 left by COUNT within a PREC-bit value into *LV/*HV; a
   negative COUNT delegates to rshift_double.  After shifting, bits
   beyond PREC are sign-extended from the value's top bit (SIGNMASK).
   NOTE(review): braces, several assignments (notably the *lv cases)
   and parts of the if/else scaffolding appear elided from this
   extract.  */
437 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
438 HOST_WIDE_INT count, unsigned int prec,
439 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
441 unsigned HOST_WIDE_INT signmask;
445 rshift_double (l1, h1, -count, prec, lv, hv, arith);
449 if (SHIFT_COUNT_TRUNCATED)
452 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
454 /* Shifting by the host word size is undefined according to the
455 ANSI standard, so we must handle this as a special case. */
459 else if (count >= HOST_BITS_PER_WIDE_INT)
461 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
/* General case: the double shift `>> (w - count - 1) >> 1' avoids the
   undefined shift-by-word-size when count == 0.  */
466 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
467 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
471 /* Sign extend all bits that are beyond the precision. */
473 signmask = -((prec > HOST_BITS_PER_WIDE_INT
474 ? ((unsigned HOST_WIDE_INT) *hv
475 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
476 : (*lv >> (prec - 1))) & 1);
478 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
480 else if (prec >= HOST_BITS_PER_WIDE_INT)
482 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
483 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
488 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
489 *lv |= signmask << prec;
493 /* Shift the doubleword integer in L1, H1 right by COUNT places
494 keeping only PREC bits of result. COUNT must be positive.
495 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
496 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Shift L1/H1 right by COUNT within a PREC-bit value into *LV/*HV.
   SIGNMASK is all-ones when doing an arithmetic shift of a negative
   value, else zero; it supplies the bits shifted in and the extension
   beyond PREC.
   NOTE(review): braces, the arith parameter line, and several
   assignments appear elided from this extract.  */
499 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
500 HOST_WIDE_INT count, unsigned int prec,
501 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
504 unsigned HOST_WIDE_INT signmask;
/* NOTE(review): the leading `signmask = arith' part of this
   conditional appears elided; only the negative-value arm is
   visible.  */
507 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
510 if (SHIFT_COUNT_TRUNCATED)
513 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
515 /* Shifting by the host word size is undefined according to the
516 ANSI standard, so we must handle this as a special case. */
520 else if (count >= HOST_BITS_PER_WIDE_INT)
523 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
/* General case; the `<< (w - count - 1) << 1' form avoids an undefined
   shift by the full word size when count == 0.  */
527 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
529 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
532 /* Zero / sign extend all bits that are beyond the precision. */
534 if (count >= (HOST_WIDE_INT)prec)
539 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
541 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
543 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
544 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
549 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
550 *lv |= signmask << (prec - count);
554 /* Rotate the doubleword integer in L1, H1 left by COUNT places
555 keeping only PREC bits of result.
556 Rotate right if COUNT is negative.
557 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Rotate L1/H1 left by COUNT within PREC bits into *LV/*HV, built as
   the OR of a left shift by COUNT and a logical right shift by
   PREC - COUNT.
   NOTE(review): the COUNT normalization and the final lines combining
   s1/s2 into *lv/*hv appear elided from this extract.  */
560 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
561 HOST_WIDE_INT count, unsigned int prec,
562 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
564 unsigned HOST_WIDE_INT s1l, s2l;
565 HOST_WIDE_INT s1h, s2h;
571 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
572 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
577 /* Rotate the doubleword integer in L1, H1 left by COUNT places
578 keeping only PREC bits of result. COUNT must be positive.
579 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Rotate L1/H1 right by COUNT within PREC bits into *LV/*HV; mirror
   image of lrotate_double (right shift by COUNT OR left shift by
   PREC - COUNT).
   NOTE(review): the COUNT normalization and the final lines combining
   s1/s2 into *lv/*hv appear elided from this extract.  */
582 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
583 HOST_WIDE_INT count, unsigned int prec,
584 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
586 unsigned HOST_WIDE_INT s1l, s2l;
587 HOST_WIDE_INT s1h, s2h;
593 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
594 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
599 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
600 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
601 CODE is a tree code for a kind of division, one of
602 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
604 It controls how the quotient is rounded to an integer.
605 Return nonzero if the operation overflows.
606 UNS nonzero says do unsigned division. */
/* Double-word division with rounding; see the block comment above for
   the CODE semantics.  Computes quotient *LQUO/*HQUO and remainder
   *LREM/*HREM, returning nonzero on overflow (division by zero, or the
   most negative value divided by -1).  The multi-word path is
   Knuth's Algorithm D over half-word digits (see encode/decode).
   NOTE(review): many lines — braces, loop/switch scaffolding, and a
   number of statements — appear elided from this extract; the
   comments below annotate only the visible code.  */
609 div_and_round_double (enum tree_code code, int uns,
610 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
611 HOST_WIDE_INT hnum_orig,
612 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
613 HOST_WIDE_INT hden_orig,
614 unsigned HOST_WIDE_INT *lquo,
615 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
619 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
620 HOST_WIDE_INT den[4], quo[4];
622 unsigned HOST_WIDE_INT work;
623 unsigned HOST_WIDE_INT carry = 0;
624 unsigned HOST_WIDE_INT lnum = lnum_orig;
625 HOST_WIDE_INT hnum = hnum_orig;
626 unsigned HOST_WIDE_INT lden = lden_orig;
627 HOST_WIDE_INT hden = hden_orig;
/* Division by zero: flag overflow and substitute divisor 1 so the rest
   of the routine still produces defined values.  */
630 if (hden == 0 && lden == 0)
631 overflow = 1, lden = 1;
633 /* Calculate quotient sign and convert operands to unsigned. */
639 /* (minimum integer) / (-1) is the only overflow case. */
640 if (neg_double (lnum, hnum, &lnum, &hnum)
641 && ((HOST_WIDE_INT) lden & hden) == -1)
647 neg_double (lden, hden, &lden, &hden);
651 if (hnum == 0 && hden == 0)
652 { /* single precision */
654 /* This unsigned division rounds toward zero. */
660 { /* trivial case: dividend < divisor */
661 /* hden != 0 already checked. */
/* Multi-word path: work in half-word digit arrays.  */
668 memset (quo, 0, sizeof quo);
670 memset (num, 0, sizeof num); /* to zero 9th element */
671 memset (den, 0, sizeof den);
673 encode (num, lnum, hnum);
674 encode (den, lden, hden);
676 /* Special code for when the divisor < BASE. */
677 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
679 /* hnum != 0 already checked. */
680 for (i = 4 - 1; i >= 0; i--)
682 work = num[i] + carry * BASE;
683 quo[i] = work / lden;
689 /* Full double precision division,
690 with thanks to Don Knuth's "Seminumerical Algorithms". */
691 int num_hi_sig, den_hi_sig;
692 unsigned HOST_WIDE_INT quo_est, scale;
694 /* Find the highest nonzero divisor digit. */
695 for (i = 4 - 1;; i--)
702 /* Insure that the first digit of the divisor is at least BASE/2.
703 This is required by the quotient digit estimation algorithm. */
705 scale = BASE / (den[den_hi_sig] + 1);
707 { /* scale divisor and dividend */
709 for (i = 0; i <= 4 - 1; i++)
711 work = (num[i] * scale) + carry;
712 num[i] = LOWPART (work);
713 carry = HIGHPART (work);
718 for (i = 0; i <= 4 - 1; i++)
720 work = (den[i] * scale) + carry;
721 den[i] = LOWPART (work);
722 carry = HIGHPART (work);
723 if (den[i] != 0) den_hi_sig = i;
/* Main quotient-digit loop, most significant digit first.  */
730 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
732 /* Guess the next quotient digit, quo_est, by dividing the first
733 two remaining dividend digits by the high order quotient digit.
734 quo_est is never low and is at most 2 high. */
735 unsigned HOST_WIDE_INT tmp;
737 num_hi_sig = i + den_hi_sig + 1;
738 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
739 if (num[num_hi_sig] != den[den_hi_sig])
740 quo_est = work / den[den_hi_sig];
744 /* Refine quo_est so it's usually correct, and at most one high. */
745 tmp = work - quo_est * den[den_hi_sig];
747 && (den[den_hi_sig - 1] * quo_est
748 > (tmp * BASE + num[num_hi_sig - 2])))
751 /* Try QUO_EST as the quotient digit, by multiplying the
752 divisor by QUO_EST and subtracting from the remaining dividend.
753 Keep in mind that QUO_EST is the I - 1st digit. */
756 for (j = 0; j <= den_hi_sig; j++)
758 work = quo_est * den[j] + carry;
759 carry = HIGHPART (work);
760 work = num[i + j] - LOWPART (work);
761 num[i + j] = LOWPART (work);
762 carry += HIGHPART (work) != 0;
765 /* If quo_est was high by one, then num[i] went negative and
766 we need to correct things. */
767 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
770 carry = 0; /* add divisor back in */
771 for (j = 0; j <= den_hi_sig; j++)
773 work = num[i + j] + den[j] + carry;
774 carry = HIGHPART (work);
775 num[i + j] = LOWPART (work);
778 num [num_hi_sig] += carry;
781 /* Store the quotient digit. */
786 decode (quo, lquo, hquo);
789 /* If result is negative, make it so. */
791 neg_double (*lquo, *hquo, lquo, hquo);
793 /* Compute trial remainder: rem = num - (quo * den) */
794 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
795 neg_double (*lrem, *hrem, lrem, hrem);
796 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Rounding adjustment dispatched on CODE (switch scaffolding not
   visible in this extract).  */
801 case TRUNC_MOD_EXPR: /* round toward zero */
802 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
806 case FLOOR_MOD_EXPR: /* round toward negative infinity */
807 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
810 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
818 case CEIL_MOD_EXPR: /* round toward positive infinity */
819 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
821 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
829 case ROUND_MOD_EXPR: /* round to closest integer */
831 unsigned HOST_WIDE_INT labs_rem = *lrem;
832 HOST_WIDE_INT habs_rem = *hrem;
833 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
834 HOST_WIDE_INT habs_den = hden, htwice;
836 /* Get absolute values. */
838 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
840 neg_double (lden, hden, &labs_den, &habs_den);
842 /* If (2 * abs (lrem) >= abs (lden)) */
843 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
844 labs_rem, habs_rem, &ltwice, &htwice);
846 if (((unsigned HOST_WIDE_INT) habs_den
847 < (unsigned HOST_WIDE_INT) htwice)
848 || (((unsigned HOST_WIDE_INT) habs_den
849 == (unsigned HOST_WIDE_INT) htwice)
850 && (labs_den < ltwice)))
854 add_double (*lquo, *hquo,
855 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
858 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
870 /* Compute true remainder: rem = num - (quo * den) */
871 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
872 neg_double (*lrem, *hrem, lrem, hrem);
873 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
877 /* If ARG2 divides ARG1 with zero remainder, carries out the division
878 of type CODE and returns the quotient.
879 Otherwise returns NULL_TREE. */
/* If INTEGER_CST ARG2 divides INTEGER_CST ARG1 exactly, return the
   quotient as a tree of ARG1's type using division kind CODE;
   otherwise NULL_TREE (per the block comment above).
   NOTE(review): braces and the early `return NULL_TREE;' after the
   remainder test appear elided from this extract.  */
882 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
884 unsigned HOST_WIDE_INT int1l, int2l;
885 HOST_WIDE_INT int1h, int2h;
886 unsigned HOST_WIDE_INT quol, reml;
887 HOST_WIDE_INT quoh, remh;
888 tree type = TREE_TYPE (arg1);
889 int uns = TYPE_UNSIGNED (type);
891 int1l = TREE_INT_CST_LOW (arg1);
892 int1h = TREE_INT_CST_HIGH (arg1);
893 int2l = TREE_INT_CST_LOW (arg2);
894 int2h = TREE_INT_CST_HIGH (arg2);
896 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
897 &quol, &quoh, &reml, &remh);
/* Nonzero remainder: division is not exact.  */
898 if (remh != 0 || reml != 0)
901 return build_int_cst_wide (type, quol, quoh);
904 /* This is non-zero if we should defer warnings about undefined
905 overflow. This facility exists because these warnings are a
906 special case. The code to estimate loop iterations does not want
907 to issue any warnings, since it works with expressions which do not
908 occur in user code. Various bits of cleanup code call fold(), but
909 only use the result if it has certain characteristics (e.g., is a
910 constant); that code only wants to issue a warning if the result is
   used.  */
/* Nesting depth of fold_defer_overflow_warnings (); warnings are
   deferred while this is positive.  */
913 static int fold_deferring_overflow_warnings;
915 /* If a warning about undefined overflow is deferred, this is the
916 warning. Note that this may cause us to turn two warnings into
917 one, but that is fine since it is sufficient to only give one
918 warning per expression. */
/* Message text of the pending deferred warning, or NULL if none.  */
920 static const char* fold_deferred_overflow_warning;
922 /* If a warning about undefined overflow is deferred, this is the
923 level at which the warning should be emitted. */
/* Severity level of the pending deferred warning.  */
925 static enum warn_strict_overflow_code fold_deferred_overflow_code;
927 /* Start deferring overflow warnings. We could use a stack here to
928 permit nested calls, but at present it is not necessary. */
/* Begin deferring undefined-overflow warnings (simple counter bump;
   see the block comment above).
   NOTE(review): the return-type line and braces appear elided from
   this extract.  */
931 fold_defer_overflow_warnings (void)
933 ++fold_deferring_overflow_warnings;
936 /* Stop deferring overflow warnings. If there is a pending warning,
937 and ISSUE is true, then issue the warning if appropriate. STMT is
938 the statement with which the warning should be associated (used for
939 location information); STMT may be NULL. CODE is the level of the
940 warning--a warn_strict_overflow_code value. This function will use
941 the smaller of CODE and the deferred code when deciding whether to
942 issue the warning. CODE may be zero to mean to always use the
   deferred code.  */
/* Pop one level of warning deferral; if this was the outermost level
   and ISSUE is true, emit the pending warning (if its level passes
   issue_strict_overflow_warning) at STMT's location, or at
   input_location when STMT is NULL or has no location.
   NOTE(review): braces, local declarations (warnmsg, locus) and some
   early returns appear elided from this extract.  */
946 fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
951 gcc_assert (fold_deferring_overflow_warnings > 0);
952 --fold_deferring_overflow_warnings;
/* Still nested: just fold CODE into the deferred severity and keep
   deferring.  */
953 if (fold_deferring_overflow_warnings > 0)
955 if (fold_deferred_overflow_warning != NULL
957 && code < (int) fold_deferred_overflow_code)
958 fold_deferred_overflow_code = code;
/* Outermost level: take ownership of the pending message.  */
962 warnmsg = fold_deferred_overflow_warning;
963 fold_deferred_overflow_warning = NULL;
965 if (!issue || warnmsg == NULL)
968 /* Use the smallest code level when deciding to issue the
   warning.  */
970 if (code == 0 || code > (int) fold_deferred_overflow_code)
971 code = fold_deferred_overflow_code;
973 if (!issue_strict_overflow_warning (code))
976 if (stmt == NULL_TREE || !expr_has_location (stmt))
977 locus = input_location;
979 locus = expr_location (stmt);
980 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
983 /* Stop deferring overflow warnings, ignoring any deferred
/* Convenience wrapper: pop one deferral level and drop any pending
   warning without issuing it.
   NOTE(review): the return-type line and braces appear elided from
   this extract.  */
987 fold_undefer_and_ignore_overflow_warnings (void)
989 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
992 /* Whether we are deferring overflow warnings. */
/* Return true while overflow warnings are being deferred.
   NOTE(review): the return-type line and braces appear elided from
   this extract.  */
995 fold_deferring_overflow_warnings_p (void)
997 return fold_deferring_overflow_warnings > 0;
1000 /* This is called when we fold something based on the fact that signed
1001 overflow is undefined. */
/* Record (if deferring) or immediately issue a warning that a fold
   relied on signed overflow being undefined.  GMSGID is the message,
   WC its severity.  When deferring, keep only the lowest-severity
   pending message (lower enum value = more certain warning).
   NOTE(review): the return-type line and braces appear elided from
   this extract.  */
1004 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
/* Folding on undefined overflow must not happen under -fwrapv or
   -ftrapv.  */
1006 gcc_assert (!flag_wrapv && !flag_trapv);
1007 if (fold_deferring_overflow_warnings > 0)
1009 if (fold_deferred_overflow_warning == NULL
1010 || wc < fold_deferred_overflow_code)
1012 fold_deferred_overflow_warning = gmsgid;
1013 fold_deferred_overflow_code = wc;
1016 else if (issue_strict_overflow_warning (wc))
1017 warning (OPT_Wstrict_overflow, gmsgid);
1020 /* Return true if the built-in mathematical function specified by CODE
1021 is odd, i.e. -f(x) == f(-x). */
/* Return true if built-in CODE names an odd math function, i.e. one
   with -f(x) == f(-x), so negation can be pushed into its argument.
   The rint/nearbyint group is odd only when -frounding-math is off.
   NOTE(review): the switch scaffolding, `return true;' lines and the
   default case appear elided from this extract.  */
1024 negate_mathfn_p (enum built_in_function code)
1028 CASE_FLT_FN (BUILT_IN_ASIN):
1029 CASE_FLT_FN (BUILT_IN_ASINH):
1030 CASE_FLT_FN (BUILT_IN_ATAN):
1031 CASE_FLT_FN (BUILT_IN_ATANH):
1032 CASE_FLT_FN (BUILT_IN_CASIN):
1033 CASE_FLT_FN (BUILT_IN_CASINH):
1034 CASE_FLT_FN (BUILT_IN_CATAN):
1035 CASE_FLT_FN (BUILT_IN_CATANH):
1036 CASE_FLT_FN (BUILT_IN_CBRT):
1037 CASE_FLT_FN (BUILT_IN_CPROJ):
1038 CASE_FLT_FN (BUILT_IN_CSIN):
1039 CASE_FLT_FN (BUILT_IN_CSINH):
1040 CASE_FLT_FN (BUILT_IN_CTAN):
1041 CASE_FLT_FN (BUILT_IN_CTANH):
1042 CASE_FLT_FN (BUILT_IN_ERF):
1043 CASE_FLT_FN (BUILT_IN_LLROUND):
1044 CASE_FLT_FN (BUILT_IN_LROUND):
1045 CASE_FLT_FN (BUILT_IN_ROUND):
1046 CASE_FLT_FN (BUILT_IN_SIN):
1047 CASE_FLT_FN (BUILT_IN_SINH):
1048 CASE_FLT_FN (BUILT_IN_TAN):
1049 CASE_FLT_FN (BUILT_IN_TANH):
1050 CASE_FLT_FN (BUILT_IN_TRUNC):
/* These are odd only when rounding-mode changes cannot be observed.  */
1053 CASE_FLT_FN (BUILT_IN_LLRINT):
1054 CASE_FLT_FN (BUILT_IN_LRINT):
1055 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1056 CASE_FLT_FN (BUILT_IN_RINT):
1057 return !flag_rounding_math;
1065 /* Check whether we may negate an integer constant T without causing
   overflow.  */
/* Return whether negating INTEGER_CST T cannot overflow its type:
   T must not be the minimum value, i.e. the lone value equal to
   1 << (prec - 1) in the relevant word.  Unsigned types take an early
   exit (body elided here).
   NOTE(review): braces and a few statements (the early returns, the
   prec/type declarations) appear elided from this extract.  */
1069 may_negate_without_overflow_p (tree t)
1071 unsigned HOST_WIDE_INT val;
1075 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1077 type = TREE_TYPE (t);
1078 if (TYPE_UNSIGNED (type))
1081 prec = TYPE_PRECISION (type);
/* Wide types: if any low-word bit is set the value cannot be the
   minimum, so only the high word needs checking.  */
1082 if (prec > HOST_BITS_PER_WIDE_INT)
1084 if (TREE_INT_CST_LOW (t) != 0)
1086 prec -= HOST_BITS_PER_WIDE_INT;
1087 val = TREE_INT_CST_HIGH (t);
1090 val = TREE_INT_CST_LOW (t);
1091 if (prec < HOST_BITS_PER_WIDE_INT)
1092 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
/* Negation overflows exactly when the value is the type's minimum.  */
1093 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1096 /* Determine whether an expression T can be cheaply negated using
1097 the function negate_expr without introducing undefined overflow. */
/* Return whether expression T can be cheaply negated by negate_expr
   without introducing undefined overflow, dispatching on T's tree
   code.
   NOTE(review): the switch scaffolding, many case labels, braces and
   several returns appear elided from this extract; the surviving
   comments label the transformations each visible fragment tests.  */
1100 negate_expr_p (tree t)
1107 type = TREE_TYPE (t);
1109 STRIP_SIGN_NOPS (t);
1110 switch (TREE_CODE (t))
/* Integer constants: negatable if overflow wraps, or if -CST fits.  */
1113 if (TYPE_OVERFLOW_WRAPS (type))
1116 /* Check that -CST will not overflow type. */
1117 return may_negate_without_overflow_p (t);
1119 return (INTEGRAL_TYPE_P (type)
1120 && TYPE_OVERFLOW_WRAPS (type));
/* Complex: both parts must be negatable.  */
1127 return negate_expr_p (TREE_REALPART (t))
1128 && negate_expr_p (TREE_IMAGPART (t));
1131 return negate_expr_p (TREE_OPERAND (t, 0))
1132 && negate_expr_p (TREE_OPERAND (t, 1));
1135 return negate_expr_p (TREE_OPERAND (t, 0));
/* PLUS_EXPR: only without sign-dependent rounding / signed zeros.  */
1138 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1139 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1141 /* -(A + B) -> (-B) - A. */
1142 if (negate_expr_p (TREE_OPERAND (t, 1))
1143 && reorder_operands_p (TREE_OPERAND (t, 0),
1144 TREE_OPERAND (t, 1)))
1146 /* -(A + B) -> (-A) - B. */
1147 return negate_expr_p (TREE_OPERAND (t, 0));
1150 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1151 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1152 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1153 && reorder_operands_p (TREE_OPERAND (t, 0),
1154 TREE_OPERAND (t, 1));
1157 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* MULT/RDIV: push negation into whichever operand allows it.  */
1163 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1164 return negate_expr_p (TREE_OPERAND (t, 1))
1165 || negate_expr_p (TREE_OPERAND (t, 0));
1168 case TRUNC_DIV_EXPR:
1169 case ROUND_DIV_EXPR:
1170 case FLOOR_DIV_EXPR:
1172 case EXACT_DIV_EXPR:
1173 /* In general we can't negate A / B, because if A is INT_MIN and
1174 B is 1, we may turn this into INT_MIN / -1 which is undefined
1175 and actually traps on some architectures. But if overflow is
1176 undefined, we can negate, because - (INT_MIN / 1) is an
   overflow.  */
1178 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1179 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1181 return negate_expr_p (TREE_OPERAND (t, 1))
1182 || negate_expr_p (TREE_OPERAND (t, 0));
1185 /* Negate -((double)float) as (double)(-float). */
1186 if (TREE_CODE (type) == REAL_TYPE)
1188 tree tem = strip_float_extensions (t);
1190 return negate_expr_p (tem);
1195 /* Negate -f(x) as f(-x). */
1196 if (negate_mathfn_p (builtin_mathfn_code (t)))
1197 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1201 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1202 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1204 tree op1 = TREE_OPERAND (t, 1);
/* Only for a shift by exactly precision - 1 (sign-bit extraction).  */
1205 if (TREE_INT_CST_HIGH (op1) == 0
1206 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1207 == TREE_INT_CST_LOW (op1))
1218 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1219 simplification is possible.
1220 If negate_expr_p would return true for T, NULL_TREE will never be
/* NOTE(review): this extract is elided — several case labels, braces and the
   default/return tail of the switch below are missing from view.  Comments
   only have been added; all visible code is unchanged.  */
1224 fold_negate_expr (tree t)
1226 tree type = TREE_TYPE (t);
/* Dispatch on the tree code of T; each case mirrors a predicate in
   negate_expr_p (visible in part above this function).  */
1229 switch (TREE_CODE (t))
1231 /* Convert - (~A) to A + 1. */
1233 if (INTEGRAL_TYPE_P (type))
1234 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1235 build_int_cst (type, 1));
/* Constant cases: fold the negation at compile time, but only keep the
   result when doing so does not manufacture a trap-on-overflow.  */
1239 tem = fold_negate_const (t, type);
1240 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1241 || !TYPE_OVERFLOW_TRAPS (type))
1246 tem = fold_negate_const (t, type);
1247 /* Two's complement FP formats, such as c4x, may overflow. */
1248 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
/* COMPLEX_CST: negate both halves; only build the result when both parts
   folded to constants.  */
1254 tree rpart = negate_expr (TREE_REALPART (t));
1255 tree ipart = negate_expr (TREE_IMAGPART (t));
1257 if ((TREE_CODE (rpart) == REAL_CST
1258 && TREE_CODE (ipart) == REAL_CST)
1259 || (TREE_CODE (rpart) == INTEGER_CST
1260 && TREE_CODE (ipart) == INTEGER_CST))
1261 return build_complex (type, rpart, ipart);
1266 if (negate_expr_p (t))
1267 return fold_build2 (COMPLEX_EXPR, type,
1268 fold_negate_expr (TREE_OPERAND (t, 0)),
1269 fold_negate_expr (TREE_OPERAND (t, 1)));
1273 if (negate_expr_p (t))
1274 return fold_build1 (CONJ_EXPR, type,
1275 fold_negate_expr (TREE_OPERAND (t, 0)));
/* - (- A) -> A (presumably the NEGATE_EXPR case; its label is elided).  */
1279 return TREE_OPERAND (t, 0);
/* PLUS_EXPR: only safe when sign-dependent rounding and signed zeros need
   not be honored for this mode.  */
1282 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1283 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1285 /* -(A + B) -> (-B) - A. */
1286 if (negate_expr_p (TREE_OPERAND (t, 1))
1287 && reorder_operands_p (TREE_OPERAND (t, 0),
1288 TREE_OPERAND (t, 1)))
1290 tem = negate_expr (TREE_OPERAND (t, 1));
1291 return fold_build2 (MINUS_EXPR, type,
1292 tem, TREE_OPERAND (t, 0));
1295 /* -(A + B) -> (-A) - B. */
1296 if (negate_expr_p (TREE_OPERAND (t, 0)))
1298 tem = negate_expr (TREE_OPERAND (t, 0));
1299 return fold_build2 (MINUS_EXPR, type,
1300 tem, TREE_OPERAND (t, 1));
1306 /* - (A - B) -> B - A */
1307 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1308 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1309 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1310 return fold_build2 (MINUS_EXPR, type,
1311 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
/* Multiplication (label elided): unsigned types cannot distribute the
   negation; otherwise push -(A*B) into whichever operand negates freely.  */
1315 if (TYPE_UNSIGNED (type))
1321 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1323 tem = TREE_OPERAND (t, 1);
1324 if (negate_expr_p (tem))
1325 return fold_build2 (TREE_CODE (t), type,
1326 TREE_OPERAND (t, 0), negate_expr (tem));
1327 tem = TREE_OPERAND (t, 0);
1328 if (negate_expr_p (tem))
1329 return fold_build2 (TREE_CODE (t), type,
1330 negate_expr (tem), TREE_OPERAND (t, 1));
1334 case TRUNC_DIV_EXPR:
1335 case ROUND_DIV_EXPR:
1336 case FLOOR_DIV_EXPR:
1338 case EXACT_DIV_EXPR:
1339 /* In general we can't negate A / B, because if A is INT_MIN and
1340 B is 1, we may turn this into INT_MIN / -1 which is undefined
1341 and actually traps on some architectures. But if overflow is
1342 undefined, we can negate, because - (INT_MIN / 1) is an
1344 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1346 const char * const warnmsg = G_("assuming signed overflow does not "
1347 "occur when negating a division");
1348 tem = TREE_OPERAND (t, 1);
1349 if (negate_expr_p (tem))
/* Warn only when the transformation actually relies on undefined signed
   overflow (non-constant divisor, or divisor 1).  */
1351 if (INTEGRAL_TYPE_P (type)
1352 && (TREE_CODE (tem) != INTEGER_CST
1353 || integer_onep (tem)))
1354 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1355 return fold_build2 (TREE_CODE (t), type,
1356 TREE_OPERAND (t, 0), negate_expr (tem));
1358 tem = TREE_OPERAND (t, 0);
1359 if (negate_expr_p (tem))
1361 if (INTEGRAL_TYPE_P (type)
1362 && (TREE_CODE (tem) != INTEGER_CST
1363 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1364 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1365 return fold_build2 (TREE_CODE (t), type,
1366 negate_expr (tem), TREE_OPERAND (t, 1));
1372 /* Convert -((double)float) into (double)(-float). */
1373 if (TREE_CODE (type) == REAL_TYPE)
1375 tem = strip_float_extensions (t);
1376 if (tem != t && negate_expr_p (tem))
1377 return negate_expr (tem);
1382 /* Negate -f(x) as f(-x). */
1383 if (negate_mathfn_p (builtin_mathfn_code (t))
1384 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1388 fndecl = get_callee_fndecl (t);
1389 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1390 return build_call_expr (fndecl, 1, arg);
1395 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1396 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1398 tree op1 = TREE_OPERAND (t, 1);
/* Only applies when the shift count is exactly precision-1, i.e. the
   expression extracts/replicates the sign bit.  */
1399 if (TREE_INT_CST_HIGH (op1) == 0
1400 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1401 == TREE_INT_CST_LOW (op1))
1403 tree ntype = TYPE_UNSIGNED (type)
1404 ? lang_hooks.types.signed_type (type)
1405 : lang_hooks.types.unsigned_type (type)
1406 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1407 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1408 return fold_convert (type, temp);
1420 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1421 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1422 return NULL_TREE. */
1425 negate_expr (tree t)
/* Remember the original type: STRIP_SIGN_NOPS may peel conversions, and the
   result is converted back to TYPE at the end.  (Early NULL_TREE check and
   declarations are elided from this extract.)  */
1432 type = TREE_TYPE (t);
1433 STRIP_SIGN_NOPS (t);
1435 tem = fold_negate_expr (t);
/* If folding found no simplification, fall back to an explicit NEGATE_EXPR
   node (presumably guarded by a NULL check on TEM in the elided line).  */
1437 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1438 return fold_convert (type, tem);
1441 /* Split a tree IN into a constant, literal and variable parts that could be
1442 combined with CODE to make IN. "constant" means an expression with
1443 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1444 commutative arithmetic operation. Store the constant part into *CONP,
1445 the literal in *LITP and return the variable part. If a part isn't
1446 present, set it to null. If the tree does not decompose in this way,
1447 return the entire tree as the variable part and the other parts as null.
1449 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1450 case, we negate an operand that was subtracted. Except if it is a
1451 literal for which we use *MINUS_LITP instead.
1453 If NEGATE_P is true, we are negating all of IN, again except a literal
1454 for which we use *MINUS_LITP instead.
1456 If IN is itself a literal or constant, return it as appropriate.
1458 Note that we do not guarantee that any of the three values will be the
1459 same type as IN, but they will have the same signedness and mode. */
1462 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1463 tree *minus_litp, int negate_p)
1471 /* Strip any conversions that don't change the machine mode or signedness. */
1472 STRIP_SIGN_NOPS (in);
/* Case 1 (body elided): IN itself is a literal -> goes into *LITP.  */
1474 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
/* Case 2: IN is a binary CODE node (or a +/- mix for integers) that we can
   decompose operand by operand.  */
1476 else if (TREE_CODE (in) == code
1477 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1478 /* We can associate addition and subtraction together (even
1479 though the C standard doesn't say so) for integers because
1480 the value is not affected. For reals, the value might be
1481 affected, so we can't. */
1482 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1483 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1485 tree op0 = TREE_OPERAND (in, 0);
1486 tree op1 = TREE_OPERAND (in, 1);
/* NEG1_P records that OP1 was subtracted; the NEG_*_P flags track which
   slot inherits that pending negation.  */
1487 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1488 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1490 /* First see if either of the operands is a literal, then a constant. */
1491 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1492 *litp = op0, op0 = 0;
1493 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1494 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1496 if (op0 != 0 && TREE_CONSTANT (op0))
1497 *conp = op0, op0 = 0;
1498 else if (op1 != 0 && TREE_CONSTANT (op1))
1499 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1501 /* If we haven't dealt with either operand, this is not a case we can
1502 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1503 if (op0 != 0 && op1 != 0)
1508 var = op1, neg_var_p = neg1_p;
1510 /* Now do any needed negations. */
/* A subtracted literal is reported through *MINUS_LITP rather than being
   negated in place (guarding conditions elided).  */
1512 *minus_litp = *litp, *litp = 0;
1514 *conp = negate_expr (*conp);
1516 var = negate_expr (var);
/* Case 3: IN is TREE_CONSTANT but not a literal -> goes into *CONP.  */
1518 else if (TREE_CONSTANT (in))
/* NEGATE_P handling (guard elided): flip literal into *MINUS_LITP (or back),
   and negate the constant and variable parts outright.  */
1526 *minus_litp = *litp, *litp = 0;
1527 else if (*minus_litp)
1528 *litp = *minus_litp, *minus_litp = 0;
1529 *conp = negate_expr (*conp);
1530 var = negate_expr (var);
1536 /* Re-associate trees split by the above function. T1 and T2 are either
1537 expressions to associate or null. Return the new expression, if any. If
1538 we build an operation, do it in TYPE and with CODE. */
1541 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
/* (Early returns for null T1/T2 are elided from this extract.)  */
1548 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1549 try to fold this since we will have infinite recursion. But do
1550 deal with any NEGATE_EXPRs. */
1551 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1552 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1554 if (code == PLUS_EXPR)
/* a + (-b) and (-a) + b become subtractions so no NEGATE_EXPR survives.  */
1556 if (TREE_CODE (t1) == NEGATE_EXPR)
1557 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1558 fold_convert (type, TREE_OPERAND (t1, 0)));
1559 else if (TREE_CODE (t2) == NEGATE_EXPR)
1560 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1561 fold_convert (type, TREE_OPERAND (t2, 0)));
1562 else if (integer_zerop (t2))
1563 return fold_convert (type, t1);
1565 else if (code == MINUS_EXPR)
1567 if (integer_zerop (t2))
1568 return fold_convert (type, t1);
/* Build without folding here — see the recursion note above.  */
1571 return build2 (code, type, fold_convert (type, t1),
1572 fold_convert (type, t2));
/* Safe to fold in the general case.  */
1575 return fold_build2 (code, type, fold_convert (type, t1),
1576 fold_convert (type, t2));
1579 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1580 for use in int_const_binop, size_binop and size_diffop. */
1583 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
/* Both types must be integer or pointer types (the failing returns are
   elided from this extract).  */
1585 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1587 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
/* Equivalent means: same signedness, same precision, same machine mode.  */
1602 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1603 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1604 && TYPE_MODE (type1) == TYPE_MODE (type2);
1608 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1609 to produce a new constant. Return NULL_TREE if we don't know how
1610 to evaluate CODE at compile-time.
1612 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1615 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Double-word representation: each constant is a (low, high) pair of
   HOST_WIDE_INTs, manipulated by the *_double helpers.  */
1617 unsigned HOST_WIDE_INT int1l, int2l;
1618 HOST_WIDE_INT int1h, int2h;
1619 unsigned HOST_WIDE_INT low;
1621 unsigned HOST_WIDE_INT garbagel;
1622 HOST_WIDE_INT garbageh;
1624 tree type = TREE_TYPE (arg1);
1625 int uns = TYPE_UNSIGNED (type);
/* Sizetypes get special overflow propagation below even when unsigned.  */
1627 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1630 int1l = TREE_INT_CST_LOW (arg1);
1631 int1h = TREE_INT_CST_HIGH (arg1);
1632 int2l = TREE_INT_CST_LOW (arg2);
1633 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise ops (case labels elided in this extract).  */
1638 low = int1l | int2l, hi = int1h | int2h;
1642 low = int1l ^ int2l, hi = int1h ^ int2h;
1646 low = int1l & int2l, hi = int1h & int2h;
1652 /* It's unclear from the C standard whether shifts can overflow.
1653 The following code ignores overflow; perhaps a C standard
1654 interpretation ruling is needed. */
1655 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1662 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1667 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* MINUS is implemented as addition of the negation; the sign-based check
   then recovers the subtraction's overflow status.  */
1671 neg_double (int2l, int2h, &low, &hi);
1672 add_double (int1l, int1h, low, hi, &low, &hi);
1673 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1677 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1680 case TRUNC_DIV_EXPR:
1681 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1682 case EXACT_DIV_EXPR:
1683 /* This is a shortcut for a common special case. */
/* Both operands fit in one non-negative word and there is no prior
   overflow: do the division directly in host arithmetic.  */
1684 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1685 && !TREE_OVERFLOW (arg1)
1686 && !TREE_OVERFLOW (arg2)
1687 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1689 if (code == CEIL_DIV_EXPR)
1692 low = int1l / int2l, hi = 0;
1696 /* ... fall through ... */
1698 case ROUND_DIV_EXPR:
/* Division by zero is not folded (the bail-out body is elided).  */
1699 if (int2h == 0 && int2l == 0)
1701 if (int2h == 0 && int2l == 1)
1703 low = int1l, hi = int1h;
/* X / X -> 1, except 0 / 0.  */
1706 if (int1l == int2l && int1h == int2h
1707 && ! (int1l == 0 && int1h == 0))
1712 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1713 &low, &hi, &garbagel, &garbageh);
1716 case TRUNC_MOD_EXPR:
1717 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1718 /* This is a shortcut for a common special case. */
1719 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1720 && !TREE_OVERFLOW (arg1)
1721 && !TREE_OVERFLOW (arg2)
1722 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1724 if (code == CEIL_MOD_EXPR)
1726 low = int1l % int2l, hi = 0;
1730 /* ... fall through ... */
1732 case ROUND_MOD_EXPR:
1733 if (int2h == 0 && int2l == 0)
/* For MOD the quotient is the garbage output and the remainder is kept.  */
1735 overflow = div_and_round_double (code, uns,
1736 int1l, int1h, int2l, int2h,
1737 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX (labels elided): LOW temporarily holds the "ARG1 < ARG2" flag,
   computed unsigned or signed as appropriate.  */
1743 low = (((unsigned HOST_WIDE_INT) int1h
1744 < (unsigned HOST_WIDE_INT) int2h)
1745 || (((unsigned HOST_WIDE_INT) int1h
1746 == (unsigned HOST_WIDE_INT) int2h)
1749 low = (int1h < int2h
1750 || (int1h == int2h && int1l < int2l));
1752 if (low == (code == MIN_EXPR))
1753 low = int1l, hi = int1h;
1755 low = int2l, hi = int2h;
/* NOTRUNC path: build the raw constant and propagate overflow by hand.  */
1764 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1766 /* Propagate overflow flags ourselves. */
1767 if (((!uns || is_sizetype) && overflow)
1768 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1771 TREE_OVERFLOW (t) = 1;
/* Normal path: force the value to fit TYPE, folding overflow state in.  */
1775 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1776 ((!uns || is_sizetype) && overflow)
1777 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1782 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1783 constant. We assume ARG1 and ARG2 have the same data type, or at least
1784 are the same kind of constant and the same machine mode. Return zero if
1785 combining the constants is not allowed in the current operating mode.
1787 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1790 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1792 /* Sanity check for the recursive cases. */
/* Integer constants are delegated wholesale to int_const_binop.  */
1799 if (TREE_CODE (arg1) == INTEGER_CST)
1800 return int_const_binop (code, arg1, arg2, notrunc);
1802 if (TREE_CODE (arg1) == REAL_CST)
1804 enum machine_mode mode;
1807 REAL_VALUE_TYPE value;
1808 REAL_VALUE_TYPE result;
1812 /* The following codes are handled by real_arithmetic. */
1827 d1 = TREE_REAL_CST (arg1);
1828 d2 = TREE_REAL_CST (arg2);
1830 type = TREE_TYPE (arg1);
1831 mode = TYPE_MODE (type);
1833 /* Don't perform operation if we honor signaling NaNs and
1834 either operand is a NaN. */
1835 if (HONOR_SNANS (mode)
1836 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1839 /* Don't perform operation if it would raise a division
1840 by zero exception. */
1841 if (code == RDIV_EXPR
1842 && REAL_VALUES_EQUAL (d2, dconst0)
1843 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1846 /* If either operand is a NaN, just return it. Otherwise, set up
1847 for floating-point trap; we return an overflow. */
1848 if (REAL_VALUE_ISNAN (d1))
1850 else if (REAL_VALUE_ISNAN (d2))
/* Compute in infinite-precision VALUE, then round to the target MODE;
   INEXACT records whether rounding lost information.  */
1853 inexact = real_arithmetic (&value, code, &d1, &d2);
1854 real_convert (&result, mode, &value);
1856 /* Don't constant fold this floating point operation if
1857 the result has overflowed and flag_trapping_math. */
1858 if (flag_trapping_math
1859 && MODE_HAS_INFINITIES (mode)
1860 && REAL_VALUE_ISINF (result)
1861 && !REAL_VALUE_ISINF (d1)
1862 && !REAL_VALUE_ISINF (d2))
1865 /* Don't constant fold this floating point operation if the
1866 result may dependent upon the run-time rounding mode and
1867 flag_rounding_math is set, or if GCC's software emulation
1868 is unable to accurately represent the result. */
1869 if ((flag_rounding_math
1870 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1871 && !flag_unsafe_math_optimizations))
1872 && (inexact || !real_identical (&result, &value)))
1875 t = build_real (type, result);
1877 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1881 if (TREE_CODE (arg1) == COMPLEX_CST)
1883 tree type = TREE_TYPE (arg1);
1884 tree r1 = TREE_REALPART (arg1);
1885 tree i1 = TREE_IMAGPART (arg1);
1886 tree r2 = TREE_REALPART (arg2);
1887 tree i2 = TREE_IMAGPART (arg2);
/* Addition/subtraction (case labels elided): fold componentwise.  */
1894 real = const_binop (code, r1, r2, notrunc);
1895 imag = const_binop (code, i1, i2, notrunc);
/* Multiplication: (r1*r2 - i1*i2) + (r1*i2 + i1*r2)i.  */
1899 real = const_binop (MINUS_EXPR,
1900 const_binop (MULT_EXPR, r1, r2, notrunc),
1901 const_binop (MULT_EXPR, i1, i2, notrunc),
1903 imag = const_binop (PLUS_EXPR,
1904 const_binop (MULT_EXPR, r1, i2, notrunc),
1905 const_binop (MULT_EXPR, i1, r2, notrunc),
/* Division: textbook formula over |arg2|^2 = r2*r2 + i2*i2.  */
1912 = const_binop (PLUS_EXPR,
1913 const_binop (MULT_EXPR, r2, r2, notrunc),
1914 const_binop (MULT_EXPR, i2, i2, notrunc),
1917 = const_binop (PLUS_EXPR,
1918 const_binop (MULT_EXPR, r1, r2, notrunc),
1919 const_binop (MULT_EXPR, i1, i2, notrunc),
1922 = const_binop (MINUS_EXPR,
1923 const_binop (MULT_EXPR, i1, r2, notrunc),
1924 const_binop (MULT_EXPR, r1, i2, notrunc),
/* Integral complex types divide with truncation, not RDIV.  */
1927 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1928 code = TRUNC_DIV_EXPR;
1930 real = const_binop (code, t1, magsquared, notrunc);
1931 imag = const_binop (code, t2, magsquared, notrunc);
1940 return build_complex (type, real, imag);
1946 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1947 indicates which particular sizetype to create. */
1950 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
/* KIND indexes the table of size types (sizetype, bitsizetype, ...).  */
1952 return build_int_cst (sizetype_tab[(int) kind], number);
1955 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1956 is a tree code. The type of the result is taken from the operands.
1957 Both must be equivalent integer types, ala int_binop_types_match_p.
1958 If the operands are constant, so is the result. */
1961 size_binop (enum tree_code code, tree arg0, tree arg1)
1963 tree type = TREE_TYPE (arg0);
1965 if (arg0 == error_mark_node || arg1 == error_mark_node)
1966 return error_mark_node;
/* The operand types must be equivalent — see int_binop_types_match_p.  */
1968 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1971 /* Handle the special case of two integer constants faster. */
1972 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1974 /* And some specific cases even faster than that. */
/* Identity shortcuts (the returned operands are on elided lines):
   0+x, x+0, x-0, 1*x need no arithmetic at all, but only when the
   identity operand carries no overflow flag to propagate.  */
1975 if (code == PLUS_EXPR)
1977 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1979 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1982 else if (code == MINUS_EXPR)
1984 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1987 else if (code == MULT_EXPR)
1989 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1993 /* Handle general case of two integer constants. */
1994 return int_const_binop (code, arg0, arg1, 0);
/* Non-constant operands: build a folded expression instead.  */
1997 return fold_build2 (code, type, arg0, arg1);
2000 /* Given two values, either both of sizetype or both of bitsizetype,
2001 compute the difference between the two values. Return the value
2002 in signed type corresponding to the type of the operands. */
2005 size_diffop (tree arg0, tree arg1)
2007 tree type = TREE_TYPE (arg0);
2010 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2013 /* If the type is already signed, just do the simple thing. */
2014 if (!TYPE_UNSIGNED (type))
2015 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart: the dedicated ssizetype/sbitsizetype when
   available, otherwise ask the language hook.  */
2017 if (type == sizetype)
2019 else if (type == bitsizetype)
2020 ctype = sbitsizetype;
2022 ctype = lang_hooks.types.signed_type (type);
2024 /* If either operand is not a constant, do the conversions to the signed
2025 type and subtract. The hardware will do the right thing with any
2026 overflow in the subtraction. */
2027 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2028 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2029 fold_convert (ctype, arg1));
2031 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2032 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2033 overflow) and negate (which can't either). Special-case a result
2034 of zero while we're here. */
2035 if (tree_int_cst_equal (arg0, arg1))
2036 return build_int_cst (ctype, 0);
2037 else if (tree_int_cst_lt (arg1, arg0))
2038 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* arg0 < arg1: compute -(arg1 - arg0) via 0 - (arg1 - arg0).  */
2040 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2041 fold_convert (ctype, size_binop (MINUS_EXPR,
2045 /* A subroutine of fold_convert_const handling conversions of an
2046 INTEGER_CST to another integer type. */
2049 fold_convert_const_int_from_int (tree type, tree arg1)
2053 /* Given an integer constant, make new constant with new type,
2054 appropriately sign-extended or truncated. */
2055 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2056 TREE_INT_CST_HIGH (arg1),
2057 /* Don't set the overflow when
2058 converting a pointer */
2059 !POINTER_TYPE_P (TREE_TYPE (arg1)),
/* Overflow is flagged when a negative value is converted to a strictly
   "more unsigned" type, or when ARG1 already overflowed.  */
2060 (TREE_INT_CST_HIGH (arg1) < 0
2061 && (TYPE_UNSIGNED (type)
2062 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2063 | TREE_OVERFLOW (arg1));
2068 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2069 to an integer type. */
2072 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
2077 /* The following code implements the floating point to integer
2078 conversion rules required by the Java Language Specification,
2079 that IEEE NaNs are mapped to zero and values that overflow
2080 the target precision saturate, i.e. values greater than
2081 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2082 are mapped to INT_MIN. These semantics are allowed by the
2083 C and C++ standards that simply state that the behavior of
2084 FP-to-integer conversion is unspecified upon overflow. */
2086 HOST_WIDE_INT high, low;
2088 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* First round X toward an integer according to CODE (only the truncating
   case is visible here; CEIL/FLOOR/ROUND cases are elided).  */
2092 case FIX_TRUNC_EXPR:
2093 real_trunc (&r, VOIDmode, &x);
2100 /* If R is NaN, return zero and show we have an overflow. */
2101 if (REAL_VALUE_ISNAN (r))
2108 /* See if R is less than the lower bound or greater than the
/* Saturate at TYPE's min/max; the overflow flag is set on the elided
   lines in each branch.  */
2113 tree lt = TYPE_MIN_VALUE (type);
2114 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2115 if (REAL_VALUES_LESS (r, l))
2118 high = TREE_INT_CST_HIGH (lt);
2119 low = TREE_INT_CST_LOW (lt);
2125 tree ut = TYPE_MAX_VALUE (type);
2128 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2129 if (REAL_VALUES_LESS (u, r))
2132 high = TREE_INT_CST_HIGH (ut);
2133 low = TREE_INT_CST_LOW (ut);
/* In range: convert the rounded real directly to a double-word integer.  */
2139 REAL_VALUE_TO_INT (&low, &high, r);
2141 t = force_fit_type_double (type, low, high, -1,
2142 overflow | TREE_OVERFLOW (arg1));
2146 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2147 to another floating point type. */
2150 fold_convert_const_real_from_real (tree type, tree arg1)
2152 REAL_VALUE_TYPE value;
/* Round the constant to TYPE's machine mode and carry over ARG1's
   overflow flag.  */
2155 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2156 t = build_real (type, value);
2158 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2162 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2163 type TYPE. If no simplification can be done return NULL_TREE. */
2166 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* Same type: nothing to do (the returned value is on an elided line).  */
2168 if (TREE_TYPE (arg1) == type)
/* Dispatch to the per-kind helpers above by target-type / constant kind.  */
2171 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2173 if (TREE_CODE (arg1) == INTEGER_CST)
2174 return fold_convert_const_int_from_int (type, arg1);
2175 else if (TREE_CODE (arg1) == REAL_CST)
2176 return fold_convert_const_int_from_real (code, type, arg1);
2178 else if (TREE_CODE (type) == REAL_TYPE)
2180 if (TREE_CODE (arg1) == INTEGER_CST)
2181 return build_real_from_int_cst (type, arg1);
2182 if (TREE_CODE (arg1) == REAL_CST)
2183 return fold_convert_const_real_from_real (type, arg1);
2188 /* Construct a vector of zero elements of vector type TYPE. */
2191 build_zero_vector (tree type)
/* Fold a scalar zero to the element type, then replicate it once per
   vector lane into a TREE_LIST consumed by build_vector.  */
2196 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2197 units = TYPE_VECTOR_SUBPARTS (type);
2200 for (i = 0; i < units; i++)
2201 list = tree_cons (NULL_TREE, elem, list);
2202 return build_vector (type, list);
2205 /* Convert expression ARG to type TYPE. Used by the middle-end for
2206 simple conversions in preference to calling the front-end's convert. */
2209 fold_convert (tree type, tree arg)
2211 tree orig = TREE_TYPE (arg);
/* Error nodes propagate unchanged.  */
2217 if (TREE_CODE (arg) == ERROR_MARK
2218 || TREE_CODE (type) == ERROR_MARK
2219 || TREE_CODE (orig) == ERROR_MARK)
2220 return error_mark_node;
/* Trivially compatible types need only a NOP_EXPR.  */
2222 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2223 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2224 TYPE_MAIN_VARIANT (orig)))
2225 return fold_build1 (NOP_EXPR, type, arg);
2227 switch (TREE_CODE (type))
2229 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2230 case POINTER_TYPE: case REFERENCE_TYPE:
/* Constant operands fold immediately when possible.  */
2232 if (TREE_CODE (arg) == INTEGER_CST)
2234 tem = fold_convert_const (NOP_EXPR, type, arg);
2235 if (tem != NULL_TREE)
2238 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2239 || TREE_CODE (orig) == OFFSET_TYPE)
2240 return fold_build1 (NOP_EXPR, type, arg);
/* complex -> integer: convert the real part only.  */
2241 if (TREE_CODE (orig) == COMPLEX_TYPE)
2243 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2244 return fold_convert (type, tem);
2246 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2247 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2248 return fold_build1 (NOP_EXPR, type, arg);
/* REAL_TYPE target (case label elided).  */
2251 if (TREE_CODE (arg) == INTEGER_CST)
2253 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2254 if (tem != NULL_TREE)
2257 else if (TREE_CODE (arg) == REAL_CST)
2259 tem = fold_convert_const (NOP_EXPR, type, arg);
2260 if (tem != NULL_TREE)
2264 switch (TREE_CODE (orig))
2267 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2268 case POINTER_TYPE: case REFERENCE_TYPE:
2269 return fold_build1 (FLOAT_EXPR, type, arg);
2272 return fold_build1 (NOP_EXPR, type, arg);
/* complex -> real: again keep only the real part.  */
2275 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2276 return fold_convert (type, tem);
/* COMPLEX_TYPE target (case label elided).  */
2283 switch (TREE_CODE (orig))
2286 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2287 case POINTER_TYPE: case REFERENCE_TYPE:
/* scalar -> complex: pair the converted value with a zero imaginary.  */
2289 return build2 (COMPLEX_EXPR, type,
2290 fold_convert (TREE_TYPE (type), arg),
2291 fold_convert (TREE_TYPE (type), integer_zero_node));
2296 if (TREE_CODE (arg) == COMPLEX_EXPR)
2298 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2299 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2300 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* General complex -> complex: ARG is evaluated once via save_expr since
   both REALPART and IMAGPART reference it.  */
2303 arg = save_expr (arg);
2304 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2305 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2306 rpart = fold_convert (TREE_TYPE (type), rpart);
2307 ipart = fold_convert (TREE_TYPE (type), ipart);
2308 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* VECTOR_TYPE target (case label elided): only same-size reinterpretation
   is supported, via VIEW_CONVERT_EXPR.  */
2316 if (integer_zerop (arg))
2317 return build_zero_vector (type);
2318 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2319 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2320 || TREE_CODE (orig) == VECTOR_TYPE);
2321 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
/* VOID_TYPE target (case label elided): drop the value, keep side effects.  */
2324 tem = fold_ignored_result (arg);
2325 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2327 return fold_build1 (NOP_EXPR, type, tem);
2334 /* Return false if expr can be assumed not to be an lvalue, true
2338 maybe_lvalue_p (tree x)
2340 /* We only need to wrap lvalue tree codes. */
/* The list of lvalue-capable codes; many case labels (VAR_DECL,
   COMPONENT_REF, ARRAY_REF, ...) are elided from this extract.  */
2341 switch (TREE_CODE (x))
2352 case ALIGN_INDIRECT_REF:
2353 case MISALIGNED_INDIRECT_REF:
2355 case ARRAY_RANGE_REF:
2361 case PREINCREMENT_EXPR:
2362 case PREDECREMENT_EXPR:
2364 case TRY_CATCH_EXPR:
2365 case WITH_CLEANUP_EXPR:
2368 case GIMPLE_MODIFY_STMT:
2377 /* Assume the worst for front-end tree codes. */
2378 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2386 /* Return an expr equal to X but certainly not valid as an lvalue. */
2391 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* X that cannot be an lvalue is returned untouched (return on an elided
   line); otherwise wrap it so assignment to it is rejected.  */
2396 if (! maybe_lvalue_p (x))
2398 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2401 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2402 Zero means allow extended lvalues. */
2404 int pedantic_lvalues;
2406 /* When pedantic, return an expr equal to X but certainly not valid as a
2407 pedantic lvalue. Otherwise, return X. */
2410 pedantic_non_lvalue (tree x)
/* Only wrap when the front end requested pedantic lvalue rules; the
   plain "return x" is on an elided line.  */
2412 if (pedantic_lvalues)
2413 return non_lvalue (x)
2418 /* Given a tree comparison code, return the code that is the logical inverse
2419 of the given code. It is not safe to do this for floating-point
2420 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2421 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2424 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math and NaNs, inversion could change trap behavior;
   refuse (the ERROR_MARK return is on an elided line, guarded for the
   non-EQ/NE codes).  */
2426 if (honor_nans && flag_trapping_math)
/* Ordered comparisons invert to their unordered counterparts when NaNs
   must be honored: !(a < b) is (a >= b) only without NaNs.  */
2436 return honor_nans ? UNLE_EXPR : LE_EXPR;
2438 return honor_nans ? UNLT_EXPR : LT_EXPR;
2440 return honor_nans ? UNGE_EXPR : GE_EXPR;
2442 return honor_nans ? UNGT_EXPR : GT_EXPR;
2456 return UNORDERED_EXPR;
2457 case UNORDERED_EXPR:
2458 return ORDERED_EXPR;
2464 /* Similar, but return the comparison that results if the operands are
2465 swapped. This is safe for floating-point. */
2468 swap_tree_comparison (enum tree_code code)
/* NOTE(review): the switch body is almost entirely elided from this
   extract; only the UNORDERED_EXPR label survives (symmetric codes are
   presumably returned unchanged, LT/GT and LE/GE swapped).  */
2475 case UNORDERED_EXPR:
2501 /* Convert a comparison tree code from an enum tree_code representation
2502 into a compcode bit-based encoding. This function is the inverse of
2503 compcode_to_comparison. */
2505 static enum comparison_code
2506 comparison_to_compcode (enum tree_code code)
/* Bit encoding lets combine_comparisons AND/OR two comparisons directly.
   (The ordered-comparison case labels LT/EQ/LE/GT/NE/GE are elided.)  */
2523 return COMPCODE_ORD;
2524 case UNORDERED_EXPR:
2525 return COMPCODE_UNORD;
2527 return COMPCODE_UNLT;
2529 return COMPCODE_UNEQ;
2531 return COMPCODE_UNLE;
2533 return COMPCODE_UNGT;
2535 return COMPCODE_LTGT;
2537 return COMPCODE_UNGE;
2543 /* Convert a compcode bit-based encoding of a comparison operator back
2544 to GCC's enum tree_code representation. This function is the
2545 inverse of comparison_to_compcode. */
2547 static enum tree_code
2548 compcode_to_comparison (enum comparison_code code)
/* (Most case labels are elided; only the ordered/unordered pair is
   visible here.)  */
2565 return ORDERED_EXPR;
2566 case COMPCODE_UNORD:
2567 return UNORDERED_EXPR;
2585 /* Return a tree for the comparison which is the combination of
2586 doing the AND or OR (depending on CODE) of the two operations LCODE
2587 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2588 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2589 if this makes the transformation invalid. */
2592 combine_comparisons (enum tree_code code, enum tree_code lcode,
2593 enum tree_code rcode, tree truth_type,
2594 tree ll_arg, tree lr_arg)
2596 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
/* Work in the compcode bit domain, where AND/OR of two comparisons is
   literally bitwise &/| of their encodings.  */
2597 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2598 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2599 enum comparison_code compcode;
2603 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2604 compcode = lcompcode & rcompcode;
2607 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2608 compcode = lcompcode | rcompcode;
2617 /* Eliminate unordered comparisons, as well as LTGT and ORD
2618 which are not used unless the mode has NaNs. */
2619 compcode &= ~COMPCODE_UNORD;
2620 if (compcode == COMPCODE_LTGT)
2621 compcode = COMPCODE_NE;
2622 else if (compcode == COMPCODE_ORD)
2623 compcode = COMPCODE_TRUE;
2625 else if (flag_trapping_math)
2627 /* Check that the original operation and the optimized ones will trap
2628 under the same condition. */
/* A comparison traps (on signaling unordered operands) iff it is an
   ordered comparison other than EQ and ORD.  */
2629 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2630 && (lcompcode != COMPCODE_EQ)
2631 && (lcompcode != COMPCODE_ORD);
2632 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2633 && (rcompcode != COMPCODE_EQ)
2634 && (rcompcode != COMPCODE_ORD);
2635 bool trap = (compcode & COMPCODE_UNORD) == 0
2636 && (compcode != COMPCODE_EQ)
2637 && (compcode != COMPCODE_ORD);
2639 /* In a short-circuited boolean expression the LHS might be
2640 such that the RHS, if evaluated, will never trap. For
2641 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2642 if neither x nor y is NaN. (This is a mixed blessing: for
2643 example, the expression above will never trap, hence
2644 optimizing it to x < y would be invalid). */
2645 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2646 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2649 /* If the comparison was short-circuited, and only the RHS
2650 trapped, we may now generate a spurious trap. */
2652 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2655 /* If we changed the conditions that cause a trap, we lose. */
2656 if ((ltrap || rtrap) != trap)
/* Degenerate results fold to boolean constants; otherwise rebuild a single
   comparison from the combined encoding.  */
2660 if (compcode == COMPCODE_TRUE)
2661 return constant_boolean_node (true, truth_type);
2662 else if (compcode == COMPCODE_FALSE)
2663 return constant_boolean_node (false, truth_type);
2665 return fold_build2 (compcode_to_comparison (compcode),
2666 truth_type, ll_arg, lr_arg);
2669 /* Return nonzero if CODE is a tree code that represents a truth value. */
2672 truth_value_p (enum tree_code code)
/* Any comparison code or logical TRUTH_* operator yields a 0/1 value.  */
2674 return (TREE_CODE_CLASS (code) == tcc_comparison
2675 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2676 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2677 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2680 /* Return nonzero if two operands (typically of the same tree node)
2681 are necessarily equal. If either argument has side-effects this
2682 function returns zero. FLAGS modifies behavior as follows:
2684 If OEP_ONLY_CONST is set, only return nonzero for constants.
2685 This function tests whether the operands are indistinguishable;
2686 it does not test whether they are equal using C's == operation.
2687 The distinction is important for IEEE floating point, because
2688 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2689 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2691 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2692 even though it may hold multiple values during a function.
2693 This is because a GCC tree node guarantees that nothing else is
2694 executed between the evaluation of its "operands" (which may often
2695 be evaluated in arbitrary order). Hence if the operands themselves
2696 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2697 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2698 unset means assuming isochronic (or instantaneous) tree equivalence.
2699 Unless comparing arbitrary expression trees, such as from different
2700 statements, this flag can usually be left unset.
2702 If OEP_PURE_SAME is set, then pure functions with identical arguments
2703 are considered the same. It is used when the caller has other ways
2704 to ensure that global memory is unchanged in between. */
2707 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2709 /* If either is ERROR_MARK, they aren't equal. */
2710 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2713 /* If both types don't have the same signedness, then we can't consider
2714 them equal. We must check this before the STRIP_NOPS calls
2715 because they may change the signedness of the arguments. */
2716 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2719 /* If both types don't have the same precision, then it is not safe
   to consider them equal.  */
2721 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2727 /* In case both args are comparisons but with different comparison
2728 code, try to swap the comparison operands of one arg to produce
2729 a match and compare that variant. */
2730 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2731 && COMPARISON_CLASS_P (arg0)
2732 && COMPARISON_CLASS_P (arg1))
2734 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2736 if (TREE_CODE (arg0) == swap_code)
2737 return operand_equal_p (TREE_OPERAND (arg0, 0),
2738 TREE_OPERAND (arg1, 1), flags)
2739 && operand_equal_p (TREE_OPERAND (arg0, 1),
2740 TREE_OPERAND (arg1, 0), flags);
2743 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2744 /* This is needed for conversions and for COMPONENT_REF.
2745 Might as well play it safe and always test this. */
2746 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2747 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2748 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2751 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2752 We don't care about side effects in that case because the SAVE_EXPR
2753 takes care of that for us. In all other cases, two expressions are
2754 equal if they have no side effects. If we have two identical
2755 expressions with side effects that should be treated the same due
2756 to the only side effects being identical SAVE_EXPR's, that will
2757 be detected in the recursive calls below. */
2758 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2759 && (TREE_CODE (arg0) == SAVE_EXPR
2760 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2763 /* Next handle constant cases, those for which we can return 1 even
2764 if ONLY_CONST is set. */
2765 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2766 switch (TREE_CODE (arg0))
2769 return tree_int_cst_equal (arg0, arg1);
/* REAL_CST: bit-identical values are equal; otherwise +0.0 and -0.0
   compare equal only when the target need not distinguish them.  */
2772 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2773 TREE_REAL_CST (arg1)))
2777 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2779 /* If we do not distinguish between signed and unsigned zero,
2780 consider them equal. */
2781 if (real_zerop (arg0) && real_zerop (arg1))
/* VECTOR_CST: walk both element chains in lock step.  */
2790 v1 = TREE_VECTOR_CST_ELTS (arg0);
2791 v2 = TREE_VECTOR_CST_ELTS (arg1);
2794 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2797 v1 = TREE_CHAIN (v1);
2798 v2 = TREE_CHAIN (v2);
/* COMPLEX_CST: compare real and imaginary parts separately.  */
2805 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2807 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
/* STRING_CST: equal lengths and identical bytes.  */
2811 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2812 && ! memcmp (TREE_STRING_POINTER (arg0),
2813 TREE_STRING_POINTER (arg1),
2814 TREE_STRING_LENGTH (arg0)));
2817 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
/* Past this point only structural equality remains, which is
   disallowed when the caller asked for constants only.  */
2823 if (flags & OEP_ONLY_CONST)
2826 /* Define macros to test an operand from arg0 and arg1 for equality and a
2827 variant that allows null and views null as being different from any
2828 non-null value. In the latter case, if either is null, the both
2829 must be; otherwise, do the normal comparison. */
2830 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2831 TREE_OPERAND (arg1, N), flags)
2833 #define OP_SAME_WITH_NULL(N) \
2834 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2835 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2837 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2840 /* Two conversions are equal only if signedness and modes match. */
2841 switch (TREE_CODE (arg0))
2845 case FIX_TRUNC_EXPR:
2846 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2847 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2857 case tcc_comparison:
2859 if (OP_SAME (0) && OP_SAME (1))
2862 /* For commutative ops, allow the other order. */
2863 return (commutative_tree_code (TREE_CODE (arg0))
2864 && operand_equal_p (TREE_OPERAND (arg0, 0),
2865 TREE_OPERAND (arg1, 1), flags)
2866 && operand_equal_p (TREE_OPERAND (arg0, 1),
2867 TREE_OPERAND (arg1, 0), flags));
2870 /* If either of the pointer (or reference) expressions we are
2871 dereferencing contain a side effect, these cannot be equal. */
2872 if (TREE_SIDE_EFFECTS (arg0)
2873 || TREE_SIDE_EFFECTS (arg1))
2876 switch (TREE_CODE (arg0))
2879 case ALIGN_INDIRECT_REF:
2880 case MISALIGNED_INDIRECT_REF:
2886 case ARRAY_RANGE_REF:
2887 /* Operands 2 and 3 may be null.
2888 Compare the array index by value if it is constant first as we
2889 may have different types but same value here. */
2891 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2892 TREE_OPERAND (arg1, 1))
2894 && OP_SAME_WITH_NULL (2)
2895 && OP_SAME_WITH_NULL (3));
2898 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2899 may be NULL when we're called to compare MEM_EXPRs. */
2900 return OP_SAME_WITH_NULL (0)
2902 && OP_SAME_WITH_NULL (2);
2905 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2911 case tcc_expression:
2912 switch (TREE_CODE (arg0))
2915 case TRUTH_NOT_EXPR:
2918 case TRUTH_ANDIF_EXPR:
2919 case TRUTH_ORIF_EXPR:
2920 return OP_SAME (0) && OP_SAME (1);
2922 case TRUTH_AND_EXPR:
2924 case TRUTH_XOR_EXPR:
2925 if (OP_SAME (0) && OP_SAME (1))
2928 /* Otherwise take into account this is a commutative operation. */
2929 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2930 TREE_OPERAND (arg1, 1), flags)
2931 && operand_equal_p (TREE_OPERAND (arg0, 1),
2932 TREE_OPERAND (arg1, 0), flags));
2939 switch (TREE_CODE (arg0))
2942 /* If the CALL_EXPRs call different functions, then they
2943 clearly can not be equal. */
2944 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
/* Only const (and, under OEP_PURE_SAME, pure) calls may compare
   equal; other calls can have differing side effects.  */
2949 unsigned int cef = call_expr_flags (arg0);
2950 if (flags & OEP_PURE_SAME)
2951 cef &= ECF_CONST | ECF_PURE;
2958 /* Now see if all the arguments are the same. */
2960 call_expr_arg_iterator iter0, iter1;
2962 for (a0 = first_call_expr_arg (arg0, &iter0),
2963 a1 = first_call_expr_arg (arg1, &iter1);
2965 a0 = next_call_expr_arg (&iter0),
2966 a1 = next_call_expr_arg (&iter1))
2967 if (! operand_equal_p (a0, a1, flags))
2970 /* If we get here and both argument lists are exhausted
2971 then the CALL_EXPRs are equal. */
2972 return ! (a0 || a1);
2978 case tcc_declaration:
2979 /* Consider __builtin_sqrt equal to sqrt. */
2980 return (TREE_CODE (arg0) == FUNCTION_DECL
2981 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2982 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2983 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2990 #undef OP_SAME_WITH_NULL
2993 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2994 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2996 When in doubt, return 0. */
2999 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3001 int unsignedp1, unsignedpo;
3002 tree primarg0, primarg1, primother;
3003 unsigned int correct_width;
/* Trivially equal operands need no further analysis.  */
3005 if (operand_equal_p (arg0, arg1, 0))
/* The shortening transformation only applies to integral types.  */
3008 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3009 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3012 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3013 and see if the inner values are the same. This removes any
3014 signedness comparison, which doesn't matter here. */
3015 primarg0 = arg0, primarg1 = arg1;
3016 STRIP_NOPS (primarg0);
3017 STRIP_NOPS (primarg1);
3018 if (operand_equal_p (primarg0, primarg1, 0))
3021 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3022 actual comparison operand, ARG0.
3024 First throw away any conversions to wider types
3025 already present in the operands. */
3027 primarg1 = get_narrower (arg1, &unsignedp1);
3028 primother = get_narrower (other, &unsignedpo);
3030 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3031 if (unsignedp1 == unsignedpo
3032 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3033 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3035 tree type = TREE_TYPE (arg0);
3037 /* Make sure shorter operand is extended the right way
3038 to match the longer operand. */
3039 primarg1 = fold_convert (get_signed_or_unsigned_type
3040 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3042 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3049 /* See if ARG is an expression that is either a comparison or is performing
3050 arithmetic on comparisons. The comparisons must only be comparing
3051 two different values, which will be stored in *CVAL1 and *CVAL2; if
3052 they are nonzero it means that some operands have already been found.
3053 No variables may be used anywhere else in the expression except in the
3054 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3055 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3057 If this is true, return 1. Otherwise, return zero. */
3060 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3062 enum tree_code code = TREE_CODE (arg);
3063 enum tree_code_class class = TREE_CODE_CLASS (code);
3065 /* We can handle some of the tcc_expression cases here. */
3066 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3068 else if (class == tcc_expression
3069 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3070 || code == COMPOUND_EXPR))
3073 else if (class == tcc_expression && code == SAVE_EXPR
3074 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3076 /* If we've already found a CVAL1 or CVAL2, this expression is
3077 too complex to handle. */
3078 if (*cval1 || *cval2)
/* Unary case: recurse into the single operand.  */
3088 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must satisfy the property.  */
3091 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3092 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3093 cval1, cval2, save_p));
3098 case tcc_expression:
3099 if (code == COND_EXPR)
3100 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3101 cval1, cval2, save_p)
3102 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3103 cval1, cval2, save_p)
3104 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3105 cval1, cval2, save_p));
3108 case tcc_comparison:
3109 /* First see if we can handle the first operand, then the second. For
3110 the second operand, we know *CVAL1 can't be zero. It must be that
3111 one side of the comparison is each of the values; test for the
3112 case where this isn't true by failing if the two operands
   are the same.  */
3115 if (operand_equal_p (TREE_OPERAND (arg, 0),
3116 TREE_OPERAND (arg, 1), 0))
/* Record or match operand 0 against the values seen so far.  */
3120 *cval1 = TREE_OPERAND (arg, 0);
3121 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3123 else if (*cval2 == 0)
3124 *cval2 = TREE_OPERAND (arg, 0);
3125 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for operand 1; *CVAL1 is known nonzero at this point.  */
3130 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3132 else if (*cval2 == 0)
3133 *cval2 = TREE_OPERAND (arg, 1);
3134 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3146 /* ARG is a tree that is known to contain just arithmetic operations and
3147 comparisons. Evaluate the operations in the tree substituting NEW0 for
3148 any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */
3152 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3154 tree type = TREE_TYPE (arg);
3155 enum tree_code code = TREE_CODE (arg);
3156 enum tree_code_class class = TREE_CODE_CLASS (code);
3158 /* We can handle some of the tcc_expression cases here. */
3159 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3161 else if (class == tcc_expression
3162 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary case: rebuild with the substituted operand.  */
3168 return fold_build1 (code, type,
3169 eval_subst (TREE_OPERAND (arg, 0),
3170 old0, new0, old1, new1));
/* Binary case: substitute in both operands.  */
3173 return fold_build2 (code, type,
3174 eval_subst (TREE_OPERAND (arg, 0),
3175 old0, new0, old1, new1),
3176 eval_subst (TREE_OPERAND (arg, 1),
3177 old0, new0, old1, new1));
3179 case tcc_expression:
3183 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3186 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary case: substitute in all three operands.  */
3189 return fold_build3 (code, type,
3190 eval_subst (TREE_OPERAND (arg, 0),
3191 old0, new0, old1, new1),
3192 eval_subst (TREE_OPERAND (arg, 1),
3193 old0, new0, old1, new1),
3194 eval_subst (TREE_OPERAND (arg, 2),
3195 old0, new0, old1, new1));
3199 /* Fall through - ??? */
3201 case tcc_comparison:
3203 tree arg0 = TREE_OPERAND (arg, 0);
3204 tree arg1 = TREE_OPERAND (arg, 1);
3206 /* We need to check both for exact equality and tree equality. The
3207 former will be true if the operand has a side-effect. In that
3208 case, we know the operand occurred exactly once. */
3210 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3212 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3215 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3217 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3220 return fold_build2 (code, type, arg0, arg1);
3228 /* Return a tree for the case when the result of an expression is RESULT
3229 converted to TYPE and OMITTED was previously an operand of the expression
3230 but is now not needed (e.g., we folded OMITTED * 0).
3232 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3233 the conversion of RESULT to TYPE. */
3236 omit_one_operand (tree type, tree result, tree omitted)
3238 tree t = fold_convert (type, result);
/* Keep OMITTED's side effects by sequencing it before the result.  */
3240 if (TREE_SIDE_EFFECTS (omitted))
3241 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3243 return non_lvalue (t);
3246 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3249 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3251 tree t = fold_convert (type, result);
/* Keep OMITTED's side effects by sequencing it before the result.  */
3253 if (TREE_SIDE_EFFECTS (omitted))
3254 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3256 return pedantic_non_lvalue (t);
3259 /* Return a tree for the case when the result of an expression is RESULT
3260 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3261 of the expression but are now not needed.
3263 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3264 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3265 evaluated before OMITTED2. Otherwise, if neither has side effects,
3266 just do the conversion of RESULT to TYPE. */
3269 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3271 tree t = fold_convert (type, result);
/* Wrap OMITTED2 first so that OMITTED1, wrapped outermost, is
   evaluated before it.  */
3273 if (TREE_SIDE_EFFECTS (omitted2))
3274 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3275 if (TREE_SIDE_EFFECTS (omitted1))
3276 t = build2 (COMPOUND_EXPR, type, omitted1, t);
/* Only mark the result non-lvalue when nothing was wrapped.  */
3278 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3282 /* Return a simplified tree node for the truth-negation of ARG. This
3283 never alters ARG itself. We assume that ARG is an operation that
3284 returns a truth value (0 or 1).
3286 FIXME: one would think we would fold the result, but it causes
3287 problems with the dominator optimizer. */
3290 fold_truth_not_expr (tree arg)
3292 tree type = TREE_TYPE (arg);
3293 enum tree_code code = TREE_CODE (arg);
3295 /* If this is a comparison, we can simply invert it, except for
3296 floating-point non-equality comparisons, in which case we just
3297 enclose a TRUTH_NOT_EXPR around what we have. */
3299 if (TREE_CODE_CLASS (code) == tcc_comparison)
3301 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
/* With -ftrapping-math, inverting an FP ordering comparison could
   change which inputs trap, so refuse except for the codes listed.  */
3302 if (FLOAT_TYPE_P (op_type)
3303 && flag_trapping_math
3304 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3305 && code != NE_EXPR && code != EQ_EXPR)
3309 code = invert_tree_comparison (code,
3310 HONOR_NANS (TYPE_MODE (op_type)));
3311 if (code == ERROR_MARK)
3314 return build2 (code, type,
3315 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant: the negation is true exactly when ARG is zero.  */
3322 return constant_boolean_node (integer_zerop (arg), type);
/* De Morgan: !(a && b) => !a || !b.  */
3324 case TRUTH_AND_EXPR:
3325 return build2 (TRUTH_OR_EXPR, type,
3326 invert_truthvalue (TREE_OPERAND (arg, 0)),
3327 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* De Morgan: !(a || b) => !a && !b.  */
3330 return build2 (TRUTH_AND_EXPR, type,
3331 invert_truthvalue (TREE_OPERAND (arg, 0)),
3332 invert_truthvalue (TREE_OPERAND (arg, 1)));
3334 case TRUTH_XOR_EXPR:
3335 /* Here we can invert either operand. We invert the first operand
3336 unless the second operand is a TRUTH_NOT_EXPR in which case our
3337 result is the XOR of the first operand with the inside of the
3338 negation of the second operand. */
3340 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3341 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3342 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3344 return build2 (TRUTH_XOR_EXPR, type,
3345 invert_truthvalue (TREE_OPERAND (arg, 0)),
3346 TREE_OPERAND (arg, 1));
/* Short-circuit forms also follow De Morgan.  */
3348 case TRUTH_ANDIF_EXPR:
3349 return build2 (TRUTH_ORIF_EXPR, type,
3350 invert_truthvalue (TREE_OPERAND (arg, 0)),
3351 invert_truthvalue (TREE_OPERAND (arg, 1)));
3353 case TRUTH_ORIF_EXPR:
3354 return build2 (TRUTH_ANDIF_EXPR, type,
3355 invert_truthvalue (TREE_OPERAND (arg, 0)),
3356 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* Double negation cancels.  */
3358 case TRUTH_NOT_EXPR:
3359 return TREE_OPERAND (arg, 0);
3363 tree arg1 = TREE_OPERAND (arg, 1);
3364 tree arg2 = TREE_OPERAND (arg, 2);
3365 /* A COND_EXPR may have a throw as one operand, which
3366 then has void type. Just leave void operands
   as they are.  */
3368 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3369 VOID_TYPE_P (TREE_TYPE (arg1))
3370 ? arg1 : invert_truthvalue (arg1),
3371 VOID_TYPE_P (TREE_TYPE (arg2))
3372 ? arg2 : invert_truthvalue (arg2));
/* !(a, b) => (a, !b): only the value operand is negated.  */
3376 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3377 invert_truthvalue (TREE_OPERAND (arg, 1)));
3379 case NON_LVALUE_EXPR:
3380 return invert_truthvalue (TREE_OPERAND (arg, 0));
3383 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3384 return build1 (TRUTH_NOT_EXPR, type, arg);
/* Conversions: push the negation inside.  */
3388 return build1 (TREE_CODE (arg), type,
3389 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Masking case (case label not visible here — presumably
   BIT_AND_EXPR; confirm in full source): when operand 1 is the
   constant one, negate by comparing ARG against zero.  */
3392 if (!integer_onep (TREE_OPERAND (arg, 1)))
3394 return build2 (EQ_EXPR, type, arg,
3395 build_int_cst (type, 0));
3398 return build1 (TRUTH_NOT_EXPR, type, arg);
/* Negate inside the cleanup point so cleanups stay attached.  */
3400 case CLEANUP_POINT_EXPR:
3401 return build1 (CLEANUP_POINT_EXPR, type,
3402 invert_truthvalue (TREE_OPERAND (arg, 0)));
3411 /* Return a simplified tree node for the truth-negation of ARG. This
3412 never alters ARG itself. We assume that ARG is an operation that
3413 returns a truth value (0 or 1).
3415 FIXME: one would think we would fold the result, but it causes
3416 problems with the dominator optimizer. */
3419 invert_truthvalue (tree arg)
3423 if (TREE_CODE (arg) == ERROR_MARK)
/* Try the simplifying negation first; fall back to wrapping ARG in
   an explicit TRUTH_NOT_EXPR when it cannot simplify.  */
3426 tem = fold_truth_not_expr (arg);
3428 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3433 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3434 operands are another bit-wise operation with a common input. If so,
3435 distribute the bit operations to save an operation and possibly two if
3436 constants are involved. For example, convert
3437 (A | B) & (A | C) into A | (B & C)
3438 Further simplification will occur if B and C are constants.
3440 If this optimization cannot be done, 0 will be returned. */
3443 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must use the same bit operation, it must differ from
   CODE, and it must be AND or IOR.  */
3448 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3449 || TREE_CODE (arg0) == code
3450 || (TREE_CODE (arg0) != BIT_AND_EXPR
3451 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the operand common to both sides; the four cases cover every
   position the shared input can occupy.  */
3454 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3456 common = TREE_OPERAND (arg0, 0);
3457 left = TREE_OPERAND (arg0, 1);
3458 right = TREE_OPERAND (arg1, 1);
3460 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3462 common = TREE_OPERAND (arg0, 0);
3463 left = TREE_OPERAND (arg0, 1);
3464 right = TREE_OPERAND (arg1, 0);
3466 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3468 common = TREE_OPERAND (arg0, 1);
3469 left = TREE_OPERAND (arg0, 0);
3470 right = TREE_OPERAND (arg1, 1);
3472 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3474 common = TREE_OPERAND (arg0, 1);
3475 left = TREE_OPERAND (arg0, 0);
3476 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON op (LEFT code RIGHT), e.g. A | (B & C).  */
3481 return fold_build2 (TREE_CODE (arg0), type, common,
3482 fold_build2 (code, type, left, right));
3485 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3486 with code CODE. This optimization is unsafe. */
3488 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3490 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3491 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3493 /* (A / C) +- (B / C) -> (A +- B) / C. */
3495 && operand_equal_p (TREE_OPERAND (arg0, 1),
3496 TREE_OPERAND (arg1, 1), 0))
3497 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3498 fold_build2 (code, type,
3499 TREE_OPERAND (arg0, 0),
3500 TREE_OPERAND (arg1, 0)),
3501 TREE_OPERAND (arg0, 1));
3503 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3504 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3505 TREE_OPERAND (arg1, 0), 0)
3506 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3507 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3509 REAL_VALUE_TYPE r0, r1;
3510 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3511 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
/* Form the reciprocals of the constants and combine them with CODE.
   NOTE(review): guards involving mul0/mul1 appear to be elided in
   this view — confirm against the full source.  */
3513 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3515 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3516 real_arithmetic (&r0, code, &r0, &r1);
3517 return fold_build2 (MULT_EXPR, type,
3518 TREE_OPERAND (arg0, 0),
3519 build_real (type, r0));
3525 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3526 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3529 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3536 tree size = TYPE_SIZE (TREE_TYPE (inner));
/* If the reference covers an entire integral or pointer object, a
   plain conversion suffices instead of a BIT_FIELD_REF.
   NOTE(review): no bitpos check is visible here — confirm the guard
   in the full source.  */
3537 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3538 || POINTER_TYPE_P (TREE_TYPE (inner)))
3539 && host_integerp (size, 0)
3540 && tree_low_cst (size, 0) == bitsize)
3541 return fold_convert (type, inner);
3544 result = build3 (BIT_FIELD_REF, type, inner,
3545 size_int (bitsize), bitsize_int (bitpos));
3547 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3552 /* Optimize a bit-field compare.
3554 There are two cases: First is a compare against a constant and the
3555 second is a comparison of two items where the fields are at the same
3556 bit position relative to the start of a chunk (byte, halfword, word)
3557 large enough to contain it. In these cases we can avoid the shift
3558 implicit in bitfield extractions.
3560 For constants, we emit a compare of the shifted constant with the
3561 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3562 compared. For two fields at the same position, we do the ANDs with the
3563 similar mask and compare the result of the ANDs.
3565 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3566 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3567 are the left and right operands of the comparison, respectively.
3569 If the optimization described above can be done, we return the resulting
3570 tree. Otherwise we return zero. */
3573 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3576 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3577 tree type = TREE_TYPE (lhs);
3578 tree signed_type, unsigned_type;
3579 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3580 enum machine_mode lmode, rmode, nmode;
3581 int lunsignedp, runsignedp;
3582 int lvolatilep = 0, rvolatilep = 0;
3583 tree linner, rinner = NULL_TREE;
3587 /* Get all the information about the extractions being done. If the bit size
3588 is the same as the size of the underlying object, we aren't doing an
3589 extraction at all and so can do nothing. We also don't want to
3590 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3591 then will no longer be able to replace it. */
3592 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3593 &lunsignedp, &lvolatilep, false);
3594 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3595 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3600 /* If this is not a constant, we can only do something if bit positions,
3601 sizes, and signedness are the same. */
3602 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3603 &runsignedp, &rvolatilep, false);
3605 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3606 || lunsignedp != runsignedp || offset != 0
3607 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3611 /* See if we can find a mode to refer to this field. We should be able to,
3612 but fail if we can't. */
3613 nmode = get_best_mode (lbitsize, lbitpos,
3614 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3615 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3616 TYPE_ALIGN (TREE_TYPE (rinner))),
3617 word_mode, lvolatilep || rvolatilep)
3618 if (nmode == VOIDmode)
3621 /* Set signed and unsigned types of the precision of this mode for the
   operations below.  */
3623 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3624 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3626 /* Compute the bit position and size for the new reference and our offset
3627 within it. If the new reference is the same size as the original, we
3628 won't optimize anything, so return zero. */
3629 nbitsize = GET_MODE_BITSIZE (nmode);
3630 nbitpos = lbitpos & ~ (nbitsize - 1);
3632 if (nbitsize == lbitsize)
/* Bit positions count from the opposite end on big-endian targets.  */
3635 if (BYTES_BIG_ENDIAN)
3636 lbitpos = nbitsize - lbitsize - lbitpos;
3638 /* Make the mask to be used against the extracted field. */
/* Start from all-ones, then shift left and back right so exactly
   LBITSIZE one-bits remain, positioned at LBITPOS.  */
3639 mask = build_int_cst_type (unsigned_type, -1);
3640 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3641 mask = const_binop (RSHIFT_EXPR, mask,
3642 size_int (nbitsize - lbitsize - lbitpos), 0);
3645 /* If not comparing with constant, just rework the comparison
   and return.  */
3647 return fold_build2 (code, compare_type,
3648 fold_build2 (BIT_AND_EXPR, unsigned_type,
3649 make_bit_field_ref (linner,
3654 fold_build2 (BIT_AND_EXPR, unsigned_type,
3655 make_bit_field_ref (rinner,
3661 /* Otherwise, we are handling the constant case. See if the constant is too
3662 big for the field. Warn and return a tree for 0 (false) if so. We do
3663 this not only for its own sake, but to avoid having to test for this
3664 error case below. If we didn't, we might generate wrong code.
3666 For unsigned fields, the constant shifted right by the field length should
3667 be all zero. For signed fields, the high-order bits should agree with
   the sign bit.  */
3672 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3673 fold_convert (unsigned_type, rhs),
3674 size_int (lbitsize), 0)))
3676 warning (0, "comparison is always %d due to width of bit-field",
3678 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed field: every bit above the field must equal the sign bit.  */
3683 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3684 size_int (lbitsize - 1), 0);
3685 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3687 warning (0, "comparison is always %d due to width of bit-field",
3689 return constant_boolean_node (code == NE_EXPR, compare_type);
3693 /* Single-bit compares should always be against zero. */
3694 if (lbitsize == 1 && ! integer_zerop (rhs))
3696 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3697 rhs = build_int_cst (type, 0);
3700 /* Make a new bitfield reference, shift the constant over the
3701 appropriate number of bits and mask it with the computed mask
3702 (in case this was a signed field). If we changed it, make a new one. */
3703 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
/* NOTE(review): the guarding condition for setting these flags is
   not visible in this view — confirm in the full source.  */
3706 TREE_SIDE_EFFECTS (lhs) = 1;
3707 TREE_THIS_VOLATILE (lhs) = 1;
/* Shift the constant into field position and apply the mask.  */
3710 rhs = const_binop (BIT_AND_EXPR,
3711 const_binop (LSHIFT_EXPR,
3712 fold_convert (unsigned_type, rhs),
3713 size_int (lbitpos), 0),
3716 return build2 (code, compare_type,
3717 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3721 /* Subroutine for fold_truthop: decode a field reference.
3723 If EXP is a comparison reference, we return the innermost reference.
3725 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3726 set to the starting bit number.
3728 If the innermost field can be completely contained in a mode-sized
3729 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3731 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3732 otherwise it is not changed.
3734 *PUNSIGNEDP is set to the signedness of the field.
3736 *PMASK is set to the mask used. This is either contained in a
3737 BIT_AND_EXPR or derived from the width of the field.
3739 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3741 Return 0 if this is not a component reference or is one that we can't
3742 do anything with. */
3745 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3746 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3747 int *punsignedp, int *pvolatilep,
3748 tree *pmask, tree *pand_mask)
3750 tree outer_type = 0;
3752 tree mask, inner, offset;
3754 unsigned int precision;
3756 /* All the optimizations using this function assume integer fields.
3757 There are problems with FP fields since the type_for_size call
3758 below can fail for, e.g., XFmode. */
3759 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3762 /* We are interested in the bare arrangement of bits, so strip everything
3763 that doesn't affect the machine mode. However, record the type of the
3764 outermost expression if it may matter below. */
3765 if (TREE_CODE (exp) == NOP_EXPR
3766 || TREE_CODE (exp) == CONVERT_EXPR
3767 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3768 outer_type = TREE_TYPE (exp);
/* Peel off an explicit BIT_AND_EXPR mask, requiring it be constant.  */
3771 if (TREE_CODE (exp) == BIT_AND_EXPR)
3773 and_mask = TREE_OPERAND (exp, 1);
3774 exp = TREE_OPERAND (exp, 0);
3775 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3776 if (TREE_CODE (and_mask) != INTEGER_CST)
3780 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3781 punsignedp, pvolatilep, false);
/* Fail when there is no actual extraction (and no mask), the size is
   bogus, a variable offset is involved, or EXP is a placeholder.  */
3782 if ((inner == exp && and_mask == 0)
3783 || *pbitsize < 0 || offset != 0
3784 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3787 /* If the number of bits in the reference is the same as the bitsize of
3788 the outer type, then the outer type gives the signedness. Otherwise
3789 (in case of a small bitfield) the signedness is unchanged. */
3790 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3791 *punsignedp = TYPE_UNSIGNED (outer_type);
3793 /* Compute the mask to access the bitfield. */
3794 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3795 precision = TYPE_PRECISION (unsigned_type);
/* All-ones value truncated to exactly *PBITSIZE low-order bits.  */
3797 mask = build_int_cst_type (unsigned_type, -1);
3799 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3800 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3802 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3804 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3805 fold_convert (unsigned_type, and_mask), mask);
3808 *pand_mask = and_mask;
3812 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
/* Compare MASK against an all-ones constant of MASK's precision shifted
   left then right by (precision - size): that value has exactly SIZE
   low-order one bits.  NOTE(review): declaration of TMASK, the return
   keyword and the closing brace are missing from this extract; code is
   kept byte-identical.  */
3816 all_ones_mask_p (tree mask, int size)
3818 tree type = TREE_TYPE (mask);
3819 unsigned int precision = TYPE_PRECISION (type);
3822 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3825 tree_int_cst_equal (mask,
3826 const_binop (RSHIFT_EXPR,
3827 const_binop (LSHIFT_EXPR, tmask,
3828 size_int (precision - size),
3830 size_int (precision - size), 0));
3833 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3834 represents the sign bit of EXP's type. If EXP represents a sign
3835 or zero extension, also test VAL against the unextended type.
3836 The return value is the (sub)expression whose sign bit is VAL,
3837 or NULL_TREE otherwise. */
/* NOTE(review): early-return statements and the final NULL_TREE return
   are missing from this extract; code kept byte-identical.  */
3840 sign_bit_p (tree exp, tree val)
3842 unsigned HOST_WIDE_INT mask_lo, lo;
3843 HOST_WIDE_INT mask_hi, hi;
3847 /* Tree EXP must have an integral type. */
3848 t = TREE_TYPE (exp);
3849 if (! INTEGRAL_TYPE_P (t))
3852 /* Tree VAL must be an integer constant. */
3853 if (TREE_CODE (val) != INTEGER_CST
3854 || TREE_OVERFLOW (val))
/* Build the sign-bit pattern of a WIDTH-bit value split across the
   (hi, lo) HOST_WIDE_INT pair, together with masks covering exactly
   the WIDTH valid bits; the branch taken depends on whether the sign
   bit lands in the high or the low word.  */
3857 width = TYPE_PRECISION (t);
3858 if (width > HOST_BITS_PER_WIDE_INT)
3860 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3863 mask_hi = ((unsigned HOST_WIDE_INT) -1
3864 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3870 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3873 mask_lo = ((unsigned HOST_WIDE_INT) -1
3874 >> (HOST_BITS_PER_WIDE_INT - width));
3877 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3878 treat VAL as if it were unsigned. */
3879 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3880 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3883 /* Handle extension from a narrower type. */
3884 if (TREE_CODE (exp) == NOP_EXPR
3885 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3886 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3891 /* Subroutine for fold_truthop: determine if an operand is simple enough
3892 to be evaluated unconditionally. */
/* An operand is "simple" when it is a constant, an SSA name, or a
   declaration that is local, non-volatile, non-addressable and cheap
   to load.  NOTE(review): the line opening the declaration arm of the
   disjunction (original 3902) and the STRIP_NOPS statement body are
   missing from this extract; code kept byte-identical.  */
3895 simple_operand_p (tree exp)
3897 /* Strip any conversions that don't change the machine mode. */
3900 return (CONSTANT_CLASS_P (exp)
3901 || TREE_CODE (exp) == SSA_NAME
3903 && ! TREE_ADDRESSABLE (exp)
3904 && ! TREE_THIS_VOLATILE (exp)
3905 && ! DECL_NONLOCAL (exp)
3906 /* Don't regard global variables as simple. They may be
3907 allocated in ways unknown to the compiler (shared memory,
3908 #pragma weak, etc). */
3909 && ! TREE_PUBLIC (exp)
3910 && ! DECL_EXTERNAL (exp)
3911 /* Loading a static variable is unduly expensive, but global
3912 registers aren't expensive. */
3913 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3916 /* The following functions are subroutines to fold_range_test and allow it to
3917 try to change a logical combination of comparisons into a range test.
3920 X == 2 || X == 3 || X == 4 || X == 5
3924 (unsigned) (X - 2) <= 3
3926 We describe each set of comparisons as being either inside or outside
3927 a range, using a variable named like IN_P, and then describe the
3928 range with a lower and upper bound. If one of the bounds is omitted,
3929 it represents either the highest or lowest value of the type.
3931 In the comments below, we represent a range by two numbers in brackets
3932 preceded by a "+" to designate being inside that range, or a "-" to
3933 designate being outside that range, so the condition can be inverted by
3934 flipping the prefix. An omitted bound is represented by a "-". For
3935 example, "- [-, 10]" means being outside the range starting at the lowest
3936 possible value and ending at 10, in other words, being greater than 10.
3937 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3940 We set up things so that the missing bounds are handled in a consistent
3941 manner so neither a missing bound nor "true" and "false" need to be
3942 handled using a special case. */
3944 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3945 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3946 and UPPER1_P are nonzero if the respective argument is an upper bound
3947 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3948 must be specified for a comparison. ARG1 will be converted to ARG0's
3949 type if both are specified. */
3952 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3953 tree arg1, int upper1_p)
3959 /* If neither arg represents infinity, do the normal operation.
3960 Else, if not a comparison, return infinity. Else handle the special
3961 comparison rules. Note that most of the cases below won't occur, but
3962 are handled for consistency. */
3964 if (arg0 != 0 && arg1 != 0)
3966 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3967 arg0, fold_convert (TREE_TYPE (arg0), arg1));
/* Only a successfully folded constant is useful to callers.  */
3969 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3972 if (TREE_CODE_CLASS (code) != tcc_comparison)
3975 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3976 for neither. In real maths, we cannot assume open ended ranges are
3977 the same. But, this is computer arithmetic, where numbers are finite.
3978 We can therefore make the transformation of any unbounded range with
3979 the value Z, Z being greater than any representable number. This permits
3980 us to treat unbounded ranges as equal. */
3981 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3982 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* NOTE(review): the switch on CODE and its case labels/breaks are
   missing from this extract; only the per-comparison assignments of
   RESULT remain visible (the operators match EQ/NE/LT/LE/GT/GE).  */
3986 result = sgn0 == sgn1;
3989 result = sgn0 != sgn1;
3992 result = sgn0 < sgn1;
3995 result = sgn0 <= sgn1;
3998 result = sgn0 > sgn1;
4001 result = sgn0 >= sgn1;
4007 return constant_boolean_node (result, type);
4010 /* Given EXP, a logical expression, set the range it is testing into
4011 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4012 actually being tested. *PLOW and *PHIGH will be made of the same
4013 type as the returned expression. If EXP is not a comparison, we
4014 will most likely not be returning a useful value and range. Set
4015 *STRICT_OVERFLOW_P to true if the return value is only valid
4016 because signed overflow is undefined; otherwise, do not change
4017 *STRICT_OVERFLOW_P. */
/* NOTE(review): this extract is missing many physical lines of the
   original (the embedded numbering skips throughout): the enclosing
   loop and switch statements, several case labels, braces, breaks and
   continues are not visible.  Code below is byte-identical; only
   comments were added.  */
4020 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4021 bool *strict_overflow_p)
4023 enum tree_code code;
4024 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4025 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4027 tree low, high, n_low, n_high;
4029 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4030 and see if we can refine the range. Some of the cases below may not
4031 happen, but it doesn't seem worth worrying about this. We "continue"
4032 the outer loop when we've changed something; otherwise we "break"
4033 the switch, which will "break" the while. */
4036 low = high = build_int_cst (TREE_TYPE (exp), 0);
4040 code = TREE_CODE (exp);
4041 exp_type = TREE_TYPE (exp);
/* Pick up the operands whose existence the code class guarantees.  */
4043 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4045 if (TREE_OPERAND_LENGTH (exp) > 0)
4046 arg0 = TREE_OPERAND (exp, 0);
4047 if (TREE_CODE_CLASS (code) == tcc_comparison
4048 || TREE_CODE_CLASS (code) == tcc_unary
4049 || TREE_CODE_CLASS (code) == tcc_binary)
4050 arg0_type = TREE_TYPE (arg0);
4051 if (TREE_CODE_CLASS (code) == tcc_binary
4052 || TREE_CODE_CLASS (code) == tcc_comparison
4053 || (TREE_CODE_CLASS (code) == tcc_expression
4054 && TREE_OPERAND_LENGTH (exp) > 1))
4055 arg1 = TREE_OPERAND (exp, 1);
4060 case TRUTH_NOT_EXPR:
4061 in_p = ! in_p, exp = arg0;
4064 case EQ_EXPR: case NE_EXPR:
4065 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4066 /* We can only do something if the range is testing for zero
4067 and if the second operand is an integer constant. Note that
4068 saying something is "in" the range we make is done by
4069 complementing IN_P since it will set in the initial case of
4070 being not equal to zero; "out" is leaving it alone. */
4071 if (low == 0 || high == 0
4072 || ! integer_zerop (low) || ! integer_zerop (high)
4073 || TREE_CODE (arg1) != INTEGER_CST)
4078 case NE_EXPR: /* - [c, c] */
4081 case EQ_EXPR: /* + [c, c] */
4082 in_p = ! in_p, low = high = arg1;
4084 case GT_EXPR: /* - [-, c] */
4085 low = 0, high = arg1;
4087 case GE_EXPR: /* + [c, -] */
4088 in_p = ! in_p, low = arg1, high = 0;
4090 case LT_EXPR: /* - [c, -] */
4091 low = arg1, high = 0;
4093 case LE_EXPR: /* + [-, c] */
4094 in_p = ! in_p, low = 0, high = arg1;
4100 /* If this is an unsigned comparison, we also know that EXP is
4101 greater than or equal to zero. We base the range tests we make
4102 on that fact, so we record it here so we can parse existing
4103 range tests. We test arg0_type since often the return type
4104 of, e.g. EQ_EXPR, is boolean. */
4105 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4107 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4109 build_int_cst (arg0_type, 0),
4113 in_p = n_in_p, low = n_low, high = n_high;
4115 /* If the high bound is missing, but we have a nonzero low
4116 bound, reverse the range so it goes from zero to the low bound
4118 if (high == 0 && low && ! integer_zerop (low))
4121 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4122 integer_one_node, 0);
4123 low = build_int_cst (arg0_type, 0);
4131 /* (-x) IN [a,b] -> x in [-b, -a] */
4132 n_low = range_binop (MINUS_EXPR, exp_type,
4133 build_int_cst (exp_type, 0),
4135 n_high = range_binop (MINUS_EXPR, exp_type,
4136 build_int_cst (exp_type, 0),
4138 low = n_low, high = n_high;
/* Rewrite ~x as (-x) - 1 so the NEGATE_EXPR machinery handles it.  */
4144 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4145 build_int_cst (exp_type, 1));
4148 case PLUS_EXPR: case MINUS_EXPR:
4149 if (TREE_CODE (arg1) != INTEGER_CST)
4152 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4153 move a constant to the other side. */
4154 if (!TYPE_UNSIGNED (arg0_type)
4155 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4158 /* If EXP is signed, any overflow in the computation is undefined,
4159 so we don't worry about it so long as our computations on
4160 the bounds don't overflow. For unsigned, overflow is defined
4161 and this is exactly the right thing. */
4162 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4163 arg0_type, low, 0, arg1, 0);
4164 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4165 arg0_type, high, 1, arg1, 0);
4166 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4167 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4170 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4171 *strict_overflow_p = true;
4173 /* Check for an unsigned range which has wrapped around the maximum
4174 value thus making n_high < n_low, and normalize it. */
4175 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4177 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4178 integer_one_node, 0);
4179 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4180 integer_one_node, 0);
4182 /* If the range is of the form +/- [ x+1, x ], we won't
4183 be able to normalize it. But then, it represents the
4184 whole range or the empty set, so make it
4186 if (tree_int_cst_equal (n_low, low)
4187 && tree_int_cst_equal (n_high, high))
4193 low = n_low, high = n_high;
4198 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4199 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4202 if (! INTEGRAL_TYPE_P (arg0_type)
4203 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4204 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4207 n_low = low, n_high = high;
4210 n_low = fold_convert (arg0_type, n_low);
4213 n_high = fold_convert (arg0_type, n_high);
4216 /* If we're converting arg0 from an unsigned type, to exp,
4217 a signed type, we will be doing the comparison as unsigned.
4218 The tests above have already verified that LOW and HIGH
4221 So we have to ensure that we will handle large unsigned
4222 values the same way that the current signed bounds treat
4225 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4228 tree equiv_type = lang_hooks.types.type_for_mode
4229 (TYPE_MODE (arg0_type), 1);
4231 /* A range without an upper bound is, naturally, unbounded.
4232 Since convert would have cropped a very large value, use
4233 the max value for the destination type. */
4235 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4236 : TYPE_MAX_VALUE (arg0_type);
4238 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4239 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4240 fold_convert (arg0_type,
4242 build_int_cst (arg0_type, 1));
4244 /* If the low bound is specified, "and" the range with the
4245 range for which the original unsigned value will be
4249 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4250 1, n_low, n_high, 1,
4251 fold_convert (arg0_type,
4256 in_p = (n_in_p == in_p);
4260 /* Otherwise, "or" the range with the range of the input
4261 that will be interpreted as negative. */
4262 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4263 0, n_low, n_high, 1,
4264 fold_convert (arg0_type,
4269 in_p = (in_p != n_in_p);
4274 low = n_low, high = n_high;
4284 /* If EXP is a constant, we can evaluate whether this is true or false. */
4285 if (TREE_CODE (exp) == INTEGER_CST)
4287 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4289 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4295 *pin_p = in_p, *plow = low, *phigh = high;
4299 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4300 type, TYPE, return an expression to test if EXP is in (or out of, depending
4301 on IN_P) the range. Return 0 if the test couldn't be created. */
/* NOTE(review): physical lines are missing from this extract (embedded
   numbering skips): the early NULL returns, the conditions guarding
   the single-bound cases (original 4330/4334), the break statements and
   closing braces are not visible.  Code kept byte-identical.  */
4304 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4306 tree etype = TREE_TYPE (exp);
4309 #ifdef HAVE_canonicalize_funcptr_for_compare
4310 /* Disable this optimization for function pointer expressions
4311 on targets that require function pointer canonicalization. */
4312 if (HAVE_canonicalize_funcptr_for_compare
4313 && TREE_CODE (etype) == POINTER_TYPE
4314 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* An "out of range" test is built as the inverted "in range" test.  */
4320 value = build_range_check (type, exp, 1, low, high);
4322 return invert_truthvalue (value);
4327 if (low == 0 && high == 0)
4328 return build_int_cst (type, 1);
4331 return fold_build2 (LE_EXPR, type, exp,
4332 fold_convert (etype, high));
4335 return fold_build2 (GE_EXPR, type, exp,
4336 fold_convert (etype, low));
4338 if (operand_equal_p (low, high, 0))
4339 return fold_build2 (EQ_EXPR, type, exp,
4340 fold_convert (etype, low));
4342 if (integer_zerop (low))
4344 if (! TYPE_UNSIGNED (etype))
4346 etype = lang_hooks.types.unsigned_type (etype);
4347 high = fold_convert (etype, high);
4348 exp = fold_convert (etype, exp);
4350 return build_range_check (type, exp, 1, 0, high);
4353 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4354 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4356 unsigned HOST_WIDE_INT lo;
/* Build the all-ones-below-the-sign-bit constant (e.g. 127 for 8 bits)
   in the (hi, lo) HOST_WIDE_INT pair to compare against HIGH.  */
4360 prec = TYPE_PRECISION (etype);
4361 if (prec <= HOST_BITS_PER_WIDE_INT)
4364 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4368 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4369 lo = (unsigned HOST_WIDE_INT) -1;
4372 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4374 if (TYPE_UNSIGNED (etype))
4376 etype = lang_hooks.types.signed_type (etype);
4377 exp = fold_convert (etype, exp);
4379 return fold_build2 (GT_EXPR, type, exp,
4380 build_int_cst (etype, 0));
4384 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4385 This requires wrap-around arithmetics for the type of the expression. */
/* NOTE(review): the case labels of this switch (integer/enumeral/
   boolean vs. other type codes) are missing from this extract.  */
4386 switch (TREE_CODE (etype))
4389 /* There is no requirement that LOW be within the range of ETYPE
4390 if the latter is a subtype. It must, however, be within the base
4391 type of ETYPE. So be sure we do the subtraction in that type. */
4392 if (TREE_TYPE (etype))
4393 etype = TREE_TYPE (etype);
4398 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4399 TYPE_UNSIGNED (etype));
4406 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4407 if (TREE_CODE (etype) == INTEGER_TYPE
4408 && !TYPE_OVERFLOW_WRAPS (etype))
4410 tree utype, minv, maxv;
4412 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4413 for the type in question, as we rely on this here. */
4414 utype = lang_hooks.types.unsigned_type (etype);
4415 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4416 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4417 integer_one_node, 1);
4418 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4420 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4427 high = fold_convert (etype, high);
4428 low = fold_convert (etype, low);
4429 exp = fold_convert (etype, exp);
4431 value = const_binop (MINUS_EXPR, high, low, 0);
4433 if (value != 0 && !TREE_OVERFLOW (value))
4434 return build_range_check (type,
4435 fold_build2 (MINUS_EXPR, etype, exp, low),
4436 1, build_int_cst (etype, 0), value);
4441 /* Return the predecessor of VAL in its type, handling the infinite case. */
/* Computes VAL - 1 via range_binop.  NOTE(review): the statement taken
   when VAL is already the type's minimum (presumably returning 0, the
   "infinite" marker) is missing from this extract.  */
4444 range_predecessor (tree val)
4446 tree type = TREE_TYPE (val);
4448 if (INTEGRAL_TYPE_P (type)
4449 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4452 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4455 /* Return the successor of VAL in its type, handling the infinite case. */
/* Computes VAL + 1 via range_binop.  NOTE(review): the statement taken
   when VAL is already the type's maximum (presumably returning 0, the
   "infinite" marker) is missing from this extract.  */
4458 range_successor (tree val)
4460 tree type = TREE_TYPE (val);
4462 if (INTEGRAL_TYPE_P (type)
4463 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4466 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4469 /* Given two ranges, see if we can merge them into one. Return 1 if we
4470 can, 0 if we can't. Set the output range into the specified parameters. */
/* NOTE(review): physical lines are missing from this extract: local
   declarations, several if-conditions that select between the visible
   assignments, braces and the final "return 1;" are not shown.  Code
   kept byte-identical; comments added only.  */
4473 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4474 tree high0, int in1_p, tree low1, tree high1)
4482 int lowequal = ((low0 == 0 && low1 == 0)
4483 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4484 low0, 0, low1, 0)));
4485 int highequal = ((high0 == 0 && high1 == 0)
4486 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4487 high0, 1, high1, 1)));
4489 /* Make range 0 be the range that starts first, or ends last if they
4490 start at the same value. Swap them if it isn't. */
4491 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4494 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4495 high1, 1, high0, 1))))
4497 temp = in0_p, in0_p = in1_p, in1_p = temp;
4498 tem = low0, low0 = low1, low1 = tem;
4499 tem = high0, high0 = high1, high1 = tem;
4502 /* Now flag two cases, whether the ranges are disjoint or whether the
4503 second range is totally subsumed in the first. Note that the tests
4504 below are simplified by the ones above. */
4505 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4506 high0, 1, low1, 0));
4507 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4508 high1, 1, high0, 1));
4510 /* We now have four cases, depending on whether we are including or
4511 excluding the two ranges. */
4514 /* If they don't overlap, the result is false. If the second range
4515 is a subset it is the result. Otherwise, the range is from the start
4516 of the second to the end of the first. */
4518 in_p = 0, low = high = 0;
4520 in_p = 1, low = low1, high = high1;
4522 in_p = 1, low = low1, high = high0;
4525 else if (in0_p && ! in1_p)
4527 /* If they don't overlap, the result is the first range. If they are
4528 equal, the result is false. If the second range is a subset of the
4529 first, and the ranges begin at the same place, we go from just after
4530 the end of the second range to the end of the first. If the second
4531 range is not a subset of the first, or if it is a subset and both
4532 ranges end at the same place, the range starts at the start of the
4533 first range and ends just before the second range.
4534 Otherwise, we can't describe this as a single range. */
4536 in_p = 1, low = low0, high = high0;
4537 else if (lowequal && highequal)
4538 in_p = 0, low = high = 0;
4539 else if (subset && lowequal)
4541 low = range_successor (high1);
4545 else if (! subset || highequal)
4548 high = range_predecessor (low1);
4555 else if (! in0_p && in1_p)
4557 /* If they don't overlap, the result is the second range. If the second
4558 is a subset of the first, the result is false. Otherwise,
4559 the range starts just after the first range and ends at the
4560 end of the second. */
4562 in_p = 1, low = low1, high = high1;
4563 else if (subset || highequal)
4564 in_p = 0, low = high = 0;
4567 low = range_successor (high0);
4575 /* The case where we are excluding both ranges. Here the complex case
4576 is if they don't overlap. In that case, the only time we have a
4577 range is if they are adjacent. If the second is a subset of the
4578 first, the result is the first. Otherwise, the range to exclude
4579 starts at the beginning of the first range and ends at the end of the
4583 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4584 range_successor (high0),
4586 in_p = 0, low = low0, high = high1;
4589 /* Canonicalize - [min, x] into - [-, x]. */
4590 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4591 switch (TREE_CODE (TREE_TYPE (low0)))
4594 if (TYPE_PRECISION (TREE_TYPE (low0))
4595 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4599 if (tree_int_cst_equal (low0,
4600 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4604 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4605 && integer_zerop (low0))
4612 /* Canonicalize - [x, max] into - [x, -]. */
4613 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4614 switch (TREE_CODE (TREE_TYPE (high1)))
4617 if (TYPE_PRECISION (TREE_TYPE (high1))
4618 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4622 if (tree_int_cst_equal (high1,
4623 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4627 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4628 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4630 integer_one_node, 1)))
4637 /* The ranges might be also adjacent between the maximum and
4638 minimum values of the given type. For
4639 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4640 return + [x + 1, y - 1]. */
4641 if (low0 == 0 && high1 == 0)
4643 low = range_successor (high0);
4644 high = range_predecessor (low1);
4645 if (low == 0 || high == 0)
4655 in_p = 0, low = low0, high = high0;
4657 in_p = 0, low = low0, high = high1;
4660 *pin_p = in_p, *plow = low, *phigh = high;
4665 /* Subroutine of fold, looking inside expressions of the form
4666 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4667 of the COND_EXPR. This function is being used also to optimize
4668 A op B ? C : A, by reversing the comparison first.
4670 Return a folded expression whose code is not a COND_EXPR
4671 anymore, or NULL_TREE if no folding opportunity is found. */
/* NOTE(review): this extract is missing physical lines throughout (the
   embedded numbering skips): the switches on COMP_CODE, their case
   labels (EQ/NE/GE/GT/LE/LT and their unordered variants), braces and
   breaks are not visible.  Code kept byte-identical; comments added.  */
4674 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4676 enum tree_code comp_code = TREE_CODE (arg0);
4677 tree arg00 = TREE_OPERAND (arg0, 0);
4678 tree arg01 = TREE_OPERAND (arg0, 1);
4679 tree arg1_type = TREE_TYPE (arg1);
4685 /* If we have A op 0 ? A : -A, consider applying the following
4688 A == 0? A : -A same as -A
4689 A != 0? A : -A same as A
4690 A >= 0? A : -A same as abs (A)
4691 A > 0? A : -A same as abs (A)
4692 A <= 0? A : -A same as -abs (A)
4693 A < 0? A : -A same as -abs (A)
4695 None of these transformations work for modes with signed
4696 zeros. If A is +/-0, the first two transformations will
4697 change the sign of the result (from +0 to -0, or vice
4698 versa). The last four will fix the sign of the result,
4699 even though the original expressions could be positive or
4700 negative, depending on the sign of A.
4702 Note that all these transformations are correct if A is
4703 NaN, since the two alternatives (A and -A) are also NaNs. */
4704 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4705 ? real_zerop (arg01)
4706 : integer_zerop (arg01))
4707 && ((TREE_CODE (arg2) == NEGATE_EXPR
4708 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4709 /* In the case that A is of the form X-Y, '-A' (arg2) may
4710 have already been folded to Y-X, check for that. */
4711 || (TREE_CODE (arg1) == MINUS_EXPR
4712 && TREE_CODE (arg2) == MINUS_EXPR
4713 && operand_equal_p (TREE_OPERAND (arg1, 0),
4714 TREE_OPERAND (arg2, 1), 0)
4715 && operand_equal_p (TREE_OPERAND (arg1, 1),
4716 TREE_OPERAND (arg2, 0), 0))))
/* The alternatives below correspond to the EQ, NE, GE/GT and LE/LT
   arms of the table in the comment above (case labels not visible).  */
4721 tem = fold_convert (arg1_type, arg1);
4722 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4725 return pedantic_non_lvalue (fold_convert (type, arg1));
4728 if (flag_trapping_math)
4733 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4734 arg1 = fold_convert (lang_hooks.types.signed_type
4735 (TREE_TYPE (arg1)), arg1);
4736 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4737 return pedantic_non_lvalue (fold_convert (type, tem));
4740 if (flag_trapping_math)
4744 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4745 arg1 = fold_convert (lang_hooks.types.signed_type
4746 (TREE_TYPE (arg1)), arg1);
4747 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4748 return negate_expr (fold_convert (type, tem));
4750 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4754 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4755 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4756 both transformations are correct when A is NaN: A != 0
4757 is then true, and A == 0 is false. */
4759 if (integer_zerop (arg01) && integer_zerop (arg2))
4761 if (comp_code == NE_EXPR)
4762 return pedantic_non_lvalue (fold_convert (type, arg1));
4763 else if (comp_code == EQ_EXPR)
4764 return build_int_cst (type, 0);
4767 /* Try some transformations of A op B ? A : B.
4769 A == B? A : B same as B
4770 A != B? A : B same as A
4771 A >= B? A : B same as max (A, B)
4772 A > B? A : B same as max (B, A)
4773 A <= B? A : B same as min (A, B)
4774 A < B? A : B same as min (B, A)
4776 As above, these transformations don't work in the presence
4777 of signed zeros. For example, if A and B are zeros of
4778 opposite sign, the first two transformations will change
4779 the sign of the result. In the last four, the original
4780 expressions give different results for (A=+0, B=-0) and
4781 (A=-0, B=+0), but the transformed expressions do not.
4783 The first two transformations are correct if either A or B
4784 is a NaN. In the first transformation, the condition will
4785 be false, and B will indeed be chosen. In the case of the
4786 second transformation, the condition A != B will be true,
4787 and A will be chosen.
4789 The conversions to max() and min() are not correct if B is
4790 a number and A is not. The conditions in the original
4791 expressions will be false, so all four give B. The min()
4792 and max() versions would give a NaN instead. */
4793 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4794 /* Avoid these transformations if the COND_EXPR may be used
4795 as an lvalue in the C++ front-end. PR c++/19199. */
4797 || (strcmp (lang_hooks.name, "GNU C++") != 0
4798 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4799 || ! maybe_lvalue_p (arg1)
4800 || ! maybe_lvalue_p (arg2)))
4802 tree comp_op0 = arg00;
4803 tree comp_op1 = arg01;
4804 tree comp_type = TREE_TYPE (comp_op0);
4806 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4807 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4817 return pedantic_non_lvalue (fold_convert (type, arg2));
4819 return pedantic_non_lvalue (fold_convert (type, arg1));
4824 /* In C++ a ?: expression can be an lvalue, so put the
4825 operand which will be used if they are equal first
4826 so that we can convert this back to the
4827 corresponding COND_EXPR. */
4828 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4830 comp_op0 = fold_convert (comp_type, comp_op0);
4831 comp_op1 = fold_convert (comp_type, comp_op1);
4832 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4833 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4834 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4835 return pedantic_non_lvalue (fold_convert (type, tem));
4842 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4844 comp_op0 = fold_convert (comp_type, comp_op0);
4845 comp_op1 = fold_convert (comp_type, comp_op1);
4846 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4847 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4848 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4849 return pedantic_non_lvalue (fold_convert (type, tem));
4853 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4854 return pedantic_non_lvalue (fold_convert (type, arg2));
4857 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4858 return pedantic_non_lvalue (fold_convert (type, arg1));
4861 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4866 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4867 we might still be able to simplify this. For example,
4868 if C1 is one less or one more than C2, this might have started
4869 out as a MIN or MAX and been transformed by this function.
4870 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4872 if (INTEGRAL_TYPE_P (type)
4873 && TREE_CODE (arg01) == INTEGER_CST
4874 && TREE_CODE (arg2) == INTEGER_CST)
/* The case labels selecting among the EQ / LE-LT / GE-GT rewrites
   below are not visible in this extract.  */
4878 /* We can replace A with C1 in this case. */
4879 arg1 = fold_convert (type, arg01);
4880 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4883 /* If C1 is C2 + 1, this is min(A, C2). */
4884 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4886 && operand_equal_p (arg01,
4887 const_binop (PLUS_EXPR, arg2,
4888 build_int_cst (type, 1), 0),
4890 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4895 /* If C1 is C2 - 1, this is min(A, C2). */
4896 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4898 && operand_equal_p (arg01,
4899 const_binop (MINUS_EXPR, arg2,
4900 build_int_cst (type, 1), 0),
4902 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4907 /* If C1 is C2 - 1, this is max(A, C2). */
4908 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4910 && operand_equal_p (arg01,
4911 const_binop (MINUS_EXPR, arg2,
4912 build_int_cst (type, 1), 0),
4914 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4919 /* If C1 is C2 + 1, this is max(A, C2). */
4920 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4922 && operand_equal_p (arg01,
4923 const_binop (PLUS_EXPR, arg2,
4924 build_int_cst (type, 1), 0),
4926 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4940 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4941 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4944 /* EXP is some logical combination of boolean tests. See if we can
4945 merge it into some range test. Return the new tree if so. */
/* NOTE(review): the embedded original line numbers jump in this region,
   so some statements of this function have been elided; the comments
   below describe only the visible logic.
   CODE is the logical operator (TRUTH_AND[IF]_EXPR / TRUTH_OR[IF]_EXPR),
   TYPE is the result type, and OP0/OP1 are the two boolean operands.
   Returns the merged/simplified tree, or 0 when no merge is possible.  */
4948 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4950 int or_op = (code == TRUTH_ORIF_EXPR
4951 || code == TRUTH_OR_EXPR);
4952 int in0_p, in1_p, in_p;
4953 tree low0, low1, low, high0, high1, high;
4954 bool strict_overflow_p = false;
/* Decompose each operand into an interval test: IN*_P says whether the
   value must be inside [low*, high*] or outside it.  */
4955 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4956 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4958 const char * const warnmsg = G_("assuming signed overflow does not occur "
4959 "when simplifying range test");
4961 /* If this is an OR operation, invert both sides; we will invert
4962 again at the end. */
/* De Morgan: A || B == !(!A && !B), so ORs are handled via the AND path.  */
4964 in0_p = ! in0_p, in1_p = ! in1_p;
4966 /* If both expressions are the same, if we can merge the ranges, and we
4967 can build the range test, return it or it inverted. If one of the
4968 ranges is always true or always false, consider it to be the same
4969 expression as the other. */
4970 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4971 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4973 && 0 != (tem = (build_range_check (type,
4975 : rhs != 0 ? rhs : integer_zero_node,
4978 if (strict_overflow_p)
/* The simplification relied on signed overflow being undefined;
   tell the user if -Wstrict-overflow is in effect.  */
4979 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4980 return or_op ? invert_truthvalue (tem) : tem;
4983 /* On machines where the branch cost is expensive, if this is a
4984 short-circuited branch and the underlying object on both sides
4985 is the same, make a non-short-circuit operation. */
4986 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4987 && lhs != 0 && rhs != 0
4988 && (code == TRUTH_ANDIF_EXPR
4989 || code == TRUTH_ORIF_EXPR)
4990 && operand_equal_p (lhs, rhs, 0))
4992 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4993 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4994 which cases we can't do this. */
4995 if (simple_operand_p (lhs))
4996 return build2 (code == TRUTH_ANDIF_EXPR
4997 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5000 else if (lang_hooks.decls.global_bindings_p () == 0
5001 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Wrap the shared subexpression so it is evaluated only once even
   though both rebuilt range checks reference it.  */
5003 tree common = save_expr (lhs);
5005 if (0 != (lhs = build_range_check (type, common,
5006 or_op ? ! in0_p : in0_p,
5008 && (0 != (rhs = build_range_check (type, common,
5009 or_op ? ! in1_p : in1_p,
5012 if (strict_overflow_p)
5013 fold_overflow_warning (warnmsg,
5014 WARN_STRICT_OVERFLOW_COMPARISON);
5015 return build2 (code == TRUTH_ANDIF_EXPR
5016 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5025 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5026 bit value. Arrange things so the extra bits will be set to zero if and
5027 only if C is signed-extended to its full width. If MASK is nonzero,
5028 it is an INTEGER_CST that should be AND'ed with the extra bits. */
/* NOTE(review): original line numbering jumps here, so a few statements
   (braces, an early return) are elided in this copy.  */
5031 unextend (tree c, int p, int unsignedp, tree mask)
5033 tree type = TREE_TYPE (c);
5034 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Nothing to do if C already occupies the whole mode or is unsigned.  */
5037 if (p == modesize || unsignedp)
5040 /* We work by getting just the sign bit into the low-order bit, then
5041 into the high-order bit, then sign-extend. We then XOR that value
/* Isolate bit P-1 (the sign bit of the P-bit value) as a 0/1 value.  */
5043 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5044 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5046 /* We must use a signed type in order to get an arithmetic right shift.
5047 However, we must also avoid introducing accidental overflows, so that
5048 a subsequent call to integer_zerop will work. Hence we must
5049 do the type conversion here. At this point, the constant is either
5050 zero or one, and the conversion to a signed type can never overflow.
5051 We could get an overflow if this conversion is done anywhere else. */
5052 if (TYPE_UNSIGNED (type))
5053 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
/* Move the bit to the top, then arithmetic-shift it back down so the
   high MODESIZE-P bits replicate the sign bit.  */
5055 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5056 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5058 temp = const_binop (BIT_AND_EXPR, temp,
5059 fold_convert (TREE_TYPE (c), mask), 0);
5060 /* If necessary, convert the type back to match the type of C. */
5061 if (TYPE_UNSIGNED (type))
5062 temp = fold_convert (type, temp);
/* XOR clears the extra bits exactly when C was properly sign-extended.  */
5064 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5067 /* Find ways of folding logical expressions of LHS and RHS:
5068 Try to merge two comparisons to the same innermost item.
5069 Look for range tests like "ch >= '0' && ch <= '9'".
5070 Look for combinations of simple terms on machines with expensive branches
5071 and evaluate the RHS unconditionally.
5073 For example, if we have p->a == 2 && p->b == 4 and we can make an
5074 object large enough to span both A and B, we can do this with a comparison
5075 against the object ANDed with the a mask.
5077 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5078 operations to do this with one comparison.
5080 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5081 function and the one above.
5083 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5084 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5086 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5089 We return the simplified tree or 0 if no optimization is possible. */
/* NOTE(review): the embedded original line numbers jump throughout this
   function, so braces/returns and some statements are elided in this copy;
   the naming convention is ll_/lr_/rl_/rr_ = {left,right comparison}
   {left,right operand} as established by the operand assignments below.  */
5092 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5094 /* If this is the "or" of two comparisons, we can do something if
5095 the comparisons are NE_EXPR. If this is the "and", we can do something
5096 if the comparisons are EQ_EXPR. I.e.,
5097 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5099 WANTED_CODE is this operation code. For single bit fields, we can
5100 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5101 comparison for one-bit fields. */
5103 enum tree_code wanted_code;
5104 enum tree_code lcode, rcode;
5105 tree ll_arg, lr_arg, rl_arg, rr_arg;
5106 tree ll_inner, lr_inner, rl_inner, rr_inner;
5107 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5108 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5109 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5110 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5111 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5112 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5113 enum machine_mode lnmode, rnmode;
5114 tree ll_mask, lr_mask, rl_mask, rr_mask;
5115 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5116 tree l_const, r_const;
5117 tree lntype, rntype, result;
5118 int first_bit, end_bit;
/* Remember the incoming operands/code so we can tell later whether
   anything was actually changed.  */
5120 tree orig_lhs = lhs, orig_rhs = rhs;
5121 enum tree_code orig_code = code;
5123 /* Start by getting the comparison codes. Fail if anything is volatile.
5124 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5125 it were surrounded with a NE_EXPR. */
5127 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5130 lcode = TREE_CODE (lhs);
5131 rcode = TREE_CODE (rhs);
/* Normalize (x & 1) into (x & 1) != 0 on either side.  */
5133 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5135 lhs = build2 (NE_EXPR, truth_type, lhs,
5136 build_int_cst (TREE_TYPE (lhs), 0));
5140 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5142 rhs = build2 (NE_EXPR, truth_type, rhs,
5143 build_int_cst (TREE_TYPE (rhs), 0));
5147 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5148 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5151 ll_arg = TREE_OPERAND (lhs, 0);
5152 lr_arg = TREE_OPERAND (lhs, 1);
5153 rl_arg = TREE_OPERAND (rhs, 0);
5154 rr_arg = TREE_OPERAND (rhs, 1);
5156 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5157 if (simple_operand_p (ll_arg)
5158 && simple_operand_p (lr_arg))
/* Both comparisons are over the same pair of operands, in the same
   order: combine the two comparison codes directly.  */
5161 if (operand_equal_p (ll_arg, rl_arg, 0)
5162 && operand_equal_p (lr_arg, rr_arg, 0))
5164 result = combine_comparisons (code, lcode, rcode,
5165 truth_type, ll_arg, lr_arg);
/* Same operands but swapped in the second comparison: flip its
   comparison code before combining.  */
5169 else if (operand_equal_p (ll_arg, rr_arg, 0)
5170 && operand_equal_p (lr_arg, rl_arg, 0))
5172 result = combine_comparisons (code, lcode,
5173 swap_tree_comparison (rcode),
5174 truth_type, ll_arg, lr_arg);
/* From here on the short-circuit and plain forms are equivalent.  */
5180 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5181 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5183 /* If the RHS can be evaluated unconditionally and its operands are
5184 simple, it wins to evaluate the RHS unconditionally on machines
5185 with expensive branches. In this case, this isn't a comparison
5186 that can be merged. Avoid doing this if the RHS is a floating-point
5187 comparison since those can trap. */
5189 if (BRANCH_COST >= 2
5190 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5191 && simple_operand_p (rl_arg)
5192 && simple_operand_p (rr_arg))
5194 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5195 if (code == TRUTH_OR_EXPR
5196 && lcode == NE_EXPR && integer_zerop (lr_arg)
5197 && rcode == NE_EXPR && integer_zerop (rr_arg)
5198 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5199 return build2 (NE_EXPR, truth_type,
5200 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5202 build_int_cst (TREE_TYPE (ll_arg), 0));
5204 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5205 if (code == TRUTH_AND_EXPR
5206 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5207 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5208 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5209 return build2 (EQ_EXPR, truth_type,
5210 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5212 build_int_cst (TREE_TYPE (ll_arg), 0));
5214 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
/* Only rebuild if something above actually changed, to avoid
   infinite recanonicalization in fold.  */
5216 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5217 return build2 (code, truth_type, lhs, rhs);
5222 /* See if the comparisons can be merged. Then get all the parameters for
5225 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5226 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decompose all four comparison operands into (object, bit position,
   bit size, mode, signedness, mask) descriptions of field accesses.  */
5230 ll_inner = decode_field_reference (ll_arg,
5231 &ll_bitsize, &ll_bitpos, &ll_mode,
5232 &ll_unsignedp, &volatilep, &ll_mask,
5234 lr_inner = decode_field_reference (lr_arg,
5235 &lr_bitsize, &lr_bitpos, &lr_mode,
5236 &lr_unsignedp, &volatilep, &lr_mask,
5238 rl_inner = decode_field_reference (rl_arg,
5239 &rl_bitsize, &rl_bitpos, &rl_mode,
5240 &rl_unsignedp, &volatilep, &rl_mask,
5242 rr_inner = decode_field_reference (rr_arg,
5243 &rr_bitsize, &rr_bitpos, &rr_mode,
5244 &rr_unsignedp, &volatilep, &rr_mask,
5247 /* It must be true that the inner operation on the lhs of each
5248 comparison must be the same if we are to be able to do anything.
5249 Then see if we have constants. If not, the same must be true for
5251 if (volatilep || ll_inner == 0 || rl_inner == 0
5252 || ! operand_equal_p (ll_inner, rl_inner, 0))
5255 if (TREE_CODE (lr_arg) == INTEGER_CST
5256 && TREE_CODE (rr_arg) == INTEGER_CST)
5257 l_const = lr_arg, r_const = rr_arg;
5258 else if (lr_inner == 0 || rr_inner == 0
5259 || ! operand_equal_p (lr_inner, rr_inner, 0))
5262 l_const = r_const = 0;
5264 /* If either comparison code is not correct for our logical operation,
5265 fail. However, we can convert a one-bit comparison against zero into
5266 the opposite comparison against that bit being set in the field. */
5268 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5269 if (lcode != wanted_code)
5271 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5273 /* Make the left operand unsigned, since we are only interested
5274 in the value of one bit. Otherwise we are doing the wrong
5283 /* This is analogous to the code for l_const above. */
5284 if (rcode != wanted_code)
5286 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5295 /* See if we can find a mode that contains both fields being compared on
5296 the left. If we can't, fail. Otherwise, update all constants and masks
5297 to be relative to a field of that size. */
5298 first_bit = MIN (ll_bitpos, rl_bitpos);
5299 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5300 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5301 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5303 if (lnmode == VOIDmode)
5306 lnbitsize = GET_MODE_BITSIZE (lnmode);
5307 lnbitpos = first_bit & ~ (lnbitsize - 1);
5308 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
/* x*_bitpos are the field positions re-expressed relative to the
   start of the combined LNBITSIZE-bit field.  */
5309 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5311 if (BYTES_BIG_ENDIAN)
5313 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5314 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5317 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5318 size_int (xll_bitpos), 0);
5319 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5320 size_int (xrl_bitpos), 0);
/* Align the constants to their field positions, and detect comparisons
   that can never (or always) succeed because the constant has bits
   outside the field.  */
5324 l_const = fold_convert (lntype, l_const);
5325 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5326 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5327 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5328 fold_build1 (BIT_NOT_EXPR,
5332 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5334 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5339 r_const = fold_convert (lntype, r_const);
5340 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5341 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5342 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5343 fold_build1 (BIT_NOT_EXPR,
5347 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5349 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5353 /* If the right sides are not constant, do the same for it. Also,
5354 disallow this optimization if a size or signedness mismatch occurs
5355 between the left and right sides. */
5358 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5359 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5360 /* Make sure the two fields on the right
5361 correspond to the left without being swapped. */
5362 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5365 first_bit = MIN (lr_bitpos, rr_bitpos);
5366 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5367 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5368 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5370 if (rnmode == VOIDmode)
5373 rnbitsize = GET_MODE_BITSIZE (rnmode);
5374 rnbitpos = first_bit & ~ (rnbitsize - 1);
5375 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5376 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5378 if (BYTES_BIG_ENDIAN)
5380 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5381 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5384 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5385 size_int (xlr_bitpos), 0);
5386 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5387 size_int (xrr_bitpos), 0);
5389 /* Make a mask that corresponds to both fields being compared.
5390 Do this for both items being compared. If the operands are the
5391 same size and the bits being compared are in the same position
5392 then we can do this by masking both and comparing the masked
5394 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5395 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5396 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5398 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5399 ll_unsignedp || rl_unsignedp);
5400 if (! all_ones_mask_p (ll_mask, lnbitsize))
5401 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5403 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5404 lr_unsignedp || rr_unsignedp);
5405 if (! all_ones_mask_p (lr_mask, rnbitsize))
5406 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5408 return build2 (wanted_code, truth_type, lhs, rhs);
5411 /* There is still another way we can do something: If both pairs of
5412 fields being compared are adjacent, we may be able to make a wider
5413 field containing them both.
5415 Note that we still must mask the lhs/rhs expressions. Furthermore,
5416 the mask must be shifted to account for the shift done by
5417 make_bit_field_ref. */
5418 if ((ll_bitsize + ll_bitpos == rl_bitpos
5419 && lr_bitsize + lr_bitpos == rr_bitpos)
5420 || (ll_bitpos == rl_bitpos + rl_bitsize
5421 && lr_bitpos == rr_bitpos + rr_bitsize))
5425 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5426 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5427 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5428 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5430 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5431 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5432 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5433 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5435 /* Convert to the smaller type before masking out unwanted bits. */
5437 if (lntype != rntype)
5439 if (lnbitsize > rnbitsize)
5441 lhs = fold_convert (rntype, lhs);
5442 ll_mask = fold_convert (rntype, ll_mask);
5445 else if (lnbitsize < rnbitsize)
5447 rhs = fold_convert (lntype, rhs);
5448 lr_mask = fold_convert (lntype, lr_mask);
5453 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5454 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5456 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5457 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5459 return build2 (wanted_code, truth_type, lhs, rhs);
5465 /* Handle the case of comparisons with constants. If there is something in
5466 common between the masks, those bits of the constants must be the same.
5467 If not, the condition is always false. Test for this to avoid generating
5468 incorrect code below. */
5469 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5470 if (! integer_zerop (result)
5471 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5472 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5474 if (wanted_code == NE_EXPR)
5476 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5477 return constant_boolean_node (true, truth_type);
5481 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5482 return constant_boolean_node (false, truth_type);
5486 /* Construct the expression we will return. First get the component
5487 reference we will make. Unless the mask is all ones the width of
5488 that field, perform the mask operation. Then compare with the
5490 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5491 ll_unsignedp || rl_unsignedp);
5493 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5494 if (! all_ones_mask_p (ll_mask, lnbitsize))
5495 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5497 return build2 (wanted_code, truth_type, result,
5498 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5501 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
/* NOTE(review): original line numbering jumps here, so parts of the
   switch scaffolding (the `switch (code)` line, case labels, braces)
   are elided in this copy.
   CODE is the comparison code, TYPE the result type, OP0 the MIN/MAX
   expression and OP1 the constant being compared against.  The
   simplifications below assume OP0 is MIN/MAX of an operand and an
   INTEGER_CST; otherwise the original tree is returned unchanged.  */
5505 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5508 enum tree_code op_code;
5509 tree comp_const = op1;
5511 int consts_equal, consts_lt;
5514 STRIP_SIGN_NOPS (arg0);
5516 op_code = TREE_CODE (arg0);
5517 minmax_const = TREE_OPERAND (arg0, 1);
/* Precompute the two orderings of the MIN/MAX constant vs. the
   comparison constant that drive every case below.  */
5518 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5519 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5520 inner = TREE_OPERAND (arg0, 0);
5522 /* If something does not permit us to optimize, return the original tree. */
5523 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5524 || TREE_CODE (comp_const) != INTEGER_CST
5525 || TREE_OVERFLOW (comp_const)
5526 || TREE_CODE (minmax_const) != INTEGER_CST
5527 || TREE_OVERFLOW (minmax_const))
5530 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5531 and GT_EXPR, doing the rest with recursive calls using logical
/* NE/LT/LE are reduced to the inverse comparison and then inverted.  */
5535 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5537 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5540 return invert_truthvalue (tem);
/* GE is handled as (== || >).  */
5546 fold_build2 (TRUTH_ORIF_EXPR, type,
5547 optimize_minmax_comparison
5548 (EQ_EXPR, type, arg0, comp_const),
5549 optimize_minmax_comparison
5550 (GT_EXPR, type, arg0, comp_const));
5553 if (op_code == MAX_EXPR && consts_equal)
5554 /* MAX (X, 0) == 0 -> X <= 0 */
5555 return fold_build2 (LE_EXPR, type, inner, comp_const);
5557 else if (op_code == MAX_EXPR && consts_lt)
5558 /* MAX (X, 0) == 5 -> X == 5 */
5559 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5561 else if (op_code == MAX_EXPR)
5562 /* MAX (X, 0) == -1 -> false */
5563 return omit_one_operand (type, integer_zero_node, inner);
5565 else if (consts_equal)
5566 /* MIN (X, 0) == 0 -> X >= 0 */
5567 return fold_build2 (GE_EXPR, type, inner, comp_const);
5570 /* MIN (X, 0) == 5 -> false */
5571 return omit_one_operand (type, integer_zero_node, inner);
5574 /* MIN (X, 0) == -1 -> X == -1 */
5575 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5578 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5579 /* MAX (X, 0) > 0 -> X > 0
5580 MAX (X, 0) > 5 -> X > 5 */
5581 return fold_build2 (GT_EXPR, type, inner, comp_const);
5583 else if (op_code == MAX_EXPR)
5584 /* MAX (X, 0) > -1 -> true */
5585 return omit_one_operand (type, integer_one_node, inner);
5587 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5588 /* MIN (X, 0) > 0 -> false
5589 MIN (X, 0) > 5 -> false */
5590 return omit_one_operand (type, integer_zero_node, inner);
5593 /* MIN (X, 0) > -1 -> X > -1 */
5594 return fold_build2 (GT_EXPR, type, inner, comp_const);
5601 /* T is an integer expression that is being multiplied, divided, or taken a
5602 modulus (CODE says which and what kind of divide or modulus) by a
5603 constant C. See if we can eliminate that operation by folding it with
5604 other operations already in T. WIDE_TYPE, if non-null, is a type that
5605 should be used for the computation if wider than our type.
5607 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5608 (X * 2) + (Y * 4). We must, however, be assured that either the original
5609 expression would not overflow or that overflow is undefined for the type
5610 in the language in question.
5612 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5613 the machine has a multiply-accumulate insn or that this is part of an
5614 addressing calculation.
5616 If we return a non-null expression, it is an equivalent form of the
5617 original computation, but need not be in the original type.
5619 We set *STRICT_OVERFLOW_P to true if the return values depends on
5620 signed overflow being undefined. Otherwise we do not change
5621 *STRICT_OVERFLOW_P. */
/* Thin wrapper around extract_muldiv_1.
   NOTE(review): original line numbering jumps here — the depth counter
   increment/decrement surrounding the call is elided in this copy.  */
5624 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5625 bool *strict_overflow_p)
5627 /* To avoid exponential search depth, refuse to allow recursion past
5628 three levels. Beyond that (1) it's highly unlikely that we'll find
5629 something interesting and (2) we've probably processed it before
5630 when we built the inner expression. */
/* Delegate the real work; this wrapper exists only to bound recursion.  */
5639 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
/* Worker for extract_muldiv: dispatch on TREE_CODE (T) and try to fold
   the outer multiply/divide/modulus by C into T's own operation.
   NOTE(review): original line numbering jumps throughout this function,
   so the `switch (tcode)` line, several case labels, braces and some
   statements are elided in this copy; comments describe visible logic.  */
5646 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5647 bool *strict_overflow_p)
5649 tree type = TREE_TYPE (t);
5650 enum tree_code tcode = TREE_CODE (t);
/* Compute in WIDE_TYPE when it is genuinely wider than T's type.  */
5651 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5652 > GET_MODE_SIZE (TYPE_MODE (type)))
5653 ? wide_type : type);
5655 int same_p = tcode == code;
5656 tree op0 = NULL_TREE, op1 = NULL_TREE;
5657 bool sub_strict_overflow_p;
5659 /* Don't deal with constants of zero here; they confuse the code below. */
5660 if (integer_zerop (c))
5663 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5664 op0 = TREE_OPERAND (t, 0);
5666 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5667 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5669 /* Note that we need not handle conditional operations here since fold
5670 already handles those cases. So just do arithmetic here. */
5674 /* For a constant, we can always simplify if we are a multiply
5675 or (for divide and modulus) if it is a multiple of our constant. */
5676 if (code == MULT_EXPR
5677 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5678 return const_binop (code, fold_convert (ctype, t),
5679 fold_convert (ctype, c), 0);
5682 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5683 /* If op0 is an expression ... */
5684 if ((COMPARISON_CLASS_P (op0)
5685 || UNARY_CLASS_P (op0)
5686 || BINARY_CLASS_P (op0)
5687 || VL_EXP_CLASS_P (op0)
5688 || EXPRESSION_CLASS_P (op0))
5689 /* ... and is unsigned, and its type is smaller than ctype,
5690 then we cannot pass through as widening. */
5691 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5692 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5693 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5694 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5695 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5696 /* ... or this is a truncation (t is narrower than op0),
5697 then we cannot pass through this narrowing. */
5698 || (GET_MODE_SIZE (TYPE_MODE (type))
5699 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5700 /* ... or signedness changes for division or modulus,
5701 then we cannot pass through this conversion. */
5702 || (code != MULT_EXPR
5703 && (TYPE_UNSIGNED (ctype)
5704 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5707 /* Pass the constant down and see if we can make a simplification. If
5708 we can, replace this expression with the inner simplification for
5709 possible later conversion to our or some other type. */
5710 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5711 && TREE_CODE (t2) == INTEGER_CST
5712 && !TREE_OVERFLOW (t2)
5713 && (0 != (t1 = extract_muldiv (op0, t2, code,
5715 ? ctype : NULL_TREE,
5716 strict_overflow_p))))
5721 /* If widening the type changes it from signed to unsigned, then we
5722 must avoid building ABS_EXPR itself as unsigned. */
5723 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5725 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5726 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5729 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5730 return fold_convert (ctype, t1);
/* Generic unary case: push the operation inside the operand.  */
5736 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5738 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5741 case MIN_EXPR: case MAX_EXPR:
5742 /* If widening the type changes the signedness, then we can't perform
5743 this optimization as that changes the result. */
5744 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5747 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5748 sub_strict_overflow_p = false;
5749 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5750 &sub_strict_overflow_p)) != 0
5751 && (t2 = extract_muldiv (op1, c, code, wide_type,
5752 &sub_strict_overflow_p)) != 0)
/* Multiplying/dividing by a negative constant swaps MIN and MAX.  */
5754 if (tree_int_cst_sgn (c) < 0)
5755 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5756 if (sub_strict_overflow_p)
5757 *strict_overflow_p = true;
5758 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5759 fold_convert (ctype, t2));
5763 case LSHIFT_EXPR: case RSHIFT_EXPR:
5764 /* If the second operand is constant, this is a multiplication
5765 or floor division, by a power of two, so we can treat it that
5766 way unless the multiplier or divisor overflows. Signed
5767 left-shift overflow is implementation-defined rather than
5768 undefined in C90, so do not convert signed left shift into
5770 if (TREE_CODE (op1) == INTEGER_CST
5771 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5772 /* const_binop may not detect overflow correctly,
5773 so check for it explicitly here. */
5774 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5775 && TREE_INT_CST_HIGH (op1) == 0
5776 && 0 != (t1 = fold_convert (ctype,
5777 const_binop (LSHIFT_EXPR,
5780 && !TREE_OVERFLOW (t1))
/* Recurse on the equivalent multiply/floor-divide by 1<<op1.  */
5781 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5782 ? MULT_EXPR : FLOOR_DIV_EXPR,
5783 ctype, fold_convert (ctype, op0), t1),
5784 c, code, wide_type, strict_overflow_p);
5787 case PLUS_EXPR: case MINUS_EXPR:
5788 /* See if we can eliminate the operation on both sides. If we can, we
5789 can return a new PLUS or MINUS. If we can't, the only remaining
5790 cases where we can do anything are if the second operand is a
5792 sub_strict_overflow_p = false;
5793 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5794 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5795 if (t1 != 0 && t2 != 0
5796 && (code == MULT_EXPR
5797 /* If not multiplication, we can only do this if both operands
5798 are divisible by c. */
5799 || (multiple_of_p (ctype, op0, c)
5800 && multiple_of_p (ctype, op1, c))))
5802 if (sub_strict_overflow_p)
5803 *strict_overflow_p = true;
5804 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5805 fold_convert (ctype, t2));
5808 /* If this was a subtraction, negate OP1 and set it to be an addition.
5809 This simplifies the logic below. */
5810 if (tcode == MINUS_EXPR)
5811 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5813 if (TREE_CODE (op1) != INTEGER_CST)
5816 /* If either OP1 or C are negative, this optimization is not safe for
5817 some of the division and remainder types while for others we need
5818 to change the code. */
5819 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5821 if (code == CEIL_DIV_EXPR)
5822 code = FLOOR_DIV_EXPR;
5823 else if (code == FLOOR_DIV_EXPR)
5824 code = CEIL_DIV_EXPR;
5825 else if (code != MULT_EXPR
5826 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5830 /* If it's a multiply or a division/modulus operation of a multiple
5831 of our constant, do the operation and verify it doesn't overflow. */
5832 if (code == MULT_EXPR
5833 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5835 op1 = const_binop (code, fold_convert (ctype, op1),
5836 fold_convert (ctype, c), 0);
5837 /* We allow the constant to overflow with wrapping semantics. */
5839 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5845 /* If we have an unsigned type is not a sizetype, we cannot widen
5846 the operation since it will change the result if the original
5847 computation overflowed. */
5848 if (TYPE_UNSIGNED (ctype)
5849 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5853 /* If we were able to eliminate our operation from the first side,
5854 apply our operation to the second side and reform the PLUS. */
5855 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5856 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5858 /* The last case is if we are a multiply. In that case, we can
5859 apply the distributive law to commute the multiply and addition
5860 if the multiplication of the constants doesn't overflow. */
5861 if (code == MULT_EXPR)
5862 return fold_build2 (tcode, ctype,
5863 fold_build2 (code, ctype,
5864 fold_convert (ctype, op0),
5865 fold_convert (ctype, c)),
5871 /* We have a special case here if we are doing something like
5872 (C * 8) % 4 since we know that's zero. */
5873 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5874 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5875 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5876 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
/* omit_one_operand preserves op0's side effects while folding to 0.  */
5877 return omit_one_operand (type, integer_zero_node, op0);
5879 /* ... fall through ... */
5881 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5882 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5883 /* If we can extract our operation from the LHS, do so and return a
5884 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5885 do something only if the second operand is a constant. */
5887 && (t1 = extract_muldiv (op0, c, code, wide_type,
5888 strict_overflow_p)) != 0)
5889 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5890 fold_convert (ctype, op1));
5891 else if (tcode == MULT_EXPR && code == MULT_EXPR
5892 && (t1 = extract_muldiv (op1, c, code, wide_type,
5893 strict_overflow_p)) != 0)
5894 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5895 fold_convert (ctype, t1));
5896 else if (TREE_CODE (op1) != INTEGER_CST)
5899 /* If these are the same operation types, we can associate them
5900 assuming no overflow. */
5902 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5903 fold_convert (ctype, c), 0))
5904 && !TREE_OVERFLOW (t1))
5905 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5907 /* If these operations "cancel" each other, we have the main
5908 optimizations of this pass, which occur when either constant is a
5909 multiple of the other, in which case we replace this with either an
5910 operation or CODE or TCODE.
5912 If we have an unsigned type that is not a sizetype, we cannot do
5913 this since it will change the result if the original computation
5915 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5916 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5917 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5918 || (tcode == MULT_EXPR
5919 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5920 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
/* op1 is a multiple of c: keep TCODE with the reduced constant.  */
5922 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5924 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5925 *strict_overflow_p = true;
5926 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5927 fold_convert (ctype,
5928 const_binop (TRUNC_DIV_EXPR,
/* c is a multiple of op1: keep CODE with the reduced constant.  */
5931 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5933 if (TYPE_OVERFLOW_UNDEFINED (ctype))
5934 *strict_overflow_p = true;
5935 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5936 fold_convert (ctype,
5937 const_binop (TRUNC_DIV_EXPR,
5950 /* Return a node which has the indicated constant VALUE (either 0 or
5951 1), and is of the indicated TYPE. */
/* Build a constant 0/1 node of TYPE, reusing the shared integer/boolean
   nodes when TYPE is one of the two common cases.
   NOTE(review): elided listing (line numbers jump); missing lines such as
   braces are not reconstructed.  */
5954 constant_boolean_node (int value, tree type)
5956 if (type == integer_type_node)
5957 return value ? integer_one_node : integer_zero_node;
5958 else if (type == boolean_type_node)
5959 return value ? boolean_true_node : boolean_false_node;
/* Any other integral type: materialize the 0/1 constant in TYPE.  */
5961 return build_int_cst (type, value);
5965 /* Return true if expr looks like an ARRAY_REF and set base and
5966 offset to the appropriate trees. If there is no offset,
5967 offset is set to NULL_TREE. Base will be canonicalized to
5968 something you can get the element type from using
5969 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5970 in bytes to the base. */
/* Decompose EXPR into *BASE and *OFFSET (see the header comment above).
   Recurses through PLUS_EXPR, accumulating offsets as it unwinds.
   NOTE(review): elided listing (line numbers jump); some statements,
   returns and braces are not visible and are not reconstructed.  */
5973 extract_array_ref (tree expr, tree *base, tree *offset)
5975 /* One canonical form is a PLUS_EXPR with the first
5976 argument being an ADDR_EXPR with a possible NOP_EXPR
5978 if (TREE_CODE (expr) == PLUS_EXPR)
5980 tree op0 = TREE_OPERAND (expr, 0);
5981 tree inner_base, dummy1;
5982 /* Strip NOP_EXPRs here because the C frontends and/or
5983 folders present us (int *)&x.a + 4B possibly. */
5985 if (extract_array_ref (op0, &inner_base, &dummy1))
5988 if (dummy1 == NULL_TREE)
5989 *offset = TREE_OPERAND (expr, 1);
/* Inner access already had an offset: add the outer addend to it.  */
5991 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5992 dummy1, TREE_OPERAND (expr, 1));
5996 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5997 which we transform into an ADDR_EXPR with appropriate
5998 offset. For other arguments to the ADDR_EXPR we assume
5999 zero offset and as such do not care about the ADDR_EXPR
6000 type and strip possible nops from it. */
6001 else if (TREE_CODE (expr) == ADDR_EXPR)
6003 tree op0 = TREE_OPERAND (expr, 0);
6004 if (TREE_CODE (op0) == ARRAY_REF)
6006 tree idx = TREE_OPERAND (op0, 1);
6007 *base = TREE_OPERAND (op0, 0);
/* Byte offset = index * element size.  */
6008 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
6009 array_ref_element_size (op0));
6013 /* Handle array-to-pointer decay as &a. */
6014 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
6015 *base = TREE_OPERAND (expr, 0);
6018 *offset = NULL_TREE;
6022 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
6023 else if (SSA_VAR_P (expr)
6024 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
6027 *offset = NULL_TREE;
6035 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6036 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6037 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6038 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6039 COND is the first argument to CODE; otherwise (as in the example
6040 given here), it is the second argument. TYPE is the type of the
6041 original expression. Return NULL_TREE if no simplification is
/* Distribute CODE over a COND_EXPR/comparison operand (see header above).
   NOTE(review): elided listing (line numbers jump); several returns,
   else-branches and braces are not visible here.  */
6045 fold_binary_op_with_conditional_arg (enum tree_code code,
6046 tree type, tree op0, tree op1,
6047 tree cond, tree arg, int cond_first_p)
6049 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6050 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6051 tree test, true_value, false_value;
6052 tree lhs = NULL_TREE;
6053 tree rhs = NULL_TREE;
6055 /* This transformation is only worthwhile if we don't have to wrap
6056 arg in a SAVE_EXPR, and the operation can be simplified on at least
6057 one of the branches once it's pushed inside the COND_EXPR. */
6058 if (!TREE_CONSTANT (arg))
6061 if (TREE_CODE (cond) == COND_EXPR)
6063 test = TREE_OPERAND (cond, 0);
6064 true_value = TREE_OPERAND (cond, 1);
6065 false_value = TREE_OPERAND (cond, 2);
6066 /* If this operand throws an expression, then it does not make
6067 sense to try to perform a logical or arithmetic operation
6069 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6071 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* COND is a comparison: its arms are the boolean constants.  */
6076 tree testtype = TREE_TYPE (cond);
6078 true_value = constant_boolean_node (true, testtype);
6079 false_value = constant_boolean_node (false, testtype);
6082 arg = fold_convert (arg_type, arg);
6085 true_value = fold_convert (cond_type, true_value);
/* Operand order depends on whether COND was the first operand.  */
6087 lhs = fold_build2 (code, type, true_value, arg);
6089 lhs = fold_build2 (code, type, arg, true_value);
6093 false_value = fold_convert (cond_type, false_value);
6095 rhs = fold_build2 (code, type, false_value, arg);
6097 rhs = fold_build2 (code, type, arg, false_value);
6100 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6101 return fold_convert (type, test);
6105 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6107 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6108 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6109 ADDEND is the same as X.
6111 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6112 and finite. The problematic cases are when X is zero, and its mode
6113 has signed zeros. In the case of rounding towards -infinity,
6114 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6115 modes, X + 0 is not the same as X because -0 + 0 is 0. */
/* See the header comment above for the contract.
   NOTE(review): elided listing (line numbers jump); the early `return`
   statements for the guard conditions are not visible here.  */
6118 fold_real_zero_addition_p (tree type, tree addend, int negate)
6120 if (!real_zerop (addend))
6123 /* Don't allow the fold with -fsignaling-nans. */
6124 if (HONOR_SNANS (TYPE_MODE (type)))
6127 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6128 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6131 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6132 if (TREE_CODE (addend) == REAL_CST
6133 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6136 /* The mode has signed zeros, and we have to honor their sign.
6137 In this situation, there is only one case we can return true for.
6138 X - 0 is the same as X unless rounding towards -infinity is
6140 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6143 /* Subroutine of fold() that checks comparisons of built-in math
6144 functions against real constants.
6146 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6147 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6148 is the type of the result and ARG0 and ARG1 are the operands of the
6149 comparison. ARG1 must be a TREE_REAL_CST.
6151 The function returns the constant folded tree if a simplification
6152 can be made, and NULL_TREE otherwise. */
/* Fold comparisons of built-in math calls against real constants (see
   header above).  Only the sqrt cases are visible in this listing.
   NOTE(review): elided listing (line numbers jump); declarations (c, c2),
   some returns and braces are not visible and are not reconstructed.  */
6155 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6156 tree type, tree arg0, tree arg1)
6160 if (BUILTIN_SQRT_P (fcode))
6162 tree arg = CALL_EXPR_ARG (arg0, 0);
6163 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6165 c = TREE_REAL_CST (arg1);
6166 if (REAL_VALUE_NEGATIVE (c))
6168 /* sqrt(x) < y is always false, if y is negative. */
6169 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6170 return omit_one_operand (type, integer_zero_node, arg);
6172 /* sqrt(x) > y is always true, if y is negative and we
6173 don't care about NaNs, i.e. negative values of x. */
6174 if (code == NE_EXPR || !HONOR_NANS (mode))
6175 return omit_one_operand (type, integer_one_node, arg);
6177 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6178 return fold_build2 (GE_EXPR, type, arg,
6179 build_real (TREE_TYPE (arg), dconst0));
6181 else if (code == GT_EXPR || code == GE_EXPR)
/* Square the bound (c2 = c*c) in the target mode.  */
6185 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6186 real_convert (&c2, mode, &c2);
6188 if (REAL_VALUE_ISINF (c2))
6190 /* sqrt(x) > y is x == +Inf, when y is very large. */
6191 if (HONOR_INFINITIES (mode))
6192 return fold_build2 (EQ_EXPR, type, arg,
6193 build_real (TREE_TYPE (arg), c2));
6195 /* sqrt(x) > y is always false, when y is very large
6196 and we don't care about infinities. */
6197 return omit_one_operand (type, integer_zero_node, arg);
6200 /* sqrt(x) > c is the same as x > c*c. */
6201 return fold_build2 (code, type, arg,
6202 build_real (TREE_TYPE (arg), c2));
6204 else if (code == LT_EXPR || code == LE_EXPR)
6208 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6209 real_convert (&c2, mode, &c2);
6211 if (REAL_VALUE_ISINF (c2))
6213 /* sqrt(x) < y is always true, when y is a very large
6214 value and we don't care about NaNs or Infinities. */
6215 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6216 return omit_one_operand (type, integer_one_node, arg);
6218 /* sqrt(x) < y is x != +Inf when y is very large and we
6219 don't care about NaNs. */
6220 if (! HONOR_NANS (mode))
6221 return fold_build2 (NE_EXPR, type, arg,
6222 build_real (TREE_TYPE (arg), c2));
6224 /* sqrt(x) < y is x >= 0 when y is very large and we
6225 don't care about Infinities. */
6226 if (! HONOR_INFINITIES (mode))
6227 return fold_build2 (GE_EXPR, type, arg,
6228 build_real (TREE_TYPE (arg), dconst0));
6230 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6231 if (lang_hooks.decls.global_bindings_p () != 0
6232 || CONTAINS_PLACEHOLDER_P (arg))
/* ARG is used twice below, so wrap it in a SAVE_EXPR.  */
6235 arg = save_expr (arg);
6236 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6237 fold_build2 (GE_EXPR, type, arg,
6238 build_real (TREE_TYPE (arg),
6240 fold_build2 (NE_EXPR, type, arg,
6241 build_real (TREE_TYPE (arg),
6245 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6246 if (! HONOR_NANS (mode))
6247 return fold_build2 (code, type, arg,
6248 build_real (TREE_TYPE (arg), c2));
6250 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6251 if (lang_hooks.decls.global_bindings_p () == 0
6252 && ! CONTAINS_PLACEHOLDER_P (arg))
6254 arg = save_expr (arg);
6255 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6256 fold_build2 (GE_EXPR, type, arg,
6257 build_real (TREE_TYPE (arg),
6259 fold_build2 (code, type, arg,
6260 build_real (TREE_TYPE (arg),
6269 /* Subroutine of fold() that optimizes comparisons against Infinities,
6270 either +Inf or -Inf.
6272 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6273 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6274 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6276 The function returns the constant folded tree if a simplification
6277 can be made, and NULL_TREE otherwise. */
/* Fold comparisons against +/-Inf (see header above).  The per-CODE
   cases below are arms of a switch whose labels are elided from this
   listing.
   NOTE(review): elided listing (line numbers jump); declarations (neg,
   temp), switch labels and braces are not visible here.  */
6280 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6282 enum machine_mode mode;
6283 REAL_VALUE_TYPE max;
6287 mode = TYPE_MODE (TREE_TYPE (arg0));
6289 /* For negative infinity swap the sense of the comparison. */
6290 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6292 code = swap_tree_comparison (code);
6297 /* x > +Inf is always false, if we ignore sNaNs. */
6298 if (HONOR_SNANS (mode))
6300 return omit_one_operand (type, integer_zero_node, arg0);
6303 /* x <= +Inf is always true, if we don't care about NaNs. */
6304 if (! HONOR_NANS (mode))
6305 return omit_one_operand (type, integer_one_node, arg0);
6307 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6308 if (lang_hooks.decls.global_bindings_p () == 0
6309 && ! CONTAINS_PLACEHOLDER_P (arg0))
6311 arg0 = save_expr (arg0);
6312 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6318 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6319 real_maxval (&max, neg, mode);
6320 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6321 arg0, build_real (TREE_TYPE (arg0), max));
6324 /* x < +Inf is always equal to x <= DBL_MAX. */
6325 real_maxval (&max, neg, mode);
6326 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6327 arg0, build_real (TREE_TYPE (arg0), max));
6330 /* x != +Inf is always equal to !(x > DBL_MAX). */
6331 real_maxval (&max, neg, mode);
6332 if (! HONOR_NANS (mode))
6333 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6334 arg0, build_real (TREE_TYPE (arg0), max));
/* NaNs possible: build the comparison and negate it explicitly.  */
6336 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6337 arg0, build_real (TREE_TYPE (arg0), max));
6338 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6347 /* Subroutine of fold() that optimizes comparisons of a division by
6348 a nonzero integer constant against an integer constant, i.e.
6351 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6352 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6353 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6355 The function returns the constant folded tree if a simplification
6356 can be made, and NULL_TREE otherwise. */
/* NOTE(review): elided listing (line numbers jump); declarations
   (overflow, neg_overflow), switch labels, `break`s and braces are not
   visible here and are not reconstructed.  The body computes the range
   [lo, hi] of X for which X / ARG01 == ARG1, then rewrites the
   comparison as a range check on X (= ARG00).  */
6359 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6361 tree prod, tmp, hi, lo;
6362 tree arg00 = TREE_OPERAND (arg0, 0);
6363 tree arg01 = TREE_OPERAND (arg0, 1);
6364 unsigned HOST_WIDE_INT lpart;
6365 HOST_WIDE_INT hpart;
6366 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6370 /* We have to do this the hard way to detect unsigned overflow.
6371 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6372 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6373 TREE_INT_CST_HIGH (arg01),
6374 TREE_INT_CST_LOW (arg1),
6375 TREE_INT_CST_HIGH (arg1),
6376 &lpart, &hpart, unsigned_p);
6377 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6379 neg_overflow = false;
6383 tmp = int_const_binop (MINUS_EXPR, arg01,
6384 build_int_cst (TREE_TYPE (arg01), 1), 0);
6387 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6388 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6389 TREE_INT_CST_HIGH (prod),
6390 TREE_INT_CST_LOW (tmp),
6391 TREE_INT_CST_HIGH (tmp),
6392 &lpart, &hpart, unsigned_p);
6393 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6394 -1, overflow | TREE_OVERFLOW (prod));
6396 else if (tree_int_cst_sgn (arg01) >= 0)
6398 tmp = int_const_binop (MINUS_EXPR, arg01,
6399 build_int_cst (TREE_TYPE (arg01), 1), 0);
/* Range bounds depend on the sign of the compared constant ARG1.  */
6400 switch (tree_int_cst_sgn (arg1))
6403 neg_overflow = true;
6404 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6409 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6414 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6424 /* A negative divisor reverses the relational operators. */
6425 code = swap_tree_comparison (code);
6427 tmp = int_const_binop (PLUS_EXPR, arg01,
6428 build_int_cst (TREE_TYPE (arg01), 1), 0);
6429 switch (tree_int_cst_sgn (arg1))
6432 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6437 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6442 neg_overflow = true;
6443 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Below: dispatch on CODE; overflowed bounds collapse the range check
   to a constant or a one-sided comparison.  */
6455 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6456 return omit_one_operand (type, integer_zero_node, arg00);
6457 if (TREE_OVERFLOW (hi))
6458 return fold_build2 (GE_EXPR, type, arg00, lo);
6459 if (TREE_OVERFLOW (lo))
6460 return fold_build2 (LE_EXPR, type, arg00, hi);
6461 return build_range_check (type, arg00, 1, lo, hi);
6464 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6465 return omit_one_operand (type, integer_one_node, arg00);
6466 if (TREE_OVERFLOW (hi))
6467 return fold_build2 (LT_EXPR, type, arg00, lo);
6468 if (TREE_OVERFLOW (lo))
6469 return fold_build2 (GT_EXPR, type, arg00, hi);
6470 return build_range_check (type, arg00, 0, lo, hi);
6473 if (TREE_OVERFLOW (lo))
6475 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6476 return omit_one_operand (type, tmp, arg00);
6478 return fold_build2 (LT_EXPR, type, arg00, lo);
6481 if (TREE_OVERFLOW (hi))
6483 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6484 return omit_one_operand (type, tmp, arg00);
6486 return fold_build2 (LE_EXPR, type, arg00, hi);
6489 if (TREE_OVERFLOW (hi))
6491 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6492 return omit_one_operand (type, tmp, arg00);
6494 return fold_build2 (GT_EXPR, type, arg00, hi);
6497 if (TREE_OVERFLOW (lo))
6499 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6500 return omit_one_operand (type, tmp, arg00);
6502 return fold_build2 (GE_EXPR, type, arg00, lo);
6512 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6513 equality/inequality test, then return a simplified form of the test
6514 using a sign testing. Otherwise return NULL. TYPE is the desired
/* Turn (A & sign-bit) !=/== 0 into A </>= 0 (see header above).
   NOTE(review): elided listing (line numbers jump); the result_type
   parameter line, final return and braces are not visible here.  */
6518 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6521 /* If this is testing a single bit, we can optimize the test. */
6522 if ((code == NE_EXPR || code == EQ_EXPR)
6523 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6524 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6526 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6527 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6528 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6530 if (arg00 != NULL_TREE
6531 /* This is only a win if casting to a signed type is cheap,
6532 i.e. when arg00's type is not a partial mode. */
6533 && TYPE_PRECISION (TREE_TYPE (arg00))
6534 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6536 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6537 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6538 result_type, fold_convert (stype, arg00),
6539 build_int_cst (stype, 0));
6546 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6547 equality/inequality test, then return a simplified form of
6548 the test using shifts and logical operations. Otherwise return
6549 NULL. TYPE is the desired result type. */
/* Rewrite a single-bit equality test using shifts and masks (see header
   above).
   NOTE(review): elided listing (line numbers jump); the result_type
   parameter line, declarations (tem, one, ops_unsigned), the #else arm
   of the LOAD_EXTEND_OP conditional, returns and braces are not visible
   here and are not reconstructed.  */
6552 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6555 /* If this is testing a single bit, we can optimize the test. */
6556 if ((code == NE_EXPR || code == EQ_EXPR)
6557 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6558 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6560 tree inner = TREE_OPERAND (arg0, 0);
6561 tree type = TREE_TYPE (arg0);
6562 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6563 enum machine_mode operand_mode = TYPE_MODE (type);
6565 tree signed_type, unsigned_type, intermediate_type;
6568 /* First, see if we can fold the single bit test into a sign-bit
6570 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6575 /* Otherwise we have (A & C) != 0 where C is a single bit,
6576 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6577 Similarly for (A & C) == 0. */
6579 /* If INNER is a right shift of a constant and it plus BITNUM does
6580 not overflow, adjust BITNUM and INNER. */
6581 if (TREE_CODE (inner) == RSHIFT_EXPR
6582 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6583 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6584 && bitnum < TYPE_PRECISION (type)
6585 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6586 bitnum - TYPE_PRECISION (type)))
6588 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6589 inner = TREE_OPERAND (inner, 0);
6592 /* If we are going to be able to omit the AND below, we must do our
6593 operations as unsigned. If we must use the AND, we have a choice.
6594 Normally unsigned is faster, but for some machines signed is. */
6595 #ifdef LOAD_EXTEND_OP
6596 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6597 && !flag_syntax_only) ? 0 : 1;
6602 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6603 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6604 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6605 inner = fold_convert (intermediate_type, inner);
6608 inner = build2 (RSHIFT_EXPR, intermediate_type,
6609 inner, size_int (bitnum));
6611 one = build_int_cst (intermediate_type, 1);
/* For == 0 the extracted bit must be flipped.  */
6613 if (code == EQ_EXPR)
6614 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6616 /* Put the AND last so it can combine with more things. */
6617 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6619 /* Make sure to return the proper type. */
6620 inner = fold_convert (result_type, inner);
6627 /* Check whether we are allowed to reorder operands arg0 and arg1,
6628 such that the evaluation of arg1 occurs before arg0. */
/* True if ARG1 may be evaluated before ARG0 (see header above); always
   true unless -fevaluation-order is in effect and both have effects.
   NOTE(review): elided listing; the `return 1` lines for the first two
   tests are not visible here.  */
6631 reorder_operands_p (tree arg0, tree arg1)
6633 if (! flag_evaluation_order)
6635 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6637 return ! TREE_SIDE_EFFECTS (arg0)
6638 && ! TREE_SIDE_EFFECTS (arg1);
6641 /* Test whether it is preferable to swap two operands, ARG0 and
6642 ARG1, for example because ARG0 is an integer constant and ARG1
6643 isn't. If REORDER is true, only recommend swapping if we can
6644 evaluate the operands in reverse order. */
/* NOTE(review): elided listing (line numbers jump); the `return 0/1`
   lines after each test and the final returns are not visible here.
   The visible tests rank operands: constants first, then SSA_NAMEs by
   version, then variables.  */
6647 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6649 STRIP_SIGN_NOPS (arg0);
6650 STRIP_SIGN_NOPS (arg1);
6652 if (TREE_CODE (arg1) == INTEGER_CST)
6654 if (TREE_CODE (arg0) == INTEGER_CST)
6657 if (TREE_CODE (arg1) == REAL_CST)
6659 if (TREE_CODE (arg0) == REAL_CST)
6662 if (TREE_CODE (arg1) == COMPLEX_CST)
6664 if (TREE_CODE (arg0) == COMPLEX_CST)
6667 if (TREE_CONSTANT (arg1))
6669 if (TREE_CONSTANT (arg0))
6675 if (reorder && flag_evaluation_order
6676 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6679 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6680 for commutative and comparison operators. Ensuring a canonical
6681 form allows the optimizers to find additional redundancies without
6682 having to explicitly check for both orderings. */
6683 if (TREE_CODE (arg0) == SSA_NAME
6684 && TREE_CODE (arg1) == SSA_NAME
6685 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6688 /* Put SSA_NAMEs last. */
6689 if (TREE_CODE (arg1) == SSA_NAME)
6691 if (TREE_CODE (arg0) == SSA_NAME)
6694 /* Put variables last. */
6703 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6704 ARG0 is extended to a wider type. */
/* Fold a comparison whose LHS was widened (see header above): either
   redo the comparison in the narrower type, or decide it statically
   when the constant RHS is outside the narrow type's range.
   NOTE(review): elided listing (line numbers jump); declarations
   (arg1_unw, min, max, above, below), the final switch labels and the
   NULL_TREE returns are not visible here.  */
6707 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6709 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6711 tree shorter_type, outer_type;
6715 if (arg0_unw == arg0)
6717 shorter_type = TREE_TYPE (arg0_unw);
6719 #ifdef HAVE_canonicalize_funcptr_for_compare
6720 /* Disable this optimization if we're casting a function pointer
6721 type on targets that require function pointer canonicalization. */
6722 if (HAVE_canonicalize_funcptr_for_compare
6723 && TREE_CODE (shorter_type) == POINTER_TYPE
6724 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6728 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6731 arg1_unw = get_unwidened (arg1, shorter_type);
6733 /* If possible, express the comparison in the shorter mode. */
6734 if ((code == EQ_EXPR || code == NE_EXPR
6735 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6736 && (TREE_TYPE (arg1_unw) == shorter_type
6737 || (TREE_CODE (arg1_unw) == INTEGER_CST
6738 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6739 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6740 && int_fits_type_p (arg1_unw, shorter_type))))
6741 return fold_build2 (code, type, arg0_unw,
6742 fold_convert (shorter_type, arg1_unw));
6744 if (TREE_CODE (arg1_unw) != INTEGER_CST
6745 || TREE_CODE (shorter_type) != INTEGER_TYPE
6746 || !int_fits_type_p (arg1_unw, shorter_type))
6749 /* If we are comparing with the integer that does not fit into the range
6750 of the shorter type, the result is known. */
6751 outer_type = TREE_TYPE (arg1_unw);
6752 min = lower_bound_in_type (outer_type, shorter_type);
6753 max = upper_bound_in_type (outer_type, shorter_type);
6755 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6757 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
/* Per-CODE constant results (switch labels elided from this listing).  */
6764 return omit_one_operand (type, integer_zero_node, arg0);
6769 return omit_one_operand (type, integer_one_node, arg0);
6775 return omit_one_operand (type, integer_one_node, arg0);
6777 return omit_one_operand (type, integer_zero_node, arg0);
6782 return omit_one_operand (type, integer_zero_node, arg0);
6784 return omit_one_operand (type, integer_one_node, arg0);
6793 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6794 ARG0 just the signedness is changed. */
/* Fold a comparison where ARG0 is a sign-changing (same-precision)
   conversion (see header above), by comparing in the inner type.
   NOTE(review): elided listing (line numbers jump); the arg0_inner
   declaration, some NULL_TREE returns, a condition continuation after
   line 6829, and braces are not visible here.  */
6797 fold_sign_changed_comparison (enum tree_code code, tree type,
6798 tree arg0, tree arg1)
6801 tree inner_type, outer_type;
6803 if (TREE_CODE (arg0) != NOP_EXPR
6804 && TREE_CODE (arg0) != CONVERT_EXPR)
6807 outer_type = TREE_TYPE (arg0);
6808 arg0_inner = TREE_OPERAND (arg0, 0);
6809 inner_type = TREE_TYPE (arg0_inner);
6811 #ifdef HAVE_canonicalize_funcptr_for_compare
6812 /* Disable this optimization if we're casting a function pointer
6813 type on targets that require function pointer canonicalization. */
6814 if (HAVE_canonicalize_funcptr_for_compare
6815 && TREE_CODE (inner_type) == POINTER_TYPE
6816 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6820 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6823 if (TREE_CODE (arg1) != INTEGER_CST
6824 && !((TREE_CODE (arg1) == NOP_EXPR
6825 || TREE_CODE (arg1) == CONVERT_EXPR)
6826 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6829 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
/* Re-express the constant RHS in the inner type, keeping any
   overflow flag.  */
6834 if (TREE_CODE (arg1) == INTEGER_CST)
6835 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6836 TREE_INT_CST_HIGH (arg1), 0,
6837 TREE_OVERFLOW (arg1));
6839 arg1 = fold_convert (inner_type, arg1);
6841 return fold_build2 (code, type, arg0_inner, arg1);
6844 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6845 step of the array. Reconstructs s and delta in the case of s * delta
6846 being an integer constant (and thus already folded).
6847 ADDR is the address. MULT is the multiplicative expression.
6848 If the function succeeds, the new address expression is returned. Otherwise
6849 NULL_TREE is returned. */
/* Move a multiplied offset into an ARRAY_REF index (see header above).
   NOTE(review): elided listing (line numbers jump); declarations (ret,
   pos, itype, mdim), several assignments, `break`s, NULL_TREE returns
   and braces are not visible here and are not reconstructed.  */
6852 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6854 tree s, delta, step;
6855 tree ref = TREE_OPERAND (addr, 0), pref;
6860 /* Canonicalize op1 into a possibly non-constant delta
6861 and an INTEGER_CST s. */
6862 if (TREE_CODE (op1) == MULT_EXPR)
6864 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6869 if (TREE_CODE (arg0) == INTEGER_CST)
6874 else if (TREE_CODE (arg1) == INTEGER_CST)
6882 else if (TREE_CODE (op1) == INTEGER_CST)
6889 /* Simulate we are delta * 1. */
6891 s = integer_one_node;
/* Walk down the reference chain looking for a matching ARRAY_REF.  */
6894 for (;; ref = TREE_OPERAND (ref, 0))
6896 if (TREE_CODE (ref) == ARRAY_REF)
6898 /* Remember if this was a multi-dimensional array. */
6899 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6902 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6906 step = array_ref_element_size (ref);
6907 if (TREE_CODE (step) != INTEGER_CST)
6912 if (! tree_int_cst_equal (step, s))
6917 /* Try if delta is a multiple of step. */
6918 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6924 /* Only fold here if we can verify we do not overflow one
6925 dimension of a multi-dimensional array. */
6930 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6931 || !INTEGRAL_TYPE_P (itype)
6932 || !TYPE_MAX_VALUE (itype)
6933 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6936 tmp = fold_binary (code, itype,
6937 fold_convert (itype,
6938 TREE_OPERAND (ref, 1)),
6939 fold_convert (itype, delta));
6941 || TREE_CODE (tmp) != INTEGER_CST
6942 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6951 if (!handled_component_p (ref))
6955 /* We found the suitable array reference. So copy everything up to it,
6956 and replace the index. */
6958 pref = TREE_OPERAND (addr, 0);
6959 ret = copy_node (pref);
6964 pref = TREE_OPERAND (pref, 0);
6965 TREE_OPERAND (pos, 0) = copy_node (pref);
6966 pos = TREE_OPERAND (pos, 0);
6969 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6970 fold_convert (itype,
6971 TREE_OPERAND (pos, 1)),
6972 fold_convert (itype, delta));
6974 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6978 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6979 means A >= Y && A != MAX, but in this case we know that
6980 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
/* Fold A + 1 > Y to A >= Y given the bound A < X (see header above):
   extract A from BOUND, A1 and Y from INEQ, and require A1 - A == 1.
   NOTE(review): elided listing (line numbers jump); the NULL_TREE
   returns for the `else` cases are not visible here.  */
6983 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6985 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6987 if (TREE_CODE (bound) == LT_EXPR)
6988 a = TREE_OPERAND (bound, 0);
6989 else if (TREE_CODE (bound) == GT_EXPR)
6990 a = TREE_OPERAND (bound, 1);
6994 typea = TREE_TYPE (a);
6995 if (!INTEGRAL_TYPE_P (typea)
6996 && !POINTER_TYPE_P (typea))
6999 if (TREE_CODE (ineq) == LT_EXPR)
7001 a1 = TREE_OPERAND (ineq, 1);
7002 y = TREE_OPERAND (ineq, 0);
7004 else if (TREE_CODE (ineq) == GT_EXPR)
7006 a1 = TREE_OPERAND (ineq, 0);
7007 y = TREE_OPERAND (ineq, 1);
7012 if (TREE_TYPE (a1) != typea)
/* Only valid when A1 is exactly A + 1.  */
7015 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
7016 if (!integer_onep (diff))
7019 return fold_build2 (GE_EXPR, type, a, y);
7022 /* Fold a sum or difference of at least one multiplication.
7023 Returns the folded tree or NULL if no simplification could be made. */
/* Factor a sum/difference of multiplies (see header above):
   (A*C) +- (B*C) -> (A+-B)*C, trying all operand pairings.
   NOTE(review): elided listing (line numbers jump); `else` branches
   assigning arg00/arg10, the power-of-two factoring arithmetic, the
   `same == NULL` handling and the final NULL return are not visible
   here and are not reconstructed.  */
7026 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7028 tree arg00, arg01, arg10, arg11;
7029 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7031 /* (A * C) +- (B * C) -> (A+-B) * C.
7032 (A * C) +- A -> A * (C+-1).
7033 We are most concerned about the case where C is a constant,
7034 but other combinations show up during loop reduction. Since
7035 it is not difficult, try all four possibilities. */
7037 if (TREE_CODE (arg0) == MULT_EXPR)
7039 arg00 = TREE_OPERAND (arg0, 0);
7040 arg01 = TREE_OPERAND (arg0, 1);
/* Non-MULT operand: treat it as operand * 1.  */
7045 arg01 = build_one_cst (type);
7047 if (TREE_CODE (arg1) == MULT_EXPR)
7049 arg10 = TREE_OPERAND (arg1, 0);
7050 arg11 = TREE_OPERAND (arg1, 1);
7055 arg11 = build_one_cst (type);
7059 if (operand_equal_p (arg01, arg11, 0))
7060 same = arg01, alt0 = arg00, alt1 = arg10;
7061 else if (operand_equal_p (arg00, arg10, 0))
7062 same = arg00, alt0 = arg01, alt1 = arg11;
7063 else if (operand_equal_p (arg00, arg11, 0))
7064 same = arg00, alt0 = arg01, alt1 = arg10;
7065 else if (operand_equal_p (arg01, arg10, 0))
7066 same = arg01, alt0 = arg00, alt1 = arg11;
7068 /* No identical multiplicands; see if we can find a common
7069 power-of-two factor in non-power-of-two multiplies. This
7070 can help in multi-dimensional array access. */
7071 else if (host_integerp (arg01, 0)
7072 && host_integerp (arg11, 0))
7074 HOST_WIDE_INT int01, int11, tmp;
7077 int01 = TREE_INT_CST_LOW (arg01);
7078 int11 = TREE_INT_CST_LOW (arg11);
7080 /* Move min of absolute values to int11. */
7081 if ((int01 >= 0 ? int01 : -int01)
7082 < (int11 >= 0 ? int11 : -int11))
7084 tmp = int01, int01 = int11, int11 = tmp;
7085 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7092 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7094 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7095 build_int_cst (TREE_TYPE (arg00),
7100 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7105 return fold_build2 (MULT_EXPR, type,
7106 fold_build2 (code, type,
7107 fold_convert (type, alt0),
7108 fold_convert (type, alt1)),
7109 fold_convert (type, same));
7114 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7115 specified by EXPR into the buffer PTR of length LEN bytes.
7116 Return the number of bytes placed in the buffer, or zero
/* upon failure, i.e. when the constant does not fit in LEN bytes.  */
7120 native_encode_int (tree expr, unsigned char *ptr, int len)
7122 tree type = TREE_TYPE (expr);
/* Size of the constant in target bytes, taken from its machine mode.  */
7123 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7124 int byte, offset, word, words;
7125 unsigned char value;
/* Caller's buffer must be able to hold the whole constant.  */
7127 if (total_bytes > len)
7129 words = total_bytes / UNITS_PER_WORD;
/* Walk the value byte by byte, starting from the least significant
   bits; byte 0 corresponds to bit position 0.  */
7131 for (byte = 0; byte < total_bytes; byte++)
7133 int bitpos = byte * BITS_PER_UNIT;
/* Low-order bytes come from the low HOST_WIDE_INT of the constant,
   the rest from the high HOST_WIDE_INT.  */
7134 if (bitpos < HOST_BITS_PER_WIDE_INT)
7135 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7137 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7138 >> (bitpos - HOST_BITS_PER_WIDE_INT));
/* Translate the logical byte index into the target's memory layout,
   honoring both word endianness and byte endianness.  */
7140 if (total_bytes > UNITS_PER_WORD)
7142 word = byte / UNITS_PER_WORD;
7143 if (WORDS_BIG_ENDIAN)
7144 word = (words - 1) - word;
7145 offset = word * UNITS_PER_WORD;
7146 if (BYTES_BIG_ENDIAN)
7147 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7149 offset += byte % UNITS_PER_WORD;
/* Single-word case: only byte order matters.  */
7152 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7153 ptr[offset] = value;
7159 /* Subroutine of native_encode_expr. Encode the REAL_CST
7160 specified by EXPR into the buffer PTR of length LEN bytes.
7161 Return the number of bytes placed in the buffer, or zero
/* upon failure (buffer too small for the mode's size).  */
7165 native_encode_real (tree expr, unsigned char *ptr, int len)
7167 tree type = TREE_TYPE (expr);
7168 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7169 int byte, offset, word, words, bitpos;
7170 unsigned char value;
7172 /* There are always 32 bits in each long, no matter the size of
7173 the hosts long. We handle floating point representations with
/* ... the target image delivered by real_to_target in 32-bit groups
   (see the tmp[] buffer it fills, declared on an elided line).  */
7177 if (total_bytes > len)
/* Words per 32-bit group, used for word-endianness correction below.  */
7179 words = 32 / UNITS_PER_WORD;
/* Convert the internal REAL_VALUE_TYPE into the target's bit image.  */
7181 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7183 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7184 bitpos += BITS_PER_UNIT)
/* Byte index within the current 32-bit group.  */
7186 byte = (bitpos / BITS_PER_UNIT) & 3;
7187 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
/* Apply word/byte endianness inside each 32-bit group.  */
7189 if (UNITS_PER_WORD < 4)
7191 word = byte / UNITS_PER_WORD;
7192 if (WORDS_BIG_ENDIAN)
7193 word = (words - 1) - word;
7194 offset = word * UNITS_PER_WORD;
7195 if (BYTES_BIG_ENDIAN)
7196 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7198 offset += byte % UNITS_PER_WORD;
7201 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
/* Place the byte into its group: group base + endian-corrected offset.  */
7202 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7207 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7208 specified by EXPR into the buffer PTR of length LEN bytes.
7209 Return the number of bytes placed in the buffer, or zero
/* upon failure of either part's encoding.  */
7213 native_encode_complex (tree expr, unsigned char *ptr, int len)
/* Encode the real part at the start of the buffer ...  */
7218 part = TREE_REALPART (expr);
7219 rsize = native_encode_expr (part, ptr, len);
/* ... then the imaginary part immediately after it.  */
7222 part = TREE_IMAGPART (expr);
7223 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7226 return rsize + isize;
7230 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7231 specified by EXPR into the buffer PTR of length LEN bytes.
7232 Return the number of bytes placed in the buffer, or zero
/* upon failure (element too large for remaining space, or an
   element that cannot be encoded).  */
7236 native_encode_vector (tree expr, unsigned char *ptr, int len)
7238 int i, size, offset, count;
7239 tree itype, elem, elements;
7242 elements = TREE_VECTOR_CST_ELTS (expr);
7243 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
/* Element type and its per-element byte size.  */
7244 itype = TREE_TYPE (TREE_TYPE (expr));
7245 size = GET_MODE_SIZE (TYPE_MODE (itype));
/* Encode each subpart in order; the elements live on a TREE_LIST.  */
7246 for (i = 0; i < count; i++)
7250 elem = TREE_VALUE (elements);
7251 elements = TREE_CHAIN (elements);
/* Each element must encode to exactly SIZE bytes, or we fail.  */
7258 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
/* Trailing subparts missing from the list are zero-filled.  */
7263 if (offset + size > len)
7265 memset (ptr+offset, 0, size);
7273 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7274 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7275 buffer PTR of length LEN bytes. Return the number of bytes
7276 placed in the buffer, or zero upon failure. */
7279 native_encode_expr (tree expr, unsigned char *ptr, int len)
/* Dispatch on the constant's tree code; the case labels are on
   elided lines, matching the helper called on each return.  */
7281 switch (TREE_CODE (expr))
7284 return native_encode_int (expr, ptr, len);
7287 return native_encode_real (expr, ptr, len);
7290 return native_encode_complex (expr, ptr, len);
7293 return native_encode_vector (expr, ptr, len);
7301 /* Subroutine of native_interpret_expr. Interpret the contents of
7302 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7303 If the buffer cannot be interpreted, return NULL_TREE. */
7306 native_interpret_int (tree type, unsigned char *ptr, int len)
7308 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7309 int byte, offset, word, words;
7310 unsigned char value;
/* Accumulators for the low and high halves of the constant.
   Fixed: was "unsigned int HOST_WIDE_INT lo" -- invalid C once
   HOST_WIDE_INT expands to long/long long, and inconsistent with
   the "(unsigned HOST_WIDE_INT)" casts used below.  */
7311 unsigned HOST_WIDE_INT lo = 0;
7312 HOST_WIDE_INT hi = 0;
/* Fail if the buffer is too short or the type is wider than the
   two HOST_WIDE_INTs an INTEGER_CST can represent.  */
7314 if (total_bytes > len)
7316 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7318 words = total_bytes / UNITS_PER_WORD;
/* Gather bytes from the target-ordered buffer, undoing word and
   byte endianness -- the exact inverse of native_encode_int.  */
7320 for (byte = 0; byte < total_bytes; byte++)
7322 int bitpos = byte * BITS_PER_UNIT;
7323 if (total_bytes > UNITS_PER_WORD)
7325 word = byte / UNITS_PER_WORD;
7326 if (WORDS_BIG_ENDIAN)
7327 word = (words - 1) - word;
7328 offset = word * UNITS_PER_WORD;
7329 if (BYTES_BIG_ENDIAN)
7330 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7332 offset += byte % UNITS_PER_WORD;
7335 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7336 value = ptr[offset];
/* Low bit positions accumulate into LO, the rest into HI.  */
7338 if (bitpos < HOST_BITS_PER_WIDE_INT)
7339 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7341 hi |= (unsigned HOST_WIDE_INT) value
7342 << (bitpos - HOST_BITS_PER_WIDE_INT);
/* Build the INTEGER_CST, letting the type truncate/extend the value.  */
7345 return build_int_cst_wide_type (type, lo, hi);
7349 /* Subroutine of native_interpret_expr. Interpret the contents of
7350 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7351 If the buffer cannot be interpreted, return NULL_TREE. */
7354 native_interpret_real (tree type, unsigned char *ptr, int len)
7356 enum machine_mode mode = TYPE_MODE (type);
7357 int total_bytes = GET_MODE_SIZE (mode);
7358 int byte, offset, word, words, bitpos;
7359 unsigned char value;
7360 /* There are always 32 bits in each long, no matter the size of
7361 the hosts long. We handle floating point representations with
/* ... the bit image collected into tmp[] (declared on an elided
   line) in 32-bit groups, as real_from_target expects.  */
7366 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
/* 24 bytes is the largest image tmp[] can hold.  */
7367 if (total_bytes > len || total_bytes > 24)
7369 words = 32 / UNITS_PER_WORD;
/* Start from a zeroed image; bytes are OR-ed in below.  */
7371 memset (tmp, 0, sizeof (tmp));
7372 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7373 bitpos += BITS_PER_UNIT)
/* Byte index within the current 32-bit group.  */
7375 byte = (bitpos / BITS_PER_UNIT) & 3;
/* Undo word/byte endianness inside each group -- the exact
   inverse of the layout used by native_encode_real.  */
7376 if (UNITS_PER_WORD < 4)
7378 word = byte / UNITS_PER_WORD;
7379 if (WORDS_BIG_ENDIAN)
7380 word = (words - 1) - word;
7381 offset = word * UNITS_PER_WORD;
7382 if (BYTES_BIG_ENDIAN)
7383 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7385 offset += byte % UNITS_PER_WORD;
7388 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7389 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7391 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
/* Convert the target bit image back into an internal real value.  */
7394 real_from_target (&r, tmp, mode);
7395 return build_real (type, r);
7399 /* Subroutine of native_interpret_expr. Interpret the contents of
7400 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7401 If the buffer cannot be interpreted, return NULL_TREE. */
7404 native_interpret_complex (tree type, unsigned char *ptr, int len)
7406 tree etype, rpart, ipart;
/* Element type of the complex type; each part occupies SIZE bytes.  */
7409 etype = TREE_TYPE (type);
7410 size = GET_MODE_SIZE (TYPE_MODE (etype));
/* Real part first, imaginary part immediately after -- mirroring
   the layout produced by native_encode_complex.  */
7413 rpart = native_interpret_expr (etype, ptr, size);
7416 ipart = native_interpret_expr (etype, ptr+size, size);
7419 return build_complex (type, rpart, ipart);
7423 /* Subroutine of native_interpret_expr. Interpret the contents of
7424 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7425 If the buffer cannot be interpreted, return NULL_TREE. */
7428 native_interpret_vector (tree type, unsigned char *ptr, int len)
7430 tree etype, elem, elements;
/* Element type and per-element byte size of the vector.  */
7433 etype = TREE_TYPE (type);
7434 size = GET_MODE_SIZE (TYPE_MODE (etype));
7435 count = TYPE_VECTOR_SUBPARTS (type);
/* All elements must fit inside the supplied buffer.  */
7436 if (size * count > len)
7439 elements = NULL_TREE;
/* Build the element TREE_LIST back-to-front so consing keeps the
   elements in their original order.  */
7440 for (i = count - 1; i >= 0; i--)
7442 elem = native_interpret_expr (etype, ptr+(i*size), size);
7445 elements = tree_cons (NULL_TREE, elem, elements);
7447 return build_vector (type, elements);
7451 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7452 the buffer PTR of length LEN as a constant of type TYPE. For
7453 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7454 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7455 return NULL_TREE. */
7458 native_interpret_expr (tree type, unsigned char *ptr, int len)
/* Dispatch on TYPE's tree code; the case labels are on elided
   lines, matching the helper called on each return.  */
7460 switch (TREE_CODE (type))
7465 return native_interpret_int (type, ptr, len);
7468 return native_interpret_real (type, ptr, len);
7471 return native_interpret_complex (type, ptr, len);
7474 return native_interpret_vector (type, ptr, len);
7482 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7483 TYPE at compile-time. If we're unable to perform the conversion
7484 return NULL_TREE. */
7487 fold_view_convert_expr (tree type, tree expr)
7489 /* We support up to 512-bit values (for V8DFmode). */
7490 unsigned char buffer[64];
7493 /* Check that the host and target are sane. */
/* The encode/decode helpers assume 8-bit host chars and 8-bit
   target units; bail out otherwise.  */
7494 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
/* Serialize EXPR to its target byte image, then reinterpret those
   same bytes as a constant of TYPE.  */
7497 len = native_encode_expr (expr, buffer, sizeof (buffer));
7501 return native_interpret_expr (type, buffer, len);
7505 /* Fold a unary expression of code CODE and type TYPE with operand
7506 OP0. Return the folded expression if folding is successful.
7507 Otherwise, return NULL_TREE. */
7510 fold_unary (enum tree_code code, tree type, tree op0)
7514 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Only genuine single-operand expression codes are accepted.  */
7516 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7517 && TREE_CODE_LENGTH (code) == 1);
/* For conversions and ABS, preserve the argument's signedness
   while stripping nops; other codes use a plain strip (on elided
   lines).  */
7522 if (code == NOP_EXPR || code == CONVERT_EXPR
7523 || code == FLOAT_EXPR || code == ABS_EXPR)
7525 /* Don't use STRIP_NOPS, because signedness of argument type
7527 STRIP_SIGN_NOPS (arg0);
7531 /* Strip any conversions that don't change the mode. This
7532 is safe for every expression, except for a comparison
7533 expression because its signedness is derived from its
7536 Note that this is done as an internal manipulation within
7537 the constant folder, in order to find the simplest
7538 representation of the arguments so that their form can be
7539 studied. In any cases, the appropriate type conversions
7540 should be put back in the tree that will get out of the
/* Distribute unary operations through COMPOUND_EXPR and COND_EXPR
   so the operation lands on the value-producing operand(s).  */
7546 if (TREE_CODE_CLASS (code) == tcc_unary)
7548 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7549 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7550 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7551 else if (TREE_CODE (arg0) == COND_EXPR)
7553 tree arg01 = TREE_OPERAND (arg0, 1);
7554 tree arg02 = TREE_OPERAND (arg0, 2);
7555 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7556 arg01 = fold_build1 (code, type, arg01);
7557 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7558 arg02 = fold_build1 (code, type, arg02);
7559 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7562 /* If this was a conversion, and all we did was to move into
7563 inside the COND_EXPR, bring it back out. But leave it if
7564 it is a conversion from integer to integer and the
7565 result precision is no wider than a word since such a
7566 conversion is cheap and may be optimized away by combine,
7567 while it couldn't if it were outside the COND_EXPR. Then return
7568 so we don't get into an infinite recursion loop taking the
7569 conversion out and then back in. */
7571 if ((code == NOP_EXPR || code == CONVERT_EXPR
7572 || code == NON_LVALUE_EXPR)
7573 && TREE_CODE (tem) == COND_EXPR
7574 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7575 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7576 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7577 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7578 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7579 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7580 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7582 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7583 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7584 || flag_syntax_only))
/* Rebuild as (T) (cond ? x : y) with the conversion outside.  */
7585 tem = build1 (code, type,
7587 TREE_TYPE (TREE_OPERAND
7588 (TREE_OPERAND (tem, 1), 0)),
7589 TREE_OPERAND (tem, 0),
7590 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7591 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
/* A comparison operand: either retype it directly (boolean) or
   expand it to a COND_EXPR over the folded 1/0 arms.  */
7594 else if (COMPARISON_CLASS_P (arg0))
7596 if (TREE_CODE (type) == BOOLEAN_TYPE)
7598 arg0 = copy_node (arg0);
7599 TREE_TYPE (arg0) = type;
7602 else if (TREE_CODE (type) != INTEGER_TYPE)
7603 return fold_build3 (COND_EXPR, type, arg0,
7604 fold_build1 (code, type,
7606 fold_build1 (code, type,
7607 integer_zero_node));
/* Conversion cases (case labels partly elided above this one).  */
7616 case FIX_TRUNC_EXPR:
7617 if (TREE_TYPE (op0) == type)
7620 /* If we have (type) (a CMP b) and type is an integral type, return
7621 new expression involving the new type. */
7622 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7623 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7624 TREE_OPERAND (op0, 1));
7626 /* Handle cases of two conversions in a row. */
7627 if (TREE_CODE (op0) == NOP_EXPR
7628 || TREE_CODE (op0) == CONVERT_EXPR)
/* Classify the inside, intermediate and final types by kind,
   precision and signedness for the elision tests below.  */
7630 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7631 tree inter_type = TREE_TYPE (op0);
7632 int inside_int = INTEGRAL_TYPE_P (inside_type);
7633 int inside_ptr = POINTER_TYPE_P (inside_type);
7634 int inside_float = FLOAT_TYPE_P (inside_type);
7635 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7636 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7637 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7638 int inter_int = INTEGRAL_TYPE_P (inter_type);
7639 int inter_ptr = POINTER_TYPE_P (inter_type);
7640 int inter_float = FLOAT_TYPE_P (inter_type);
7641 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7642 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7643 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7644 int final_int = INTEGRAL_TYPE_P (type);
7645 int final_ptr = POINTER_TYPE_P (type);
7646 int final_float = FLOAT_TYPE_P (type);
7647 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7648 unsigned int final_prec = TYPE_PRECISION (type);
7649 int final_unsignedp = TYPE_UNSIGNED (type);
7651 /* In addition to the cases of two conversions in a row
7652 handled below, if we are converting something to its own
7653 type via an object of identical or wider precision, neither
7654 conversion is needed. */
7655 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7656 && (((inter_int || inter_ptr) && final_int)
7657 || (inter_float && final_float))
7658 && inter_prec >= final_prec)
7659 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7661 /* Likewise, if the intermediate and final types are either both
7662 float or both integer, we don't need the middle conversion if
7663 it is wider than the final type and doesn't change the signedness
7664 (for integers). Avoid this if the final type is a pointer
7665 since then we sometimes need the inner conversion. Likewise if
7666 the outer has a precision not equal to the size of its mode. */
7667 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7668 || (inter_float && inside_float)
7669 || (inter_vec && inside_vec))
7670 && inter_prec >= inside_prec
7671 && (inter_float || inter_vec
7672 || inter_unsignedp == inside_unsignedp)
7673 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7674 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7676 && (! final_vec || inter_prec == inside_prec))
7677 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7679 /* If we have a sign-extension of a zero-extended value, we can
7680 replace that by a single zero-extension. */
7681 if (inside_int && inter_int && final_int
7682 && inside_prec < inter_prec && inter_prec < final_prec
7683 && inside_unsignedp && !inter_unsignedp)
7684 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7686 /* Two conversions in a row are not needed unless:
7687 - some conversion is floating-point (overstrict for now), or
7688 - some conversion is a vector (overstrict for now), or
7689 - the intermediate type is narrower than both initial and
7691 - the intermediate type and innermost type differ in signedness,
7692 and the outermost type is wider than the intermediate, or
7693 - the initial type is a pointer type and the precisions of the
7694 intermediate and final types differ, or
7695 - the final type is a pointer type and the precisions of the
7696 initial and intermediate types differ.
7697 - the final type is a pointer type and the initial type not
7698 - the initial type is a pointer to an array and the final type
7700 if (! inside_float && ! inter_float && ! final_float
7701 && ! inside_vec && ! inter_vec && ! final_vec
7702 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7703 && ! (inside_int && inter_int
7704 && inter_unsignedp != inside_unsignedp
7705 && inter_prec < final_prec)
7706 && ((inter_unsignedp && inter_prec > inside_prec)
7707 == (final_unsignedp && final_prec > inter_prec))
7708 && ! (inside_ptr && inter_prec != final_prec)
7709 && ! (final_ptr && inside_prec != inter_prec)
7710 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7711 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7712 && final_ptr == inside_ptr
7714 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7715 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7716 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7719 /* Handle (T *)&A.B.C for A being of type T and B and C
7720 living at offset zero. This occurs frequently in
7721 C++ upcasting and then accessing the base. */
7722 if (TREE_CODE (op0) == ADDR_EXPR
7723 && POINTER_TYPE_P (type)
7724 && handled_component_p (TREE_OPERAND (op0, 0)))
7726 HOST_WIDE_INT bitsize, bitpos;
7728 enum machine_mode mode;
7729 int unsignedp, volatilep;
7730 tree base = TREE_OPERAND (op0, 0);
7731 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7732 &mode, &unsignedp, &volatilep, false);
7733 /* If the reference was to a (constant) zero offset, we can use
7734 the address of the base if it has the same base type
7735 as the result type. */
7736 if (! offset && bitpos == 0
7737 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7738 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7739 return fold_convert (type, build_fold_addr_expr (base));
7742 /* Convert (type *)&A into &A->field_of_type_and_offset_0. */
7743 if (TREE_CODE (op0) == ADDR_EXPR && POINTER_TYPE_P (type)
7744 && (tem = maybe_fold_offset_to_component_ref
7745 (TREE_TYPE (TREE_OPERAND (op0, 0)), TREE_OPERAND (op0, 0),
7746 integer_zero_node, TREE_TYPE (type), false)))
7747 return build_fold_addr_expr_with_type (tem, type);
/* Hoist a conversion out of a non-bitfield assignment's RHS.  */
7749 if ((TREE_CODE (op0) == MODIFY_EXPR
7750 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7751 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7752 /* Detect assigning a bitfield. */
7753 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7755 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7757 /* Don't leave an assignment inside a conversion
7758 unless assigning a bitfield. */
7759 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7760 /* First do the assignment, then return converted constant. */
7761 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7762 TREE_NO_WARNING (tem) = 1;
7763 TREE_USED (tem) = 1;
7767 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7768 constants (if x has signed type, the sign bit cannot be set
7769 in c). This folds extension into the BIT_AND_EXPR. */
7770 if (INTEGRAL_TYPE_P (type)
7771 && TREE_CODE (type) != BOOLEAN_TYPE
7772 && TREE_CODE (op0) == BIT_AND_EXPR
7773 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7776 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
/* The transformation is always safe when not widening; when
   widening, the mask must leave the sign bit clear.  */
7779 if (TYPE_UNSIGNED (TREE_TYPE (and))
7780 || (TYPE_PRECISION (type)
7781 <= TYPE_PRECISION (TREE_TYPE (and))))
7783 else if (TYPE_PRECISION (TREE_TYPE (and1))
7784 <= HOST_BITS_PER_WIDE_INT
7785 && host_integerp (and1, 1))
7787 unsigned HOST_WIDE_INT cst;
7789 cst = tree_low_cst (and1, 1);
7790 cst &= (HOST_WIDE_INT) -1
7791 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7792 change = (cst == 0);
7793 #ifdef LOAD_EXTEND_OP
/* On targets whose loads extend, do the AND in the unsigned
   variant of the operand type.  */
7795 && !flag_syntax_only
7796 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7799 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7800 and0 = fold_convert (uns, and0);
7801 and1 = fold_convert (uns, and1);
7807 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7808 TREE_INT_CST_HIGH (and1), 0,
7809 TREE_OVERFLOW (and1));
7810 return fold_build2 (BIT_AND_EXPR, type,
7811 fold_convert (type, and0), tem);
7815 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7816 T2 being pointers to types of the same size. */
7817 if (POINTER_TYPE_P (type)
7818 && BINARY_CLASS_P (arg0)
7819 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7820 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7822 tree arg00 = TREE_OPERAND (arg0, 0);
7824 tree t1 = TREE_TYPE (arg00);
7825 tree tt0 = TREE_TYPE (t0);
7826 tree tt1 = TREE_TYPE (t1);
7827 tree s0 = TYPE_SIZE (tt0);
7828 tree s1 = TYPE_SIZE (tt1);
/* Only safe when both pointed-to sizes are known and equal.  */
7830 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7831 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7832 TREE_OPERAND (arg0, 1));
7835 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7836 of the same precision, and X is a integer type not narrower than
7837 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7838 if (INTEGRAL_TYPE_P (type)
7839 && TREE_CODE (op0) == BIT_NOT_EXPR
7840 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7841 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7842 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7843 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7845 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7846 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7847 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7848 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
/* Fall back to compile-time constant conversion, if possible.  */
7851 tem = fold_convert_const (code, type, arg0);
7852 return tem ? tem : NULL_TREE;
7854 case VIEW_CONVERT_EXPR:
7855 if (TREE_TYPE (op0) == type)
/* Collapse nested VIEW_CONVERTs; otherwise try folding through
   the native byte representation.  */
7857 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7858 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7859 return fold_view_convert_expr (type, op0);
/* NEGATE_EXPR (case label elided): delegate to fold_negate_expr.  */
7862 tem = fold_negate_expr (arg0);
7864 return fold_convert (type, tem);
/* ABS_EXPR (case label elided).  */
7868 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7869 return fold_abs_const (arg0, type);
7870 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7871 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7872 /* Convert fabs((double)float) into (double)fabsf(float). */
7873 else if (TREE_CODE (arg0) == NOP_EXPR
7874 && TREE_CODE (type) == REAL_TYPE)
7876 tree targ0 = strip_float_extensions (arg0);
7878 return fold_convert (type, fold_build1 (ABS_EXPR,
7882 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7883 else if (TREE_CODE (arg0) == ABS_EXPR)
7885 else if (tree_expr_nonnegative_p (arg0))
7888 /* Strip sign ops from argument. */
7889 if (TREE_CODE (type) == REAL_TYPE)
7891 tem = fold_strip_sign_ops (arg0);
7893 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
/* CONJ_EXPR (case label elided): conj of a real is the value
   itself; otherwise negate the imaginary part.  */
7898 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7899 return fold_convert (type, arg0);
7900 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7902 tree itype = TREE_TYPE (type);
7903 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7904 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7905 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7907 if (TREE_CODE (arg0) == COMPLEX_CST)
7909 tree itype = TREE_TYPE (type);
7910 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7911 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7912 return build_complex (type, rpart, negate_expr (ipart));
7914 if (TREE_CODE (arg0) == CONJ_EXPR)
7915 return fold_convert (type, TREE_OPERAND (arg0, 0));
/* BIT_NOT_EXPR (case label elided).  */
7919 if (TREE_CODE (arg0) == INTEGER_CST)
7920 return fold_not_const (arg0, type);
7921 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7922 return TREE_OPERAND (arg0, 0);
7923 /* Convert ~ (-A) to A - 1. */
7924 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7925 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7926 build_int_cst (type, 1));
7927 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7928 else if (INTEGRAL_TYPE_P (type)
7929 && ((TREE_CODE (arg0) == MINUS_EXPR
7930 && integer_onep (TREE_OPERAND (arg0, 1)))
7931 || (TREE_CODE (arg0) == PLUS_EXPR
7932 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7933 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7934 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7935 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7936 && (tem = fold_unary (BIT_NOT_EXPR, type,
7938 TREE_OPERAND (arg0, 0)))))
7939 return fold_build2 (BIT_XOR_EXPR, type, tem,
7940 fold_convert (type, TREE_OPERAND (arg0, 1)));
7941 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7942 && (tem = fold_unary (BIT_NOT_EXPR, type,
7944 TREE_OPERAND (arg0, 1)))))
7945 return fold_build2 (BIT_XOR_EXPR, type,
7946 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7950 case TRUTH_NOT_EXPR:
7951 /* The argument to invert_truthvalue must have Boolean type. */
7952 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7953 arg0 = fold_convert (boolean_type_node, arg0);
7955 /* Note that the operand of this must be an int
7956 and its values must be 0 or 1.
7957 ("true" is a fixed value perhaps depending on the language,
7958 but we don't handle values other than 1 correctly yet.) */
7959 tem = fold_truth_not_expr (arg0);
7962 return fold_convert (type, tem);
/* REALPART_EXPR (case label elided).  */
7965 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7966 return fold_convert (type, arg0);
7967 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7968 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7969 TREE_OPERAND (arg0, 1));
7970 if (TREE_CODE (arg0) == COMPLEX_CST)
7971 return fold_convert (type, TREE_REALPART (arg0));
/* Distribute realpart over complex addition/subtraction.  */
7972 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7974 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7975 tem = fold_build2 (TREE_CODE (arg0), itype,
7976 fold_build1 (REALPART_EXPR, itype,
7977 TREE_OPERAND (arg0, 0)),
7978 fold_build1 (REALPART_EXPR, itype,
7979 TREE_OPERAND (arg0, 1)));
7980 return fold_convert (type, tem);
7982 if (TREE_CODE (arg0) == CONJ_EXPR)
7984 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7985 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7986 return fold_convert (type, tem);
/* realpart (cexpi (x)) folds to cos (x).  */
7988 if (TREE_CODE (arg0) == CALL_EXPR)
7990 tree fn = get_callee_fndecl (arg0);
7991 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7992 switch (DECL_FUNCTION_CODE (fn))
7994 CASE_FLT_FN (BUILT_IN_CEXPI):
7995 fn = mathfn_built_in (type, BUILT_IN_COS);
7997 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
/* IMAGPART_EXPR (case label elided): mirrors REALPART_EXPR.  */
8007 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8008 return fold_convert (type, integer_zero_node);
8009 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8010 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8011 TREE_OPERAND (arg0, 0));
8012 if (TREE_CODE (arg0) == COMPLEX_CST)
8013 return fold_convert (type, TREE_IMAGPART (arg0));
8014 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8016 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8017 tem = fold_build2 (TREE_CODE (arg0), itype,
8018 fold_build1 (IMAGPART_EXPR, itype,
8019 TREE_OPERAND (arg0, 0)),
8020 fold_build1 (IMAGPART_EXPR, itype,
8021 TREE_OPERAND (arg0, 1)));
8022 return fold_convert (type, tem);
8024 if (TREE_CODE (arg0) == CONJ_EXPR)
8026 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8027 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8028 return fold_convert (type, negate_expr (tem));
/* imagpart (cexpi (x)) folds to sin (x).  */
8030 if (TREE_CODE (arg0) == CALL_EXPR)
8032 tree fn = get_callee_fndecl (arg0);
8033 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8034 switch (DECL_FUNCTION_CODE (fn))
8036 CASE_FLT_FN (BUILT_IN_CEXPI):
8037 fn = mathfn_built_in (type, BUILT_IN_SIN);
8039 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8050 } /* switch (code) */
8053 /* Fold a binary expression of code CODE and type TYPE with operands
8054 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8055 Return the folded expression if folding is successful. Otherwise,
8056 return NULL_TREE. */
8059 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8061 enum tree_code compl_code;
/* COMPL_CODE is the complementary operation: MIN for MAX and vice
   versa; any other CODE falls through (return on an elided line).  */
8063 if (code == MIN_EXPR)
8064 compl_code = MAX_EXPR;
8065 else if (code == MAX_EXPR)
8066 compl_code = MIN_EXPR;
/* Each rule below keeps one operand and uses omit_one_operand so
   any side effects of the dropped operand are preserved.  */
8070 /* MIN (MAX (a, b), b) == b. */
8071 if (TREE_CODE (op0) == compl_code
8072 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8073 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0))
8075 /* MIN (MAX (b, a), b) == b. */
8076 if (TREE_CODE (op0) == compl_code
8077 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8078 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8079 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8081 /* MIN (a, MAX (a, b)) == a. */
8082 if (TREE_CODE (op1) == compl_code
8083 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8084 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8085 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8087 /* MIN (a, MAX (b, a)) == a. */
8088 if (TREE_CODE (op1) == compl_code
8089 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8090 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8091 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8096 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8097 by changing CODE to reduce the magnitude of constants involved in
8098 ARG0 of the comparison.
8099 Returns a canonicalized comparison tree if a simplification was
8100 possible, otherwise returns NULL_TREE.
8101 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8102 valid if signed overflow is undefined. */
8105 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8106 tree arg0, tree arg1,
8107 bool *strict_overflow_p)
8109 enum tree_code code0 = TREE_CODE (arg0);
8110 tree t, cst0 = NULL_TREE;
8114 /* Match A +- CST code arg1 and CST code arg1. */
8115 if (!(((code0 == MINUS_EXPR
8116 || code0 == PLUS_EXPR)
8117 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8118 || code0 == INTEGER_CST))
8121 /* Identify the constant in arg0 and its sign. */
/* For a bare INTEGER_CST the constant is arg0 itself (branch body
   on an elided line); otherwise it is the second operand.  */
8122 if (code0 == INTEGER_CST)
8125 cst0 = TREE_OPERAND (arg0, 1);
8126 sgn0 = tree_int_cst_sgn (cst0);
8128 /* Overflowed constants and zero will cause problems. */
8129 if (integer_zerop (cst0)
8130 || TREE_OVERFLOW (cst0))
8133 /* See if we can reduce the magnitude of the constant in
8134 arg0 by changing the comparison code. */
/* Bare-constant case: trade a strict for a non-strict comparison
   (or vice versa) so the constant moves one step toward zero.  */
8135 if (code0 == INTEGER_CST)
8137 /* CST <= arg1 -> CST-1 < arg1. */
8138 if (code == LE_EXPR && sgn0 == 1)
8140 /* -CST < arg1 -> -CST-1 <= arg1. */
8141 else if (code == LT_EXPR && sgn0 == -1)
8143 /* CST > arg1 -> CST-1 >= arg1. */
8144 else if (code == GT_EXPR && sgn0 == 1)
8146 /* -CST >= arg1 -> -CST-1 > arg1. */
8147 else if (code == GE_EXPR && sgn0 == -1)
8151 /* arg1 code' CST' might be more canonical. */
/* A +- CST case: shrinking the addend relies on A +- CST not
   overflowing, hence the strict_overflow flag set below.  */
8156 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8158 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8160 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8161 else if (code == GT_EXPR
8162 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8164 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8165 else if (code == LE_EXPR
8166 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8168 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8169 else if (code == GE_EXPR
8170 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8174 *strict_overflow_p = true;
8177 /* Now build the constant reduced in magnitude. */
8178 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8179 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8180 if (code0 != INTEGER_CST)
8181 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8183 /* If swapping might yield to a more canonical form, do so. */
8185 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8187 return fold_build2 (code, type, t, arg1);
8190 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8191 overflow further. Try to decrease the magnitude of constants involved
8192 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8193 and put sole constants at the second argument position.
8194 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
/* NOTE(review): this extract is line-sampled -- the function's return
   type line, braces, the declaration of T, and the early/final return
   statements are not visible here.  Only comments were added; every
   code line below is unchanged.  */
/* Canonicalize the comparison ARG0 CODE ARG1: first try simplifying
   ARG0 against ARG1, then retry with the operands and comparison code
   swapped.  Each attempt goes through maybe_canonicalize_comparison_1;
   when a simplification relied on signed overflow being undefined, the
   strict-overflow warning WARNMSG is emitted.  */
8197 maybe_canonicalize_comparison (enum tree_code code, tree type,
8198 tree arg0, tree arg1)
8201 bool strict_overflow_p;
8202 const char * const warnmsg = G_("assuming signed overflow does not occur "
8203 "when reducing constant in comparison");
8205 /* In principle pointers also have undefined overflow behavior,
8206 but that causes problems elsewhere. */
8207 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8208 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8211 /* Try canonicalization by simplifying arg0. */
8212 strict_overflow_p = false;
8213 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8214 &strict_overflow_p);
8217 if (strict_overflow_p)
8218 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8222 /* Try canonicalization by simplifying arg1 using the swapped
8224 code = swap_tree_comparison (code);
8225 strict_overflow_p = false;
8226 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8227 &strict_overflow_p);
/* Only warn if the second attempt actually produced a tree (T). */
8228 if (t && strict_overflow_p)
8229 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8233 /* Subroutine of fold_binary. This routine performs all of the
8234 transformations that are common to the equality/inequality
8235 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8236 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8237 fold_binary should call fold_binary. Fold a comparison with
8238 tree code CODE and type TYPE with operands OP0 and OP1. Return
8239 the folded comparison or NULL_TREE. */
/* NOTE(review): this extract is line-sampled -- the "static tree"
   header line, many braces, several condition/statement lines and the
   final "return NULL_TREE;" are elided.  Only comments were added;
   every code line below is unchanged.  Do not treat this block as
   compilable in isolation.  */
/* Fold the comparison OP0 CODE OP1 of type TYPE.  Tries, in order:
   constant folding, operand canonicalization, X +- C1 CMP C2
   rewriting, pointer/ADDR_EXPR decomposition, constant-magnitude
   canonicalization, float-specific folds, postincrement rewriting,
   conversion narrowing, min/max and self-comparison simplifications,
   two-value comparison analysis, and bitwise-NOT folds.  Each
   successful transformation returns a new tree via an early return.  */
8242 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8244 tree arg0, arg1, tem;
/* Strip sign-preserving no-op conversions from both operands before
   pattern matching.  */
8249 STRIP_SIGN_NOPS (arg0);
8250 STRIP_SIGN_NOPS (arg1);
/* If both operands fold to a relational constant, we are done.  */
8252 tem = fold_relational_const (code, type, arg0, arg1);
8253 if (tem != NULL_TREE)
8256 /* If one arg is a real or integer constant, put it last. */
8257 if (tree_swap_operands_p (arg0, arg1, true))
8258 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8260 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8261 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8262 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8263 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8264 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8265 && (TREE_CODE (arg1) == INTEGER_CST
8266 && !TREE_OVERFLOW (arg1)))
8268 tree const1 = TREE_OPERAND (arg0, 1);
8270 tree variable = TREE_OPERAND (arg0, 0);
/* LHS_ADD: true when arg0 is a MINUS_EXPR, so the constant moves to
   the other side with a PLUS.  */
8273 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8275 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8276 TREE_TYPE (arg1), const2, const1);
8278 /* If the constant operation overflowed this can be
8279 simplified as a comparison against INT_MAX/INT_MIN. */
8280 if (TREE_CODE (lhs) == INTEGER_CST
8281 && TREE_OVERFLOW (lhs))
8283 int const1_sgn = tree_int_cst_sgn (const1);
8284 enum tree_code code2 = code;
8286 /* Get the sign of the constant on the lhs if the
8287 operation were VARIABLE + CONST1. */
8288 if (TREE_CODE (arg0) == MINUS_EXPR)
8289 const1_sgn = -const1_sgn;
8291 /* The sign of the constant determines if we overflowed
8292 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8293 Canonicalize to the INT_MIN overflow by swapping the comparison
8295 if (const1_sgn == -1)
8296 code2 = swap_tree_comparison (code);
8298 /* We now can look at the canonicalized case
8299 VARIABLE + 1 CODE2 INT_MIN
8300 and decide on the result. */
8301 if (code2 == LT_EXPR
8303 || code2 == EQ_EXPR)
8304 return omit_one_operand (type, boolean_false_node, variable);
8305 else if (code2 == NE_EXPR
8307 || code2 == GT_EXPR)
8308 return omit_one_operand (type, boolean_true_node, variable);
/* Otherwise only use the rewritten constant when the arithmetic did
   not overflow.  */
8311 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8312 && (TREE_CODE (lhs) != INTEGER_CST
8313 || !TREE_OVERFLOW (lhs)))
8315 fold_overflow_warning (("assuming signed overflow does not occur "
8316 "when changing X +- C1 cmp C2 to "
8318 WARN_STRICT_OVERFLOW_COMPARISON);
8319 return fold_build2 (code, type, variable, lhs);
8323 /* For comparisons of pointers we can decompose it to a compile time
8324 comparison of the base objects and the offsets into the object.
8325 This requires at least one operand being an ADDR_EXPR to do more
8326 than the operand_equal_p test below. */
8327 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8328 && (TREE_CODE (arg0) == ADDR_EXPR
8329 || TREE_CODE (arg1) == ADDR_EXPR))
8331 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8332 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8333 enum machine_mode mode;
8334 int volatilep, unsignedp;
8335 bool indirect_base0 = false;
8337 /* Get base and offset for the access. Strip ADDR_EXPR for
8338 get_inner_reference, but put it back by stripping INDIRECT_REF
8339 off the base object if possible. */
8341 if (TREE_CODE (arg0) == ADDR_EXPR)
8343 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8344 &bitsize, &bitpos0, &offset0, &mode,
8345 &unsignedp, &volatilep, false);
8346 if (TREE_CODE (base0) == INDIRECT_REF)
8347 base0 = TREE_OPERAND (base0, 0);
8349 indirect_base0 = true;
8353 if (TREE_CODE (arg1) == ADDR_EXPR)
8355 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8356 &bitsize, &bitpos1, &offset1, &mode,
8357 &unsignedp, &volatilep, false);
8358 /* We have to make sure to have an indirect/non-indirect base1
8359 just the same as we did for base0. */
8360 if (TREE_CODE (base1) == INDIRECT_REF
8362 base1 = TREE_OPERAND (base1, 0);
8363 else if (!indirect_base0)
8366 else if (indirect_base0)
8369 /* If we have equivalent bases we might be able to simplify. */
8371 && operand_equal_p (base0, base1, 0))
8373 /* We can fold this expression to a constant if the non-constant
8374 offset parts are equal. */
8375 if (offset0 == offset1
8376 || (offset0 && offset1
8377 && operand_equal_p (offset0, offset1, 0)))
/* With equal bases and offsets the result depends only on the
   bit positions; one return per comparison code.  */
8382 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8384 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8386 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8388 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8390 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8392 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8396 /* We can simplify the comparison to a comparison of the variable
8397 offset parts if the constant offset parts are equal.
8398 Be careful to use signed size type here because otherwise we
8399 mess with array offsets in the wrong way. This is possible
8400 because pointer arithmetic is restricted to retain within an
8401 object and overflow on pointer differences is undefined as of
8402 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8403 else if (bitpos0 == bitpos1)
8405 tree signed_size_type_node;
8406 signed_size_type_node = signed_type_for (size_type_node);
8408 /* By converting to signed size type we cover middle-end pointer
8409 arithmetic which operates on unsigned pointer types of size
8410 type size and ARRAY_REF offsets which are properly sign or
8411 zero extended from their type in case it is narrower than
8413 if (offset0 == NULL_TREE)
8414 offset0 = build_int_cst (signed_size_type_node, 0);
8416 offset0 = fold_convert (signed_size_type_node, offset0);
8417 if (offset1 == NULL_TREE)
8418 offset1 = build_int_cst (signed_size_type_node, 0);
8420 offset1 = fold_convert (signed_size_type_node, offset1);
8422 return fold_build2 (code, type, offset0, offset1);
8427 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8428 same object, then we can fold this to a comparison of the two offsets in
8429 signed size type. This is possible because pointer arithmetic is
8430 restricted to retain within an object and overflow on pointer differences
8431 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8433 We check flag_wrapv directly because pointers types are unsigned,
8434 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8435 normally what we want to avoid certain odd overflow cases, but
8437 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8439 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8441 tree base0, offset0, base1, offset1;
8443 if (extract_array_ref (arg0, &base0, &offset0)
8444 && extract_array_ref (arg1, &base1, &offset1)
8445 && operand_equal_p (base0, base1, 0))
8447 tree signed_size_type_node;
8448 signed_size_type_node = signed_type_for (size_type_node);
8450 /* By converting to signed size type we cover middle-end pointer
8451 arithmetic which operates on unsigned pointer types of size
8452 type size and ARRAY_REF offsets which are properly sign or
8453 zero extended from their type in case it is narrower than
8455 if (offset0 == NULL_TREE)
8456 offset0 = build_int_cst (signed_size_type_node, 0);
8458 offset0 = fold_convert (signed_size_type_node, offset0);
8459 if (offset1 == NULL_TREE)
8460 offset1 = build_int_cst (signed_size_type_node, 0);
8462 offset1 = fold_convert (signed_size_type_node, offset1);
8464 return fold_build2 (code, type, offset0, offset1);
8468 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8469 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8470 the resulting offset is smaller in absolute value than the
8472 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8473 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8474 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8475 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8476 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8477 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8478 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8480 tree const1 = TREE_OPERAND (arg0, 1);
8481 tree const2 = TREE_OPERAND (arg1, 1);
8482 tree variable1 = TREE_OPERAND (arg0, 0);
8483 tree variable2 = TREE_OPERAND (arg1, 0);
8485 const char * const warnmsg = G_("assuming signed overflow does not "
8486 "occur when combining constants around "
8489 /* Put the constant on the side where it doesn't overflow and is
8490 of lower absolute value than before. */
8491 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8492 ? MINUS_EXPR : PLUS_EXPR,
8494 if (!TREE_OVERFLOW (cst)
8495 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8497 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8498 return fold_build2 (code, type,
8500 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
/* Second attempt: combine the constants the other way round.  */
8504 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8505 ? MINUS_EXPR : PLUS_EXPR,
8507 if (!TREE_OVERFLOW (cst)
8508 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8510 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8511 return fold_build2 (code, type,
8512 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8518 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8519 signed arithmetic case. That form is created by the compiler
8520 often enough for folding it to be of value. One example is in
8521 computing loop trip counts after Operator Strength Reduction. */
8522 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8523 && TREE_CODE (arg0) == MULT_EXPR
8524 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8525 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8526 && integer_zerop (arg1))
8528 tree const1 = TREE_OPERAND (arg0, 1);
8529 tree const2 = arg1; /* zero */
8530 tree variable1 = TREE_OPERAND (arg0, 0);
8531 enum tree_code cmp_code = code;
8533 gcc_assert (!integer_zerop (const1));
8535 fold_overflow_warning (("assuming signed overflow does not occur when "
8536 "eliminating multiplication in comparison "
8538 WARN_STRICT_OVERFLOW_COMPARISON);
8540 /* If const1 is negative we swap the sense of the comparison. */
8541 if (tree_int_cst_sgn (const1) < 0)
8542 cmp_code = swap_tree_comparison (cmp_code);
8544 return fold_build2 (cmp_code, type, variable1, const2);
/* Try reducing the magnitude of constants in the comparison (see
   maybe_canonicalize_comparison above).  */
8547 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
/* Floating-point specific folds.  */
8551 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8553 tree targ0 = strip_float_extensions (arg0);
8554 tree targ1 = strip_float_extensions (arg1);
8555 tree newtype = TREE_TYPE (targ0);
8557 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8558 newtype = TREE_TYPE (targ1);
8560 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8561 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8562 return fold_build2 (code, type, fold_convert (newtype, targ0),
8563 fold_convert (newtype, targ1));
8565 /* (-a) CMP (-b) -> b CMP a */
8566 if (TREE_CODE (arg0) == NEGATE_EXPR
8567 && TREE_CODE (arg1) == NEGATE_EXPR)
8568 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8569 TREE_OPERAND (arg0, 0));
8571 if (TREE_CODE (arg1) == REAL_CST)
8573 REAL_VALUE_TYPE cst;
8574 cst = TREE_REAL_CST (arg1);
8576 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8577 if (TREE_CODE (arg0) == NEGATE_EXPR)
8578 return fold_build2 (swap_tree_comparison (code), type,
8579 TREE_OPERAND (arg0, 0),
8580 build_real (TREE_TYPE (arg1),
8581 REAL_VALUE_NEGATE (cst)));
8583 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8584 /* a CMP (-0) -> a CMP 0 */
8585 if (REAL_VALUE_MINUS_ZERO (cst))
8586 return fold_build2 (code, type, arg0,
8587 build_real (TREE_TYPE (arg1), dconst0));
8589 /* x != NaN is always true, other ops are always false. */
8590 if (REAL_VALUE_ISNAN (cst)
8591 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8593 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8594 return omit_one_operand (type, tem, arg0);
8597 /* Fold comparisons against infinity. */
8598 if (REAL_VALUE_ISINF (cst))
8600 tem = fold_inf_compare (code, type, arg0, arg1);
8601 if (tem != NULL_TREE)
8606 /* If this is a comparison of a real constant with a PLUS_EXPR
8607 or a MINUS_EXPR of a real constant, we can convert it into a
8608 comparison with a revised real constant as long as no overflow
8609 occurs when unsafe_math_optimizations are enabled. */
8610 if (flag_unsafe_math_optimizations
8611 && TREE_CODE (arg1) == REAL_CST
8612 && (TREE_CODE (arg0) == PLUS_EXPR
8613 || TREE_CODE (arg0) == MINUS_EXPR)
8614 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8615 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8616 ? MINUS_EXPR : PLUS_EXPR,
8617 arg1, TREE_OPERAND (arg0, 1), 0))
8618 && !TREE_OVERFLOW (tem))
8619 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8621 /* Likewise, we can simplify a comparison of a real constant with
8622 a MINUS_EXPR whose first operand is also a real constant, i.e.
8623 (c1 - x) < c2 becomes x > c1-c2. */
8624 if (flag_unsafe_math_optimizations
8625 && TREE_CODE (arg1) == REAL_CST
8626 && TREE_CODE (arg0) == MINUS_EXPR
8627 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8628 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8630 && !TREE_OVERFLOW (tem))
8631 return fold_build2 (swap_tree_comparison (code), type,
8632 TREE_OPERAND (arg0, 1), tem);
8634 /* Fold comparisons against built-in math functions. */
8635 if (TREE_CODE (arg1) == REAL_CST
8636 && flag_unsafe_math_optimizations
8637 && ! flag_errno_math)
8639 enum built_in_function fcode = builtin_mathfn_code (arg0);
8641 if (fcode != END_BUILTINS)
8643 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8644 if (tem != NULL_TREE)
8650 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8651 if (TREE_CONSTANT (arg1)
8652 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8653 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8654 /* This optimization is invalid for ordered comparisons
8655 if CONST+INCR overflows or if foo+incr might overflow.
8656 This optimization is invalid for floating point due to rounding.
8657 For pointer types we assume overflow doesn't happen. */
8658 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8659 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8660 && (code == EQ_EXPR || code == NE_EXPR))))
8662 tree varop, newconst;
8664 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8666 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8667 arg1, TREE_OPERAND (arg0, 1));
8668 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8669 TREE_OPERAND (arg0, 0),
8670 TREE_OPERAND (arg0, 1));
/* Postdecrement case (else-arm; brace lines elided by sampling).  */
8674 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8675 arg1, TREE_OPERAND (arg0, 1));
8676 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8677 TREE_OPERAND (arg0, 0),
8678 TREE_OPERAND (arg0, 1));
8682 /* If VAROP is a reference to a bitfield, we must mask
8683 the constant by the width of the field. */
8684 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8685 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8686 && host_integerp (DECL_SIZE (TREE_OPERAND
8687 (TREE_OPERAND (varop, 0), 1)), 1))
8689 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8690 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8691 tree folded_compare, shift;
8693 /* First check whether the comparison would come out
8694 always the same. If we don't do that we would
8695 change the meaning with the masking. */
8696 folded_compare = fold_build2 (code, type,
8697 TREE_OPERAND (varop, 0), arg1);
8698 if (TREE_CODE (folded_compare) == INTEGER_CST)
8699 return omit_one_operand (type, folded_compare, varop);
/* Mask NEWCONST to the field width by shifting left then right.  */
8701 shift = build_int_cst (NULL_TREE,
8702 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8703 shift = fold_convert (TREE_TYPE (varop), shift);
8704 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8706 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8710 return fold_build2 (code, type, varop, newconst);
/* Comparisons involving integer conversions (NOP/CONVERT).  */
8713 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8714 && (TREE_CODE (arg0) == NOP_EXPR
8715 || TREE_CODE (arg0) == CONVERT_EXPR))
8717 /* If we are widening one operand of an integer comparison,
8718 see if the other operand is similarly being widened. Perhaps we
8719 can do the comparison in the narrower type. */
8720 tem = fold_widened_comparison (code, type, arg0, arg1);
8724 /* Or if we are changing signedness. */
8725 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8730 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8731 constant, we can simplify it. */
8732 if (TREE_CODE (arg1) == INTEGER_CST
8733 && (TREE_CODE (arg0) == MIN_EXPR
8734 || TREE_CODE (arg0) == MAX_EXPR)
8735 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8737 tem = optimize_minmax_comparison (code, type, op0, op1);
8742 /* Simplify comparison of something with itself. (For IEEE
8743 floating-point, we can only do some of these simplifications.) */
8744 if (operand_equal_p (arg0, arg1, 0))
8749 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8750 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8751 return constant_boolean_node (1, type);
8756 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8757 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8758 return constant_boolean_node (1, type);
/* With possible NaNs, X <= X / X >= X degrades to X == X.  */
8759 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8762 /* For NE, we can only do this simplification if integer
8763 or we don't honor IEEE floating point NaNs. */
8764 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8765 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8767 /* ... fall through ... */
8770 return constant_boolean_node (0, type);
8776 /* If we are comparing an expression that just has comparisons
8777 of two integer values, arithmetic expressions of those comparisons,
8778 and constants, we can simplify it. There are only three cases
8779 to check: the two values can either be equal, the first can be
8780 greater, or the second can be greater. Fold the expression for
8781 those three values. Since each value must be 0 or 1, we have
8782 eight possibilities, each of which corresponds to the constant 0
8783 or 1 or one of the six possible comparisons.
8785 This handles common cases like (a > b) == 0 but also handles
8786 expressions like ((x > y) - (y > x)) > 0, which supposedly
8787 occur in macroized code. */
8789 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8791 tree cval1 = 0, cval2 = 0;
8794 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8795 /* Don't handle degenerate cases here; they should already
8796 have been handled anyway. */
8797 && cval1 != 0 && cval2 != 0
8798 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8799 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8800 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8801 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8802 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8803 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8804 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8806 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8807 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8809 /* We can't just pass T to eval_subst in case cval1 or cval2
8810 was the same as ARG1. */
/* Evaluate the comparison for cval1>cval2, cval1==cval2 and
   cval1<cval2 (assignment targets elided by sampling).  */
8813 = fold_build2 (code, type,
8814 eval_subst (arg0, cval1, maxval,
8818 = fold_build2 (code, type,
8819 eval_subst (arg0, cval1, maxval,
8823 = fold_build2 (code, type,
8824 eval_subst (arg0, cval1, minval,
8828 /* All three of these results should be 0 or 1. Confirm they are.
8829 Then use those values to select the proper code to use. */
8831 if (TREE_CODE (high_result) == INTEGER_CST
8832 && TREE_CODE (equal_result) == INTEGER_CST
8833 && TREE_CODE (low_result) == INTEGER_CST)
8835 /* Make a 3-bit mask with the high-order bit being the
8836 value for `>', the next for '=', and the low for '<'. */
8837 switch ((integer_onep (high_result) * 4)
8838 + (integer_onep (equal_result) * 2)
8839 + integer_onep (low_result))
/* Switch cases elided by sampling; 0 -> always false,
   7 -> always true.  */
8843 return omit_one_operand (type, integer_zero_node, arg0);
8864 return omit_one_operand (type, integer_one_node, arg0);
8868 return save_expr (build2 (code, type, cval1, cval2));
8869 return fold_build2 (code, type, cval1, cval2);
8874 /* Fold a comparison of the address of COMPONENT_REFs with the same
8875 type and component to a comparison of the address of the base
8876 object. In short, &x->a OP &y->a to x OP y and
8877 &x->a OP &y.a to x OP &y */
8878 if (TREE_CODE (arg0) == ADDR_EXPR
8879 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8880 && TREE_CODE (arg1) == ADDR_EXPR
8881 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8883 tree cref0 = TREE_OPERAND (arg0, 0);
8884 tree cref1 = TREE_OPERAND (arg1, 0);
8885 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8887 tree op0 = TREE_OPERAND (cref0, 0);
8888 tree op1 = TREE_OPERAND (cref1, 0);
8889 return fold_build2 (code, type,
8890 build_fold_addr_expr (op0),
8891 build_fold_addr_expr (op1));
8895 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8896 into a single range test. */
8897 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8898 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8899 && TREE_CODE (arg1) == INTEGER_CST
8900 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8901 && !integer_zerop (TREE_OPERAND (arg0, 1))
8902 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8903 && !TREE_OVERFLOW (arg1))
8905 tem = fold_div_compare (code, type, arg0, arg1);
8906 if (tem != NULL_TREE)
8910 /* Fold ~X op ~Y as Y op X. */
8911 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8912 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8914 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8915 return fold_build2 (code, type,
8916 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
8917 TREE_OPERAND (arg0, 0));
8920 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8921 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8922 && TREE_CODE (arg1) == INTEGER_CST)
8924 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8925 return fold_build2 (swap_tree_comparison (code), type,
8926 TREE_OPERAND (arg0, 0),
8927 fold_build1 (BIT_NOT_EXPR, cmp_type,
8928 fold_convert (cmp_type, arg1)));
8935 /* Subroutine of fold_binary. Optimize complex multiplications of the
8936 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8937 argument EXPR represents the expression "z" of type TYPE. */
/* NOTE(review): line-sampled extract -- the "static tree" header line,
   braces and the final else keyword are elided.  Only comments were
   added; every code line below is unchanged.  */
/* Build pow-style expansion of z * conj(z) for the complex value EXPR
   of complex type TYPE (see the header comment above): the result is
   COMPLEX_EXPR <rpart*rpart + ipart*ipart, 0> in the component type
   ITYPE.  */
8940 fold_mult_zconjz (tree type, tree expr)
8942 tree itype = TREE_TYPE (type);
8943 tree rpart, ipart, tem;
/* Extract real and imaginary parts directly when EXPR is already a
   COMPLEX_EXPR or COMPLEX_CST ...  */
8945 if (TREE_CODE (expr) == COMPLEX_EXPR)
8947 rpart = TREE_OPERAND (expr, 0);
8948 ipart = TREE_OPERAND (expr, 1);
8950 else if (TREE_CODE (expr) == COMPLEX_CST)
8952 rpart = TREE_REALPART (expr);
8953 ipart = TREE_IMAGPART (expr);
/* ... otherwise wrap EXPR in save_expr so it is evaluated once, and
   take REALPART/IMAGPART of it (else-arm braces elided).  */
8957 expr = save_expr (expr);
8958 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8959 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
/* Each part is used twice in the products below, so save_expr them
   to avoid double evaluation.  */
8962 rpart = save_expr (rpart);
8963 ipart = save_expr (ipart);
8964 tem = fold_build2 (PLUS_EXPR, itype,
8965 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8966 fold_build2 (MULT_EXPR, itype, ipart, ipart));
/* Result: real part rpart^2 + ipart^2, imaginary part zero.  */
8967 return fold_build2 (COMPLEX_EXPR, type, tem,
8968 fold_convert (itype, integer_zero_node))
8972 /* Fold a binary expression of code CODE and type TYPE with operands
8973 OP0 and OP1. Return the folded expression if folding is
8974 successful. Otherwise, return NULL_TREE. */
8977 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8979 enum tree_code_class kind = TREE_CODE_CLASS (code);
8980 tree arg0, arg1, tem;
8981 tree t1 = NULL_TREE;
8982 bool strict_overflow_p;
8984 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8985 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8986 && TREE_CODE_LENGTH (code) == 2
8988 && op1 != NULL_TREE);
8993 /* Strip any conversions that don't change the mode. This is
8994 safe for every expression, except for a comparison expression
8995 because its signedness is derived from its operands. So, in
8996 the latter case, only strip conversions that don't change the
8999 Note that this is done as an internal manipulation within the
9000 constant folder, in order to find the simplest representation
9001 of the arguments so that their form can be studied. In any
9002 cases, the appropriate type conversions should be put back in
9003 the tree that will get out of the constant folder. */
9005 if (kind == tcc_comparison)
9007 STRIP_SIGN_NOPS (arg0);
9008 STRIP_SIGN_NOPS (arg1);
9016 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9017 constant but we can't do arithmetic on them. */
9018 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9019 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9020 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9021 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9023 if (kind == tcc_binary)
9024 tem = const_binop (code, arg0, arg1, 0);
9025 else if (kind == tcc_comparison)
9026 tem = fold_relational_const (code, type, arg0, arg1);
9030 if (tem != NULL_TREE)
9032 if (TREE_TYPE (tem) != type)
9033 tem = fold_convert (type, tem);
9038 /* If this is a commutative operation, and ARG0 is a constant, move it
9039 to ARG1 to reduce the number of tests below. */
9040 if (commutative_tree_code (code)
9041 && tree_swap_operands_p (arg0, arg1, true))
9042 return fold_build2 (code, type, op1, op0);
9044 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9046 First check for cases where an arithmetic operation is applied to a
9047 compound, conditional, or comparison operation. Push the arithmetic
9048 operation inside the compound or conditional to see if any folding
9049 can then be done. Convert comparison to conditional for this purpose.
9050 The also optimizes non-constant cases that used to be done in
9053 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9054 one of the operands is a comparison and the other is a comparison, a
9055 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9056 code below would make the expression more complex. Change it to a
9057 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9058 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9060 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9061 || code == EQ_EXPR || code == NE_EXPR)
9062 && ((truth_value_p (TREE_CODE (arg0))
9063 && (truth_value_p (TREE_CODE (arg1))
9064 || (TREE_CODE (arg1) == BIT_AND_EXPR
9065 && integer_onep (TREE_OPERAND (arg1, 1)))))
9066 || (truth_value_p (TREE_CODE (arg1))
9067 && (truth_value_p (TREE_CODE (arg0))
9068 || (TREE_CODE (arg0) == BIT_AND_EXPR
9069 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9071 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9072 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9075 fold_convert (boolean_type_node, arg0),
9076 fold_convert (boolean_type_node, arg1));
9078 if (code == EQ_EXPR)
9079 tem = invert_truthvalue (tem);
9081 return fold_convert (type, tem);
9084 if (TREE_CODE_CLASS (code) == tcc_binary
9085 || TREE_CODE_CLASS (code) == tcc_comparison)
9087 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9088 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9089 fold_build2 (code, type,
9090 TREE_OPERAND (arg0, 1), op1));
9091 if (TREE_CODE (arg1) == COMPOUND_EXPR
9092 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9093 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9094 fold_build2 (code, type,
9095 op0, TREE_OPERAND (arg1, 1)));
9097 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9099 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9101 /*cond_first_p=*/1);
9102 if (tem != NULL_TREE)
9106 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9108 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9110 /*cond_first_p=*/0);
9111 if (tem != NULL_TREE)
9119 /* A + (-B) -> A - B */
9120 if (TREE_CODE (arg1) == NEGATE_EXPR)
9121 return fold_build2 (MINUS_EXPR, type,
9122 fold_convert (type, arg0),
9123 fold_convert (type, TREE_OPERAND (arg1, 0)));
9124 /* (-A) + B -> B - A */
9125 if (TREE_CODE (arg0) == NEGATE_EXPR
9126 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9127 return fold_build2 (MINUS_EXPR, type,
9128 fold_convert (type, arg1),
9129 fold_convert (type, TREE_OPERAND (arg0, 0)));
9130 /* Convert ~A + 1 to -A. */
9131 if (INTEGRAL_TYPE_P (type)
9132 && TREE_CODE (arg0) == BIT_NOT_EXPR
9133 && integer_onep (arg1))
9134 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9136 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9138 if ((TREE_CODE (arg0) == MULT_EXPR
9139 || TREE_CODE (arg1) == MULT_EXPR)
9140 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9142 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9147 if (! FLOAT_TYPE_P (type))
9149 if (integer_zerop (arg1))
9150 return non_lvalue (fold_convert (type, arg0));
9153 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9154 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9155 && !TYPE_OVERFLOW_TRAPS (type))
9157 t1 = build_int_cst_type (type, -1);
9158 return omit_one_operand (type, t1, arg1);
9162 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9163 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9164 && !TYPE_OVERFLOW_TRAPS (type))
9166 t1 = build_int_cst_type (type, -1);
9167 return omit_one_operand (type, t1, arg0);
9170 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9171 with a constant, and the two constants have no bits in common,
9172 we should treat this as a BIT_IOR_EXPR since this may produce more
9174 if (TREE_CODE (arg0) == BIT_AND_EXPR
9175 && TREE_CODE (arg1) == BIT_AND_EXPR
9176 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9177 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9178 && integer_zerop (const_binop (BIT_AND_EXPR,
9179 TREE_OPERAND (arg0, 1),
9180 TREE_OPERAND (arg1, 1), 0)))
9182 code = BIT_IOR_EXPR;
9186 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9187 (plus (plus (mult) (mult)) (foo)) so that we can
9188 take advantage of the factoring cases below. */
9189 if (((TREE_CODE (arg0) == PLUS_EXPR
9190 || TREE_CODE (arg0) == MINUS_EXPR)
9191 && TREE_CODE (arg1) == MULT_EXPR)
9192 || ((TREE_CODE (arg1) == PLUS_EXPR
9193 || TREE_CODE (arg1) == MINUS_EXPR)
9194 && TREE_CODE (arg0) == MULT_EXPR))
9196 tree parg0, parg1, parg, marg;
9197 enum tree_code pcode;
9199 if (TREE_CODE (arg1) == MULT_EXPR)
9200 parg = arg0, marg = arg1;
9202 parg = arg1, marg = arg0;
9203 pcode = TREE_CODE (parg);
9204 parg0 = TREE_OPERAND (parg, 0);
9205 parg1 = TREE_OPERAND (parg, 1);
9209 if (TREE_CODE (parg0) == MULT_EXPR
9210 && TREE_CODE (parg1) != MULT_EXPR)
9211 return fold_build2 (pcode, type,
9212 fold_build2 (PLUS_EXPR, type,
9213 fold_convert (type, parg0),
9214 fold_convert (type, marg)),
9215 fold_convert (type, parg1));
9216 if (TREE_CODE (parg0) != MULT_EXPR
9217 && TREE_CODE (parg1) == MULT_EXPR)
9218 return fold_build2 (PLUS_EXPR, type,
9219 fold_convert (type, parg0),
9220 fold_build2 (pcode, type,
9221 fold_convert (type, marg),
9226 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
9227 of the array. Loop optimizer sometimes produce this type of
9229 if (TREE_CODE (arg0) == ADDR_EXPR)
9231 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
9233 return fold_convert (type, tem);
9235 else if (TREE_CODE (arg1) == ADDR_EXPR)
9237 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
9239 return fold_convert (type, tem);
9244 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9245 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9246 return non_lvalue (fold_convert (type, arg0));
9248 /* Likewise if the operands are reversed. */
9249 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9250 return non_lvalue (fold_convert (type, arg1));
9252 /* Convert X + -C into X - C. */
9253 if (TREE_CODE (arg1) == REAL_CST
9254 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9256 tem = fold_negate_const (arg1, type);
9257 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9258 return fold_build2 (MINUS_EXPR, type,
9259 fold_convert (type, arg0),
9260 fold_convert (type, tem));
9263 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9264 to __complex__ ( x, y ). This is not the same for SNaNs or
9265 if signed zeros are involved. */
9266 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9267 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9268 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9270 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9271 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9272 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9273 bool arg0rz = false, arg0iz = false;
9274 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9275 || (arg0i && (arg0iz = real_zerop (arg0i))))
9277 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9278 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9279 if (arg0rz && arg1i && real_zerop (arg1i))
9281 tree rp = arg1r ? arg1r
9282 : build1 (REALPART_EXPR, rtype, arg1);
9283 tree ip = arg0i ? arg0i
9284 : build1 (IMAGPART_EXPR, rtype, arg0);
9285 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9287 else if (arg0iz && arg1r && real_zerop (arg1r))
9289 tree rp = arg0r ? arg0r
9290 : build1 (REALPART_EXPR, rtype, arg0);
9291 tree ip = arg1i ? arg1i
9292 : build1 (IMAGPART_EXPR, rtype, arg1);
9293 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9298 if (flag_unsafe_math_optimizations
9299 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9300 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9301 && (tem = distribute_real_division (code, type, arg0, arg1)))
9304 /* Convert x+x into x*2.0. */
9305 if (operand_equal_p (arg0, arg1, 0)
9306 && SCALAR_FLOAT_TYPE_P (type))
9307 return fold_build2 (MULT_EXPR, type, arg0,
9308 build_real (type, dconst2));
9310 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9311 if (flag_unsafe_math_optimizations
9312 && TREE_CODE (arg1) == PLUS_EXPR
9313 && TREE_CODE (arg0) != MULT_EXPR)
9315 tree tree10 = TREE_OPERAND (arg1, 0);
9316 tree tree11 = TREE_OPERAND (arg1, 1);
9317 if (TREE_CODE (tree11) == MULT_EXPR
9318 && TREE_CODE (tree10) == MULT_EXPR)
9321 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9322 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9325 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
9326 if (flag_unsafe_math_optimizations
9327 && TREE_CODE (arg0) == PLUS_EXPR
9328 && TREE_CODE (arg1) != MULT_EXPR)
9330 tree tree00 = TREE_OPERAND (arg0, 0);
9331 tree tree01 = TREE_OPERAND (arg0, 1);
9332 if (TREE_CODE (tree01) == MULT_EXPR
9333 && TREE_CODE (tree00) == MULT_EXPR)
9336 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9337 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9343 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9344 is a rotate of A by C1 bits. */
9345 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9346 is a rotate of A by B bits. */
9348 enum tree_code code0, code1;
9349 code0 = TREE_CODE (arg0);
9350 code1 = TREE_CODE (arg1);
9351 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9352 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9353 && operand_equal_p (TREE_OPERAND (arg0, 0),
9354 TREE_OPERAND (arg1, 0), 0)
9355 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9357 tree tree01, tree11;
9358 enum tree_code code01, code11;
9360 tree01 = TREE_OPERAND (arg0, 1);
9361 tree11 = TREE_OPERAND (arg1, 1);
9362 STRIP_NOPS (tree01);
9363 STRIP_NOPS (tree11);
9364 code01 = TREE_CODE (tree01);
9365 code11 = TREE_CODE (tree11);
9366 if (code01 == INTEGER_CST
9367 && code11 == INTEGER_CST
9368 && TREE_INT_CST_HIGH (tree01) == 0
9369 && TREE_INT_CST_HIGH (tree11) == 0
9370 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9371 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9372 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9373 code0 == LSHIFT_EXPR ? tree01 : tree11);
9374 else if (code11 == MINUS_EXPR)
9376 tree tree110, tree111;
9377 tree110 = TREE_OPERAND (tree11, 0);
9378 tree111 = TREE_OPERAND (tree11, 1);
9379 STRIP_NOPS (tree110);
9380 STRIP_NOPS (tree111);
9381 if (TREE_CODE (tree110) == INTEGER_CST
9382 && 0 == compare_tree_int (tree110,
9384 (TREE_TYPE (TREE_OPERAND
9386 && operand_equal_p (tree01, tree111, 0))
9387 return build2 ((code0 == LSHIFT_EXPR
9390 type, TREE_OPERAND (arg0, 0), tree01);
9392 else if (code01 == MINUS_EXPR)
9394 tree tree010, tree011;
9395 tree010 = TREE_OPERAND (tree01, 0);
9396 tree011 = TREE_OPERAND (tree01, 1);
9397 STRIP_NOPS (tree010);
9398 STRIP_NOPS (tree011);
9399 if (TREE_CODE (tree010) == INTEGER_CST
9400 && 0 == compare_tree_int (tree010,
9402 (TREE_TYPE (TREE_OPERAND
9404 && operand_equal_p (tree11, tree011, 0))
9405 return build2 ((code0 != LSHIFT_EXPR
9408 type, TREE_OPERAND (arg0, 0), tree11);
9414 /* In most languages, can't associate operations on floats through
9415 parentheses. Rather than remember where the parentheses were, we
9416 don't associate floats at all, unless the user has specified
9417 -funsafe-math-optimizations. */
9419 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9421 tree var0, con0, lit0, minus_lit0;
9422 tree var1, con1, lit1, minus_lit1;
9425 /* Split both trees into variables, constants, and literals. Then
9426 associate each group together, the constants with literals,
9427 then the result with variables. This increases the chances of
9428 literals being recombined later and of generating relocatable
9429 expressions for the sum of a constant and literal. */
9430 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9431 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9432 code == MINUS_EXPR);
9434 /* With undefined overflow we can only associate constants
9435 with one variable. */
9436 if ((POINTER_TYPE_P (type)
9437 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9443 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9444 tmp0 = TREE_OPERAND (tmp0, 0);
9445 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9446 tmp1 = TREE_OPERAND (tmp1, 0);
9447 /* The only case we can still associate with two variables
9448 is if they are the same, modulo negation. */
9449 if (!operand_equal_p (tmp0, tmp1, 0))
9453 /* Only do something if we found more than two objects. Otherwise,
9454 nothing has changed and we risk infinite recursion. */
9456 && (2 < ((var0 != 0) + (var1 != 0)
9457 + (con0 != 0) + (con1 != 0)
9458 + (lit0 != 0) + (lit1 != 0)
9459 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9461 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9462 if (code == MINUS_EXPR)
9465 var0 = associate_trees (var0, var1, code, type);
9466 con0 = associate_trees (con0, con1, code, type);
9467 lit0 = associate_trees (lit0, lit1, code, type);
9468 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9470 /* Preserve the MINUS_EXPR if the negative part of the literal is
9471 greater than the positive part. Otherwise, the multiplicative
9472 folding code (i.e extract_muldiv) may be fooled in case
9473 unsigned constants are subtracted, like in the following
9474 example: ((X*2 + 4) - 8U)/2. */
9475 if (minus_lit0 && lit0)
9477 if (TREE_CODE (lit0) == INTEGER_CST
9478 && TREE_CODE (minus_lit0) == INTEGER_CST
9479 && tree_int_cst_lt (lit0, minus_lit0))
9481 minus_lit0 = associate_trees (minus_lit0, lit0,
9487 lit0 = associate_trees (lit0, minus_lit0,
9495 return fold_convert (type,
9496 associate_trees (var0, minus_lit0,
9500 con0 = associate_trees (con0, minus_lit0,
9502 return fold_convert (type,
9503 associate_trees (var0, con0,
9508 con0 = associate_trees (con0, lit0, code, type);
9509 return fold_convert (type, associate_trees (var0, con0,
9517 /* A - (-B) -> A + B */
9518 if (TREE_CODE (arg1) == NEGATE_EXPR)
9519 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9520 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9521 if (TREE_CODE (arg0) == NEGATE_EXPR
9522 && (FLOAT_TYPE_P (type)
9523 || INTEGRAL_TYPE_P (type))
9524 && negate_expr_p (arg1)
9525 && reorder_operands_p (arg0, arg1))
9526 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9527 TREE_OPERAND (arg0, 0));
9528 /* Convert -A - 1 to ~A. */
9529 if (INTEGRAL_TYPE_P (type)
9530 && TREE_CODE (arg0) == NEGATE_EXPR
9531 && integer_onep (arg1)
9532 && !TYPE_OVERFLOW_TRAPS (type))
9533 return fold_build1 (BIT_NOT_EXPR, type,
9534 fold_convert (type, TREE_OPERAND (arg0, 0)));
9536 /* Convert -1 - A to ~A. */
9537 if (INTEGRAL_TYPE_P (type)
9538 && integer_all_onesp (arg0))
9539 return fold_build1 (BIT_NOT_EXPR, type, op1);
9541 if (! FLOAT_TYPE_P (type))
9543 if (integer_zerop (arg0))
9544 return negate_expr (fold_convert (type, arg1));
9545 if (integer_zerop (arg1))
9546 return non_lvalue (fold_convert (type, arg0));
9548 /* Fold A - (A & B) into ~B & A. */
9549 if (!TREE_SIDE_EFFECTS (arg0)
9550 && TREE_CODE (arg1) == BIT_AND_EXPR)
9552 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9553 return fold_build2 (BIT_AND_EXPR, type,
9554 fold_build1 (BIT_NOT_EXPR, type,
9555 TREE_OPERAND (arg1, 0)),
9557 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9558 return fold_build2 (BIT_AND_EXPR, type,
9559 fold_build1 (BIT_NOT_EXPR, type,
9560 TREE_OPERAND (arg1, 1)),
9564 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9565 any power of 2 minus 1. */
9566 if (TREE_CODE (arg0) == BIT_AND_EXPR
9567 && TREE_CODE (arg1) == BIT_AND_EXPR
9568 && operand_equal_p (TREE_OPERAND (arg0, 0),
9569 TREE_OPERAND (arg1, 0), 0))
9571 tree mask0 = TREE_OPERAND (arg0, 1);
9572 tree mask1 = TREE_OPERAND (arg1, 1);
9573 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9575 if (operand_equal_p (tem, mask1, 0))
9577 tem = fold_build2 (BIT_XOR_EXPR, type,
9578 TREE_OPERAND (arg0, 0), mask1);
9579 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9584 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9585 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9586 return non_lvalue (fold_convert (type, arg0));
9588 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9589 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9590 (-ARG1 + ARG0) reduces to -ARG1. */
9591 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9592 return negate_expr (fold_convert (type, arg1));
9594 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9595 __complex__ ( x, -y ). This is not the same for SNaNs or if
9596 signed zeros are involved. */
9597 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9598 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9599 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9601 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9602 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9603 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9604 bool arg0rz = false, arg0iz = false;
9605 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9606 || (arg0i && (arg0iz = real_zerop (arg0i))))
9608 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9609 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9610 if (arg0rz && arg1i && real_zerop (arg1i))
9612 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9614 : build1 (REALPART_EXPR, rtype, arg1));
9615 tree ip = arg0i ? arg0i
9616 : build1 (IMAGPART_EXPR, rtype, arg0);
9617 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9619 else if (arg0iz && arg1r && real_zerop (arg1r))
9621 tree rp = arg0r ? arg0r
9622 : build1 (REALPART_EXPR, rtype, arg0);
9623 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9625 : build1 (IMAGPART_EXPR, rtype, arg1));
9626 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9631 /* Fold &x - &x. This can happen from &x.foo - &x.
9632 This is unsafe for certain floats even in non-IEEE formats.
9633 In IEEE, it is unsafe because it does wrong for NaNs.
9634 Also note that operand_equal_p is always false if an operand
9637 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9638 && operand_equal_p (arg0, arg1, 0))
9639 return fold_convert (type, integer_zero_node);
9641 /* A - B -> A + (-B) if B is easily negatable. */
9642 if (negate_expr_p (arg1)
9643 && ((FLOAT_TYPE_P (type)
9644 /* Avoid this transformation if B is a positive REAL_CST. */
9645 && (TREE_CODE (arg1) != REAL_CST
9646 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9647 || INTEGRAL_TYPE_P (type)))
9648 return fold_build2 (PLUS_EXPR, type,
9649 fold_convert (type, arg0),
9650 fold_convert (type, negate_expr (arg1)));
9652 /* Try folding difference of addresses. */
9656 if ((TREE_CODE (arg0) == ADDR_EXPR
9657 || TREE_CODE (arg1) == ADDR_EXPR)
9658 && ptr_difference_const (arg0, arg1, &diff))
9659 return build_int_cst_type (type, diff);
9662 /* Fold &a[i] - &a[j] to i-j. */
9663 if (TREE_CODE (arg0) == ADDR_EXPR
9664 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9665 && TREE_CODE (arg1) == ADDR_EXPR
9666 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9668 tree aref0 = TREE_OPERAND (arg0, 0);
9669 tree aref1 = TREE_OPERAND (arg1, 0);
9670 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9671 TREE_OPERAND (aref1, 0), 0))
9673 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9674 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9675 tree esz = array_ref_element_size (aref0);
9676 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9677 return fold_build2 (MULT_EXPR, type, diff,
9678 fold_convert (type, esz));
9683 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9684 of the array. Loop optimizer sometimes produce this type of
9686 if (TREE_CODE (arg0) == ADDR_EXPR)
9688 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9690 return fold_convert (type, tem);
9693 if (flag_unsafe_math_optimizations
9694 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9695 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9696 && (tem = distribute_real_division (code, type, arg0, arg1)))
9699 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9701 if ((TREE_CODE (arg0) == MULT_EXPR
9702 || TREE_CODE (arg1) == MULT_EXPR)
9703 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9705 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9713 /* (-A) * (-B) -> A * B */
9714 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9715 return fold_build2 (MULT_EXPR, type,
9716 fold_convert (type, TREE_OPERAND (arg0, 0)),
9717 fold_convert (type, negate_expr (arg1)));
9718 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9719 return fold_build2 (MULT_EXPR, type,
9720 fold_convert (type, negate_expr (arg0)),
9721 fold_convert (type, TREE_OPERAND (arg1, 0)));
9723 if (! FLOAT_TYPE_P (type))
9725 if (integer_zerop (arg1))
9726 return omit_one_operand (type, arg1, arg0);
9727 if (integer_onep (arg1))
9728 return non_lvalue (fold_convert (type, arg0));
9729 /* Transform x * -1 into -x. */
9730 if (integer_all_onesp (arg1))
9731 return fold_convert (type, negate_expr (arg0));
9732 /* Transform x * -C into -x * C if x is easily negatable. */
9733 if (TREE_CODE (arg1) == INTEGER_CST
9734 && tree_int_cst_sgn (arg1) == -1
9735 && negate_expr_p (arg0)
9736 && (tem = negate_expr (arg1)) != arg1
9737 && !TREE_OVERFLOW (tem))
9738 return fold_build2 (MULT_EXPR, type,
9739 negate_expr (arg0), tem);
9741 /* (a * (1 << b)) is (a << b) */
9742 if (TREE_CODE (arg1) == LSHIFT_EXPR
9743 && integer_onep (TREE_OPERAND (arg1, 0)))
9744 return fold_build2 (LSHIFT_EXPR, type, arg0,
9745 TREE_OPERAND (arg1, 1));
9746 if (TREE_CODE (arg0) == LSHIFT_EXPR
9747 && integer_onep (TREE_OPERAND (arg0, 0)))
9748 return fold_build2 (LSHIFT_EXPR, type, arg1,
9749 TREE_OPERAND (arg0, 1));
9751 strict_overflow_p = false;
9752 if (TREE_CODE (arg1) == INTEGER_CST
9753 && 0 != (tem = extract_muldiv (op0,
9754 fold_convert (type, arg1),
9756 &strict_overflow_p)))
9758 if (strict_overflow_p)
9759 fold_overflow_warning (("assuming signed overflow does not "
9760 "occur when simplifying "
9762 WARN_STRICT_OVERFLOW_MISC);
9763 return fold_convert (type, tem);
9766 /* Optimize z * conj(z) for integer complex numbers. */
9767 if (TREE_CODE (arg0) == CONJ_EXPR
9768 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9769 return fold_mult_zconjz (type, arg1);
9770 if (TREE_CODE (arg1) == CONJ_EXPR
9771 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9772 return fold_mult_zconjz (type, arg0);
9776 /* Maybe fold x * 0 to 0. The expressions aren't the same
9777 when x is NaN, since x * 0 is also NaN. Nor are they the
9778 same in modes with signed zeros, since multiplying a
9779 negative value by 0 gives -0, not +0. */
9780 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9781 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9782 && real_zerop (arg1))
9783 return omit_one_operand (type, arg1, arg0);
9784 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9785 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9786 && real_onep (arg1))
9787 return non_lvalue (fold_convert (type, arg0));
9789 /* Transform x * -1.0 into -x. */
9790 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9791 && real_minus_onep (arg1))
9792 return fold_convert (type, negate_expr (arg0));
9794 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9795 if (flag_unsafe_math_optimizations
9796 && TREE_CODE (arg0) == RDIV_EXPR
9797 && TREE_CODE (arg1) == REAL_CST
9798 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9800 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9803 return fold_build2 (RDIV_EXPR, type, tem,
9804 TREE_OPERAND (arg0, 1));
9807 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9808 if (operand_equal_p (arg0, arg1, 0))
9810 tree tem = fold_strip_sign_ops (arg0);
9811 if (tem != NULL_TREE)
9813 tem = fold_convert (type, tem);
9814 return fold_build2 (MULT_EXPR, type, tem, tem);
9818 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9819 This is not the same for NaNs or if signed zeros are
9821 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9822 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9823 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9824 && TREE_CODE (arg1) == COMPLEX_CST
9825 && real_zerop (TREE_REALPART (arg1)))
9827 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9828 if (real_onep (TREE_IMAGPART (arg1)))
9829 return fold_build2 (COMPLEX_EXPR, type,
9830 negate_expr (fold_build1 (IMAGPART_EXPR,
9832 fold_build1 (REALPART_EXPR, rtype, arg0));
9833 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9834 return fold_build2 (COMPLEX_EXPR, type,
9835 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9836 negate_expr (fold_build1 (REALPART_EXPR,
9840 /* Optimize z * conj(z) for floating point complex numbers.
9841 Guarded by flag_unsafe_math_optimizations as non-finite
9842 imaginary components don't produce scalar results. */
9843 if (flag_unsafe_math_optimizations
9844 && TREE_CODE (arg0) == CONJ_EXPR
9845 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9846 return fold_mult_zconjz (type, arg1);
9847 if (flag_unsafe_math_optimizations
9848 && TREE_CODE (arg1) == CONJ_EXPR
9849 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9850 return fold_mult_zconjz (type, arg0);
9852 if (flag_unsafe_math_optimizations)
9854 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9855 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9857 /* Optimizations of root(...)*root(...). */
9858 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9861 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9862 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9864 /* Optimize sqrt(x)*sqrt(x) as x. */
9865 if (BUILTIN_SQRT_P (fcode0)
9866 && operand_equal_p (arg00, arg10, 0)
9867 && ! HONOR_SNANS (TYPE_MODE (type)))
9870 /* Optimize root(x)*root(y) as root(x*y). */
9871 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9872 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9873 return build_call_expr (rootfn, 1, arg);
9876 /* Optimize expN(x)*expN(y) as expN(x+y). */
9877 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9879 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9880 tree arg = fold_build2 (PLUS_EXPR, type,
9881 CALL_EXPR_ARG (arg0, 0),
9882 CALL_EXPR_ARG (arg1, 0));
9883 return build_call_expr (expfn, 1, arg);
9886 /* Optimizations of pow(...)*pow(...). */
9887 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9888 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9889 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9891 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9892 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9893 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9894 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9896 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9897 if (operand_equal_p (arg01, arg11, 0))
9899 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9900 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9901 return build_call_expr (powfn, 2, arg, arg01);
9904 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9905 if (operand_equal_p (arg00, arg10, 0))
9907 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9908 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9909 return build_call_expr (powfn, 2, arg00, arg);
9913 /* Optimize tan(x)*cos(x) as sin(x). */
9914 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9915 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9916 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9917 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9918 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9919 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9920 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
9921 CALL_EXPR_ARG (arg1, 0), 0))
9923 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9925 if (sinfn != NULL_TREE)
9926 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
9929 /* Optimize x*pow(x,c) as pow(x,c+1). */
9930 if (fcode1 == BUILT_IN_POW
9931 || fcode1 == BUILT_IN_POWF
9932 || fcode1 == BUILT_IN_POWL)
9934 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9935 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9936 if (TREE_CODE (arg11) == REAL_CST
9937 && !TREE_OVERFLOW (arg11)
9938 && operand_equal_p (arg0, arg10, 0))
9940 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
9944 c = TREE_REAL_CST (arg11);
9945 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9946 arg = build_real (type, c);
9947 return build_call_expr (powfn, 2, arg0, arg);
9951 /* Optimize pow(x,c)*x as pow(x,c+1). */
9952 if (fcode0 == BUILT_IN_POW
9953 || fcode0 == BUILT_IN_POWF
9954 || fcode0 == BUILT_IN_POWL)
9956 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9957 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9958 if (TREE_CODE (arg01) == REAL_CST
9959 && !TREE_OVERFLOW (arg01)
9960 && operand_equal_p (arg1, arg00, 0))
9962 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9966 c = TREE_REAL_CST (arg01);
9967 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9968 arg = build_real (type, c);
9969 return build_call_expr (powfn, 2, arg1, arg);
9973 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9975 && operand_equal_p (arg0, arg1, 0))
9977 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9981 tree arg = build_real (type, dconst2);
9982 return build_call_expr (powfn, 2, arg0, arg);
9991 if (integer_all_onesp (arg1))
9992 return omit_one_operand (type, arg1, arg0);
9993 if (integer_zerop (arg1))
9994 return non_lvalue (fold_convert (type, arg0));
9995 if (operand_equal_p (arg0, arg1, 0))
9996 return non_lvalue (fold_convert (type, arg0));
9999 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10000 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10002 t1 = build_int_cst_type (type, -1);
10003 return omit_one_operand (type, t1, arg1);
10006 /* X | ~X is -1. */
10007 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10008 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10010 t1 = build_int_cst_type (type, -1);
10011 return omit_one_operand (type, t1, arg0);
10014 /* Canonicalize (X & C1) | C2. */
10015 if (TREE_CODE (arg0) == BIT_AND_EXPR
10016 && TREE_CODE (arg1) == INTEGER_CST
10017 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10019 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
10020 int width = TYPE_PRECISION (type);
10021 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10022 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10023 hi2 = TREE_INT_CST_HIGH (arg1);
10024 lo2 = TREE_INT_CST_LOW (arg1);
10026 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10027 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10028 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10030 if (width > HOST_BITS_PER_WIDE_INT)
10032 mhi = (unsigned HOST_WIDE_INT) -1
10033 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10039 mlo = (unsigned HOST_WIDE_INT) -1
10040 >> (HOST_BITS_PER_WIDE_INT - width);
10043 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10044 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10045 return fold_build2 (BIT_IOR_EXPR, type,
10046 TREE_OPERAND (arg0, 0), arg1);
10048 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
10051 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
10052 return fold_build2 (BIT_IOR_EXPR, type,
10053 fold_build2 (BIT_AND_EXPR, type,
10054 TREE_OPERAND (arg0, 0),
10055 build_int_cst_wide (type,
10061 /* (X & Y) | Y is (X, Y). */
10062 if (TREE_CODE (arg0) == BIT_AND_EXPR
10063 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10064 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10065 /* (X & Y) | X is (Y, X). */
10066 if (TREE_CODE (arg0) == BIT_AND_EXPR
10067 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10068 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10069 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10070 /* X | (X & Y) is (Y, X). */
10071 if (TREE_CODE (arg1) == BIT_AND_EXPR
10072 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10073 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10074 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10075 /* X | (Y & X) is (Y, X). */
10076 if (TREE_CODE (arg1) == BIT_AND_EXPR
10077 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10078 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10079 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10081 t1 = distribute_bit_expr (code, type, arg0, arg1);
10082 if (t1 != NULL_TREE)
10085 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10087 This results in more efficient code for machines without a NAND
10088 instruction. Combine will canonicalize to the first form
10089 which will allow use of NAND instructions provided by the
10090 backend if they exist. */
10091 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10092 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10094 return fold_build1 (BIT_NOT_EXPR, type,
10095 build2 (BIT_AND_EXPR, type,
10096 TREE_OPERAND (arg0, 0),
10097 TREE_OPERAND (arg1, 0)));
10100 /* See if this can be simplified into a rotate first. If that
10101 is unsuccessful continue in the association code. */
10105 if (integer_zerop (arg1))
10106 return non_lvalue (fold_convert (type, arg0));
10107 if (integer_all_onesp (arg1))
10108 return fold_build1 (BIT_NOT_EXPR, type, arg0);
10109 if (operand_equal_p (arg0, arg1, 0))
10110 return omit_one_operand (type, integer_zero_node, arg0);
10112 /* ~X ^ X is -1. */
10113 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10114 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10116 t1 = build_int_cst_type (type, -1);
10117 return omit_one_operand (type, t1, arg1);
10120 /* X ^ ~X is -1. */
10121 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10122 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10124 t1 = build_int_cst_type (type, -1);
10125 return omit_one_operand (type, t1, arg0);
10128 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10129 with a constant, and the two constants have no bits in common,
10130 we should treat this as a BIT_IOR_EXPR since this may produce more
10131 simplifications. */
10132 if (TREE_CODE (arg0) == BIT_AND_EXPR
10133 && TREE_CODE (arg1) == BIT_AND_EXPR
10134 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10135 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10136 && integer_zerop (const_binop (BIT_AND_EXPR,
10137 TREE_OPERAND (arg0, 1),
10138 TREE_OPERAND (arg1, 1), 0)))
10140 code = BIT_IOR_EXPR;
10144 /* (X | Y) ^ X -> Y & ~ X*/
10145 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10146 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10148 tree t2 = TREE_OPERAND (arg0, 1);
10149 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10151 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10152 fold_convert (type, t1));
10156 /* (Y | X) ^ X -> Y & ~ X*/
10157 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10158 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10160 tree t2 = TREE_OPERAND (arg0, 0);
10161 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10163 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10164 fold_convert (type, t1));
10168 /* X ^ (X | Y) -> Y & ~ X*/
10169 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10170 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10172 tree t2 = TREE_OPERAND (arg1, 1);
10173 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10175 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10176 fold_convert (type, t1));
10180 /* X ^ (Y | X) -> Y & ~ X*/
10181 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10182 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10184 tree t2 = TREE_OPERAND (arg1, 0);
10185 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10187 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10188 fold_convert (type, t1));
10192 /* Convert ~X ^ ~Y to X ^ Y. */
10193 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10194 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10195 return fold_build2 (code, type,
10196 fold_convert (type, TREE_OPERAND (arg0, 0)),
10197 fold_convert (type, TREE_OPERAND (arg1, 0)));
10199 /* Convert ~X ^ C to X ^ ~C. */
10200 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10201 && TREE_CODE (arg1) == INTEGER_CST)
10202 return fold_build2 (code, type,
10203 fold_convert (type, TREE_OPERAND (arg0, 0)),
10204 fold_build1 (BIT_NOT_EXPR, type, arg1));
10206 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10207 if (TREE_CODE (arg0) == BIT_AND_EXPR
10208 && integer_onep (TREE_OPERAND (arg0, 1))
10209 && integer_onep (arg1))
10210 return fold_build2 (EQ_EXPR, type, arg0,
10211 build_int_cst (TREE_TYPE (arg0), 0));
10213 /* Fold (X & Y) ^ Y as ~X & Y. */
10214 if (TREE_CODE (arg0) == BIT_AND_EXPR
10215 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10217 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10218 return fold_build2 (BIT_AND_EXPR, type,
10219 fold_build1 (BIT_NOT_EXPR, type, tem),
10220 fold_convert (type, arg1));
10222 /* Fold (X & Y) ^ X as ~Y & X. */
10223 if (TREE_CODE (arg0) == BIT_AND_EXPR
10224 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10225 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10227 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10228 return fold_build2 (BIT_AND_EXPR, type,
10229 fold_build1 (BIT_NOT_EXPR, type, tem),
10230 fold_convert (type, arg1));
10232 /* Fold X ^ (X & Y) as X & ~Y. */
10233 if (TREE_CODE (arg1) == BIT_AND_EXPR
10234 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10236 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10237 return fold_build2 (BIT_AND_EXPR, type,
10238 fold_convert (type, arg0),
10239 fold_build1 (BIT_NOT_EXPR, type, tem));
10241 /* Fold X ^ (Y & X) as ~Y & X. */
10242 if (TREE_CODE (arg1) == BIT_AND_EXPR
10243 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10244 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10246 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10247 return fold_build2 (BIT_AND_EXPR, type,
10248 fold_build1 (BIT_NOT_EXPR, type, tem),
10249 fold_convert (type, arg0));
10252 /* See if this can be simplified into a rotate first. If that
10253 is unsuccessful continue in the association code. */
10257 if (integer_all_onesp (arg1))
10258 return non_lvalue (fold_convert (type, arg0));
10259 if (integer_zerop (arg1))
10260 return omit_one_operand (type, arg1, arg0);
10261 if (operand_equal_p (arg0, arg1, 0))
10262 return non_lvalue (fold_convert (type, arg0));
10264 /* ~X & X is always zero. */
10265 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10266 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10267 return omit_one_operand (type, integer_zero_node, arg1);
10269 /* X & ~X is always zero. */
10270 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10271 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10272 return omit_one_operand (type, integer_zero_node, arg0);
10274 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10275 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10276 && TREE_CODE (arg1) == INTEGER_CST
10277 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10278 return fold_build2 (BIT_IOR_EXPR, type,
10279 fold_build2 (BIT_AND_EXPR, type,
10280 TREE_OPERAND (arg0, 0), arg1),
10281 fold_build2 (BIT_AND_EXPR, type,
10282 TREE_OPERAND (arg0, 1), arg1));
10284 /* (X | Y) & Y is (X, Y). */
10285 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10286 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10287 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10288 /* (X | Y) & X is (Y, X). */
10289 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10290 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10291 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10292 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10293 /* X & (X | Y) is (Y, X). */
10294 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10295 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10296 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10297 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10298 /* X & (Y | X) is (Y, X). */
10299 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10300 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10301 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10302 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10304 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10305 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10306 && integer_onep (TREE_OPERAND (arg0, 1))
10307 && integer_onep (arg1))
10309 tem = TREE_OPERAND (arg0, 0);
10310 return fold_build2 (EQ_EXPR, type,
10311 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10312 build_int_cst (TREE_TYPE (tem), 1)),
10313 build_int_cst (TREE_TYPE (tem), 0));
10315 /* Fold ~X & 1 as (X & 1) == 0. */
10316 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10317 && integer_onep (arg1))
10319 tem = TREE_OPERAND (arg0, 0);
10320 return fold_build2 (EQ_EXPR, type,
10321 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10322 build_int_cst (TREE_TYPE (tem), 1)),
10323 build_int_cst (TREE_TYPE (tem), 0));
10326 /* Fold (X ^ Y) & Y as ~X & Y. */
10327 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10328 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10330 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10331 return fold_build2 (BIT_AND_EXPR, type,
10332 fold_build1 (BIT_NOT_EXPR, type, tem),
10333 fold_convert (type, arg1));
10335 /* Fold (X ^ Y) & X as ~Y & X. */
10336 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10337 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10338 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10340 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10341 return fold_build2 (BIT_AND_EXPR, type,
10342 fold_build1 (BIT_NOT_EXPR, type, tem),
10343 fold_convert (type, arg1));
10345 /* Fold X & (X ^ Y) as X & ~Y. */
10346 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10347 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10349 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10350 return fold_build2 (BIT_AND_EXPR, type,
10351 fold_convert (type, arg0),
10352 fold_build1 (BIT_NOT_EXPR, type, tem));
10354 /* Fold X & (Y ^ X) as ~Y & X. */
10355 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10356 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10357 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10359 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10360 return fold_build2 (BIT_AND_EXPR, type,
10361 fold_build1 (BIT_NOT_EXPR, type, tem),
10362 fold_convert (type, arg0));
10365 t1 = distribute_bit_expr (code, type, arg0, arg1);
10366 if (t1 != NULL_TREE)
10368 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10369 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10370 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10373 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10375 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10376 && (~TREE_INT_CST_LOW (arg1)
10377 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10378 return fold_convert (type, TREE_OPERAND (arg0, 0));
10381 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10383 This results in more efficient code for machines without a NOR
10384 instruction. Combine will canonicalize to the first form
10385 which will allow use of NOR instructions provided by the
10386 backend if they exist. */
10387 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10388 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10390 return fold_build1 (BIT_NOT_EXPR, type,
10391 build2 (BIT_IOR_EXPR, type,
10392 TREE_OPERAND (arg0, 0),
10393 TREE_OPERAND (arg1, 0)));
10399 /* Don't touch a floating-point divide by zero unless the mode
10400 of the constant can represent infinity. */
10401 if (TREE_CODE (arg1) == REAL_CST
10402 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10403 && real_zerop (arg1))
10406 /* Optimize A / A to 1.0 if we don't care about
10407 NaNs or Infinities. Skip the transformation
10408 for non-real operands. */
10409 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10410 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10411 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10412 && operand_equal_p (arg0, arg1, 0))
10414 tree r = build_real (TREE_TYPE (arg0), dconst1);
10416 return omit_two_operands (type, r, arg0, arg1);
10419 /* The complex version of the above A / A optimization. */
10420 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10421 && operand_equal_p (arg0, arg1, 0))
10423 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10424 if (! HONOR_NANS (TYPE_MODE (elem_type))
10425 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10427 tree r = build_real (elem_type, dconst1);
10428 /* omit_two_operands will call fold_convert for us. */
10429 return omit_two_operands (type, r, arg0, arg1);
10433 /* (-A) / (-B) -> A / B */
10434 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10435 return fold_build2 (RDIV_EXPR, type,
10436 TREE_OPERAND (arg0, 0),
10437 negate_expr (arg1));
10438 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10439 return fold_build2 (RDIV_EXPR, type,
10440 negate_expr (arg0),
10441 TREE_OPERAND (arg1, 0));
10443 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10444 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10445 && real_onep (arg1))
10446 return non_lvalue (fold_convert (type, arg0));
10448 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10449 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10450 && real_minus_onep (arg1))
10451 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10453 /* If ARG1 is a constant, we can convert this to a multiply by the
10454 reciprocal. This does not have the same rounding properties,
10455 so only do this if -funsafe-math-optimizations. We can actually
10456 always safely do it if ARG1 is a power of two, but it's hard to
10457 tell if it is or not in a portable manner. */
10458 if (TREE_CODE (arg1) == REAL_CST)
10460 if (flag_unsafe_math_optimizations
10461 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10463 return fold_build2 (MULT_EXPR, type, arg0, tem);
10464 /* Find the reciprocal if optimizing and the result is exact. */
10468 r = TREE_REAL_CST (arg1);
10469 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10471 tem = build_real (type, r);
10472 return fold_build2 (MULT_EXPR, type,
10473 fold_convert (type, arg0), tem);
10477 /* Convert A/B/C to A/(B*C). */
10478 if (flag_unsafe_math_optimizations
10479 && TREE_CODE (arg0) == RDIV_EXPR)
10480 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10481 fold_build2 (MULT_EXPR, type,
10482 TREE_OPERAND (arg0, 1), arg1));
10484 /* Convert A/(B/C) to (A/B)*C. */
10485 if (flag_unsafe_math_optimizations
10486 && TREE_CODE (arg1) == RDIV_EXPR)
10487 return fold_build2 (MULT_EXPR, type,
10488 fold_build2 (RDIV_EXPR, type, arg0,
10489 TREE_OPERAND (arg1, 0)),
10490 TREE_OPERAND (arg1, 1));
10492 /* Convert C1/(X*C2) into (C1/C2)/X. */
10493 if (flag_unsafe_math_optimizations
10494 && TREE_CODE (arg1) == MULT_EXPR
10495 && TREE_CODE (arg0) == REAL_CST
10496 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10498 tree tem = const_binop (RDIV_EXPR, arg0,
10499 TREE_OPERAND (arg1, 1), 0);
10501 return fold_build2 (RDIV_EXPR, type, tem,
10502 TREE_OPERAND (arg1, 0));
10505 if (flag_unsafe_math_optimizations)
10507 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10508 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10510 /* Optimize sin(x)/cos(x) as tan(x). */
10511 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10512 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10513 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10514 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10515 CALL_EXPR_ARG (arg1, 0), 0))
10517 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10519 if (tanfn != NULL_TREE)
10520 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10523 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10524 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10525 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10526 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10527 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10528 CALL_EXPR_ARG (arg1, 0), 0))
10530 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10532 if (tanfn != NULL_TREE)
10534 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10535 return fold_build2 (RDIV_EXPR, type,
10536 build_real (type, dconst1), tmp);
10540 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10541 NaNs or Infinities. */
10542 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10543 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10544 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10546 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10547 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10549 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10550 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10551 && operand_equal_p (arg00, arg01, 0))
10553 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10555 if (cosfn != NULL_TREE)
10556 return build_call_expr (cosfn, 1, arg00);
10560 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10561 NaNs or Infinities. */
10562 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10563 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10564 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10566 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10567 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10569 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10570 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10571 && operand_equal_p (arg00, arg01, 0))
10573 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10575 if (cosfn != NULL_TREE)
10577 tree tmp = build_call_expr (cosfn, 1, arg00);
10578 return fold_build2 (RDIV_EXPR, type,
10579 build_real (type, dconst1),
10585 /* Optimize pow(x,c)/x as pow(x,c-1). */
10586 if (fcode0 == BUILT_IN_POW
10587 || fcode0 == BUILT_IN_POWF
10588 || fcode0 == BUILT_IN_POWL)
10590 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10591 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10592 if (TREE_CODE (arg01) == REAL_CST
10593 && !TREE_OVERFLOW (arg01)
10594 && operand_equal_p (arg1, arg00, 0))
10596 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10600 c = TREE_REAL_CST (arg01);
10601 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10602 arg = build_real (type, c);
10603 return build_call_expr (powfn, 2, arg1, arg);
10607 /* Optimize x/expN(y) into x*expN(-y). */
10608 if (BUILTIN_EXPONENT_P (fcode1))
10610 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10611 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10612 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
10613 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10616 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10617 if (fcode1 == BUILT_IN_POW
10618 || fcode1 == BUILT_IN_POWF
10619 || fcode1 == BUILT_IN_POWL)
10621 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10622 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10623 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10624 tree neg11 = fold_convert (type, negate_expr (arg11));
10625 arg1 = build_call_expr (powfn, 2, arg10, neg11);
10626 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10631 case TRUNC_DIV_EXPR:
10632 case FLOOR_DIV_EXPR:
10633 /* Simplify A / (B << N) where A and B are positive and B is
10634 a power of 2, to A >> (N + log2(B)). */
10635 strict_overflow_p = false;
10636 if (TREE_CODE (arg1) == LSHIFT_EXPR
10637 && (TYPE_UNSIGNED (type)
10638 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10640 tree sval = TREE_OPERAND (arg1, 0);
10641 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10643 tree sh_cnt = TREE_OPERAND (arg1, 1);
10644 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10646 if (strict_overflow_p)
10647 fold_overflow_warning (("assuming signed overflow does not "
10648 "occur when simplifying A / (B << N)"),
10649 WARN_STRICT_OVERFLOW_MISC);
10651 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10652 sh_cnt, build_int_cst (NULL_TREE, pow2));
10653 return fold_build2 (RSHIFT_EXPR, type,
10654 fold_convert (type, arg0), sh_cnt);
10659 case ROUND_DIV_EXPR:
10660 case CEIL_DIV_EXPR:
10661 case EXACT_DIV_EXPR:
10662 if (integer_onep (arg1))
10663 return non_lvalue (fold_convert (type, arg0));
10664 if (integer_zerop (arg1))
10666 /* X / -1 is -X. */
10667 if (!TYPE_UNSIGNED (type)
10668 && TREE_CODE (arg1) == INTEGER_CST
10669 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10670 && TREE_INT_CST_HIGH (arg1) == -1)
10671 return fold_convert (type, negate_expr (arg0));
10673 /* Convert -A / -B to A / B when the type is signed and overflow is
10675 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10676 && TREE_CODE (arg0) == NEGATE_EXPR
10677 && negate_expr_p (arg1))
10679 if (INTEGRAL_TYPE_P (type))
10680 fold_overflow_warning (("assuming signed overflow does not occur "
10681 "when distributing negation across "
10683 WARN_STRICT_OVERFLOW_MISC);
10684 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10685 negate_expr (arg1));
10687 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10688 && TREE_CODE (arg1) == NEGATE_EXPR
10689 && negate_expr_p (arg0))
10691 if (INTEGRAL_TYPE_P (type))
10692 fold_overflow_warning (("assuming signed overflow does not occur "
10693 "when distributing negation across "
10695 WARN_STRICT_OVERFLOW_MISC);
10696 return fold_build2 (code, type, negate_expr (arg0),
10697 TREE_OPERAND (arg1, 0));
10700 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10701 operation, EXACT_DIV_EXPR.
10703 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10704 At one time others generated faster code, it's not clear if they do
10705 after the last round to changes to the DIV code in expmed.c. */
10706 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10707 && multiple_of_p (type, arg0, arg1))
10708 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10710 strict_overflow_p = false;
10711 if (TREE_CODE (arg1) == INTEGER_CST
10712 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10713 &strict_overflow_p)))
10715 if (strict_overflow_p)
10716 fold_overflow_warning (("assuming signed overflow does not occur "
10717 "when simplifying division"),
10718 WARN_STRICT_OVERFLOW_MISC);
10719 return fold_convert (type, tem);
10724 case CEIL_MOD_EXPR:
10725 case FLOOR_MOD_EXPR:
10726 case ROUND_MOD_EXPR:
10727 case TRUNC_MOD_EXPR:
10728 /* X % 1 is always zero, but be sure to preserve any side
10730 if (integer_onep (arg1))
10731 return omit_one_operand (type, integer_zero_node, arg0);
10733 /* X % 0, return X % 0 unchanged so that we can get the
10734 proper warnings and errors. */
10735 if (integer_zerop (arg1))
10738 /* 0 % X is always zero, but be sure to preserve any side
10739 effects in X. Place this after checking for X == 0. */
10740 if (integer_zerop (arg0))
10741 return omit_one_operand (type, integer_zero_node, arg1);
10743 /* X % -1 is zero. */
10744 if (!TYPE_UNSIGNED (type)
10745 && TREE_CODE (arg1) == INTEGER_CST
10746 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10747 && TREE_INT_CST_HIGH (arg1) == -1)
10748 return omit_one_operand (type, integer_zero_node, arg0);
10750 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10751 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10752 strict_overflow_p = false;
10753 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10754 && (TYPE_UNSIGNED (type)
10755 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10758 /* Also optimize A % (C << N) where C is a power of 2,
10759 to A & ((C << N) - 1). */
10760 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10761 c = TREE_OPERAND (arg1, 0);
10763 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10765 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10766 build_int_cst (TREE_TYPE (arg1), 1));
10767 if (strict_overflow_p)
10768 fold_overflow_warning (("assuming signed overflow does not "
10769 "occur when simplifying "
10770 "X % (power of two)"),
10771 WARN_STRICT_OVERFLOW_MISC);
10772 return fold_build2 (BIT_AND_EXPR, type,
10773 fold_convert (type, arg0),
10774 fold_convert (type, mask));
10778 /* X % -C is the same as X % C. */
10779 if (code == TRUNC_MOD_EXPR
10780 && !TYPE_UNSIGNED (type)
10781 && TREE_CODE (arg1) == INTEGER_CST
10782 && !TREE_OVERFLOW (arg1)
10783 && TREE_INT_CST_HIGH (arg1) < 0
10784 && !TYPE_OVERFLOW_TRAPS (type)
10785 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10786 && !sign_bit_p (arg1, arg1))
10787 return fold_build2 (code, type, fold_convert (type, arg0),
10788 fold_convert (type, negate_expr (arg1)));
10790 /* X % -Y is the same as X % Y. */
10791 if (code == TRUNC_MOD_EXPR
10792 && !TYPE_UNSIGNED (type)
10793 && TREE_CODE (arg1) == NEGATE_EXPR
10794 && !TYPE_OVERFLOW_TRAPS (type))
10795 return fold_build2 (code, type, fold_convert (type, arg0),
10796 fold_convert (type, TREE_OPERAND (arg1, 0)));
10798 if (TREE_CODE (arg1) == INTEGER_CST
10799 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10800 &strict_overflow_p)))
10802 if (strict_overflow_p)
10803 fold_overflow_warning (("assuming signed overflow does not occur "
10804 "when simplifying modulos"),
10805 WARN_STRICT_OVERFLOW_MISC);
10806 return fold_convert (type, tem);
10813 if (integer_all_onesp (arg0))
10814 return omit_one_operand (type, arg0, arg1);
10818 /* Optimize -1 >> x for arithmetic right shifts. */
10819 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10820 return omit_one_operand (type, arg0, arg1);
10821 /* ... fall through ... */
10825 if (integer_zerop (arg1))
10826 return non_lvalue (fold_convert (type, arg0));
10827 if (integer_zerop (arg0))
10828 return omit_one_operand (type, arg0, arg1);
10830 /* Since negative shift count is not well-defined,
10831 don't try to compute it in the compiler. */
10832 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10835 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10836 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10837 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10838 && host_integerp (TREE_OPERAND (arg0, 1), false)
10839 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10841 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10842 + TREE_INT_CST_LOW (arg1));
10844 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10845 being well defined. */
10846 if (low >= TYPE_PRECISION (type))
10848 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10849 low = low % TYPE_PRECISION (type);
10850 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10851 return build_int_cst (type, 0);
10853 low = TYPE_PRECISION (type) - 1;
10856 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10857 build_int_cst (type, low));
10860 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10861 into x & ((unsigned)-1 >> c) for unsigned types. */
10862 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10863 || (TYPE_UNSIGNED (type)
10864 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10865 && host_integerp (arg1, false)
10866 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10867 && host_integerp (TREE_OPERAND (arg0, 1), false)
10868 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10870 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10871 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10877 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10879 lshift = build_int_cst (type, -1);
10880 lshift = int_const_binop (code, lshift, arg1, 0);
10882 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10886 /* Rewrite an LROTATE_EXPR by a constant into an
10887 RROTATE_EXPR by a new constant. */
10888 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10890 tree tem = build_int_cst (TREE_TYPE (arg1),
10891 GET_MODE_BITSIZE (TYPE_MODE (type)));
10892 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10893 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10896 /* If we have a rotate of a bit operation with the rotate count and
10897 the second operand of the bit operation both constant,
10898 permute the two operations. */
10899 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10900 && (TREE_CODE (arg0) == BIT_AND_EXPR
10901 || TREE_CODE (arg0) == BIT_IOR_EXPR
10902 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10903 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10904 return fold_build2 (TREE_CODE (arg0), type,
10905 fold_build2 (code, type,
10906 TREE_OPERAND (arg0, 0), arg1),
10907 fold_build2 (code, type,
10908 TREE_OPERAND (arg0, 1), arg1));
10910 /* Two consecutive rotates adding up to the width of the mode can
10912 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10913 && TREE_CODE (arg0) == RROTATE_EXPR
10914 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10915 && TREE_INT_CST_HIGH (arg1) == 0
10916 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10917 && ((TREE_INT_CST_LOW (arg1)
10918 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10919 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10920 return TREE_OPERAND (arg0, 0);
10925 if (operand_equal_p (arg0, arg1, 0))
10926 return omit_one_operand (type, arg0, arg1);
10927 if (INTEGRAL_TYPE_P (type)
10928 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10929 return omit_one_operand (type, arg1, arg0);
10930 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10936 if (operand_equal_p (arg0, arg1, 0))
10937 return omit_one_operand (type, arg0, arg1);
10938 if (INTEGRAL_TYPE_P (type)
10939 && TYPE_MAX_VALUE (type)
10940 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10941 return omit_one_operand (type, arg1, arg0);
10942 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10947 case TRUTH_ANDIF_EXPR:
10948 /* Note that the operands of this must be ints
10949 and their values must be 0 or 1.
10950 ("true" is a fixed value perhaps depending on the language.) */
10951 /* If first arg is constant zero, return it. */
10952 if (integer_zerop (arg0))
10953 return fold_convert (type, arg0);
10954 case TRUTH_AND_EXPR:
10955 /* If either arg is constant true, drop it. */
10956 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10957 return non_lvalue (fold_convert (type, arg1));
10958 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10959 /* Preserve sequence points. */
10960 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10961 return non_lvalue (fold_convert (type, arg0));
10962 /* If second arg is constant zero, result is zero, but first arg
10963 must be evaluated. */
10964 if (integer_zerop (arg1))
10965 return omit_one_operand (type, arg1, arg0);
10966 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10967 case will be handled here. */
10968 if (integer_zerop (arg0))
10969 return omit_one_operand (type, arg0, arg1);
10971 /* !X && X is always false. */
10972 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10973 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10974 return omit_one_operand (type, integer_zero_node, arg1);
10975 /* X && !X is always false. */
10976 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10977 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10978 return omit_one_operand (type, integer_zero_node, arg0);
10980 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10981 means A >= Y && A != MAX, but in this case we know that
10984 if (!TREE_SIDE_EFFECTS (arg0)
10985 && !TREE_SIDE_EFFECTS (arg1))
10987 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10988 if (tem && !operand_equal_p (tem, arg0, 0))
10989 return fold_build2 (code, type, tem, arg1);
10991 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10992 if (tem && !operand_equal_p (tem, arg1, 0))
10993 return fold_build2 (code, type, arg0, tem);
10997 /* We only do these simplifications if we are optimizing. */
11001 /* Check for things like (A || B) && (A || C). We can convert this
11002 to A || (B && C). Note that either operator can be any of the four
11003 truth and/or operations and the transformation will still be
11004 valid. Also note that we only care about order for the
11005 ANDIF and ORIF operators. If B contains side effects, this
11006 might change the truth-value of A. */
11007 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11008 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11009 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11010 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11011 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11012 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11014 tree a00 = TREE_OPERAND (arg0, 0);
11015 tree a01 = TREE_OPERAND (arg0, 1);
11016 tree a10 = TREE_OPERAND (arg1, 0);
11017 tree a11 = TREE_OPERAND (arg1, 1);
11018 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11019 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11020 && (code == TRUTH_AND_EXPR
11021 || code == TRUTH_OR_EXPR));
11023 if (operand_equal_p (a00, a10, 0))
11024 return fold_build2 (TREE_CODE (arg0), type, a00,
11025 fold_build2 (code, type, a01, a11));
11026 else if (commutative && operand_equal_p (a00, a11, 0))
11027 return fold_build2 (TREE_CODE (arg0), type, a00,
11028 fold_build2 (code, type, a01, a10));
11029 else if (commutative && operand_equal_p (a01, a10, 0))
11030 return fold_build2 (TREE_CODE (arg0), type, a01,
11031 fold_build2 (code, type, a00, a11));
11033 /* This case is tricky because we must either have commutative
11034 operators or else A10 must not have side-effects. */
11036 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11037 && operand_equal_p (a01, a11, 0))
11038 return fold_build2 (TREE_CODE (arg0), type,
11039 fold_build2 (code, type, a00, a10),
11043 /* See if we can build a range comparison. */
11044 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11047 /* Check for the possibility of merging component references. If our
11048 lhs is another similar operation, try to merge its rhs with our
11049 rhs. Then try to merge our lhs and rhs. */
11050 if (TREE_CODE (arg0) == code
11051 && 0 != (tem = fold_truthop (code, type,
11052 TREE_OPERAND (arg0, 1), arg1)))
11053 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11055 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11060 case TRUTH_ORIF_EXPR:
11061 /* Note that the operands of this must be ints
11062 and their values must be 0 or true.
11063 ("true" is a fixed value perhaps depending on the language.) */
11064 /* If first arg is constant true, return it. */
11065 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11066 return fold_convert (type, arg0);
11067 case TRUTH_OR_EXPR:
11068 /* If either arg is constant zero, drop it. */
11069 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11070 return non_lvalue (fold_convert (type, arg1));
11071 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11072 /* Preserve sequence points. */
11073 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11074 return non_lvalue (fold_convert (type, arg0));
11075 /* If second arg is constant true, result is true, but we must
11076 evaluate first arg. */
11077 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11078 return omit_one_operand (type, arg1, arg0);
11079 /* Likewise for first arg, but note this only occurs here for
11081 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11082 return omit_one_operand (type, arg0, arg1);
11084 /* !X || X is always true. */
11085 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11086 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11087 return omit_one_operand (type, integer_one_node, arg1);
11088 /* X || !X is always true. */
11089 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11090 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11091 return omit_one_operand (type, integer_one_node, arg0);
11095 case TRUTH_XOR_EXPR:
11096 /* If the second arg is constant zero, drop it. */
11097 if (integer_zerop (arg1))
11098 return non_lvalue (fold_convert (type, arg0));
11099 /* If the second arg is constant true, this is a logical inversion. */
11100 if (integer_onep (arg1))
11102 /* Only call invert_truthvalue if operand is a truth value. */
11103 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11104 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11106 tem = invert_truthvalue (arg0);
11107 return non_lvalue (fold_convert (type, tem));
11109 /* Identical arguments cancel to zero. */
11110 if (operand_equal_p (arg0, arg1, 0))
11111 return omit_one_operand (type, integer_zero_node, arg0);
11113 /* !X ^ X is always true. */
11114 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11115 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11116 return omit_one_operand (type, integer_one_node, arg1);
11118 /* X ^ !X is always true. */
11119 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11120 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11121 return omit_one_operand (type, integer_one_node, arg0);
11127 tem = fold_comparison (code, type, op0, op1);
11128 if (tem != NULL_TREE)
11131 /* bool_var != 0 becomes bool_var. */
11132 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11133 && code == NE_EXPR)
11134 return non_lvalue (fold_convert (type, arg0));
11136 /* bool_var == 1 becomes bool_var. */
11137 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11138 && code == EQ_EXPR)
11139 return non_lvalue (fold_convert (type, arg0));
11141 /* bool_var != 1 becomes !bool_var. */
11142 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11143 && code == NE_EXPR)
11144 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11146 /* bool_var == 0 becomes !bool_var. */
11147 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11148 && code == EQ_EXPR)
11149 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11151 /* If this is an equality comparison of the address of two non-weak,
11152 unaliased symbols neither of which are extern (since we do not
11153 have access to attributes for externs), then we know the result. */
11154 if (TREE_CODE (arg0) == ADDR_EXPR
11155 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11156 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11157 && ! lookup_attribute ("alias",
11158 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11159 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11160 && TREE_CODE (arg1) == ADDR_EXPR
11161 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11162 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11163 && ! lookup_attribute ("alias",
11164 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11165 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11167 /* We know that we're looking at the address of two
11168 non-weak, unaliased, static _DECL nodes.
11170 It is both wasteful and incorrect to call operand_equal_p
11171 to compare the two ADDR_EXPR nodes. It is wasteful in that
11172 all we need to do is test pointer equality for the arguments
11173 to the two ADDR_EXPR nodes. It is incorrect to use
11174 operand_equal_p as that function is NOT equivalent to a
11175 C equality test. It can in fact return false for two
11176 objects which would test as equal using the C equality
11178 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11179 return constant_boolean_node (equal
11180 ? code == EQ_EXPR : code != EQ_EXPR,
11184 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11185 a MINUS_EXPR of a constant, we can convert it into a comparison with
11186 a revised constant as long as no overflow occurs. */
11187 if (TREE_CODE (arg1) == INTEGER_CST
11188 && (TREE_CODE (arg0) == PLUS_EXPR
11189 || TREE_CODE (arg0) == MINUS_EXPR)
11190 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11191 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11192 ? MINUS_EXPR : PLUS_EXPR,
11193 fold_convert (TREE_TYPE (arg0), arg1),
11194 TREE_OPERAND (arg0, 1), 0))
11195 && !TREE_OVERFLOW (tem))
11196 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11198 /* Similarly for a NEGATE_EXPR. */
11199 if (TREE_CODE (arg0) == NEGATE_EXPR
11200 && TREE_CODE (arg1) == INTEGER_CST
11201 && 0 != (tem = negate_expr (arg1))
11202 && TREE_CODE (tem) == INTEGER_CST
11203 && !TREE_OVERFLOW (tem))
11204 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11206 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11207 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11208 && TREE_CODE (arg1) == INTEGER_CST
11209 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11210 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11211 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11212 fold_convert (TREE_TYPE (arg0), arg1),
11213 TREE_OPERAND (arg0, 1)));
11215 /* Transform comparisons of the form X +- C CMP X. */
11216 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11217 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11218 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11219 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11220 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11222 tree cst = TREE_OPERAND (arg0, 1);
11224 if (code == EQ_EXPR
11225 && !integer_zerop (cst))
11226 return omit_two_operands (type, boolean_false_node,
11227 TREE_OPERAND (arg0, 0), arg1);
11229 return omit_two_operands (type, boolean_true_node,
11230 TREE_OPERAND (arg0, 0), arg1);
11233 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11234 for !=. Don't do this for ordered comparisons due to overflow. */
11235 if (TREE_CODE (arg0) == MINUS_EXPR
11236 && integer_zerop (arg1))
11237 return fold_build2 (code, type,
11238 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11240 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11241 if (TREE_CODE (arg0) == ABS_EXPR
11242 && (integer_zerop (arg1) || real_zerop (arg1)))
11243 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11245 /* If this is an EQ or NE comparison with zero and ARG0 is
11246 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11247 two operations, but the latter can be done in one less insn
11248 on machines that have only two-operand insns or on which a
11249 constant cannot be the first operand. */
11250 if (TREE_CODE (arg0) == BIT_AND_EXPR
11251 && integer_zerop (arg1))
11253 tree arg00 = TREE_OPERAND (arg0, 0);
11254 tree arg01 = TREE_OPERAND (arg0, 1);
11255 if (TREE_CODE (arg00) == LSHIFT_EXPR
11256 && integer_onep (TREE_OPERAND (arg00, 0)))
11258 fold_build2 (code, type,
11259 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11260 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11261 arg01, TREE_OPERAND (arg00, 1)),
11262 fold_convert (TREE_TYPE (arg0),
11263 integer_one_node)),
11265 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
11266 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
11268 fold_build2 (code, type,
11269 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11270 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11271 arg00, TREE_OPERAND (arg01, 1)),
11272 fold_convert (TREE_TYPE (arg0),
11273 integer_one_node)),
11277 /* If this is an NE or EQ comparison of zero against the result of a
11278 signed MOD operation whose second operand is a power of 2, make
11279 the MOD operation unsigned since it is simpler and equivalent. */
11280 if (integer_zerop (arg1)
11281 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11282 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11283 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11284 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11285 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11286 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11288 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
11289 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11290 fold_convert (newtype,
11291 TREE_OPERAND (arg0, 0)),
11292 fold_convert (newtype,
11293 TREE_OPERAND (arg0, 1)));
11295 return fold_build2 (code, type, newmod,
11296 fold_convert (newtype, arg1));
11299 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11300 C1 is a valid shift constant, and C2 is a power of two, i.e.
11302 if (TREE_CODE (arg0) == BIT_AND_EXPR
11303 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11304 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11306 && integer_pow2p (TREE_OPERAND (arg0, 1))
11307 && integer_zerop (arg1))
11309 tree itype = TREE_TYPE (arg0);
11310 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11311 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11313 /* Check for a valid shift count. */
11314 if (TREE_INT_CST_HIGH (arg001) == 0
11315 && TREE_INT_CST_LOW (arg001) < prec)
11317 tree arg01 = TREE_OPERAND (arg0, 1);
11318 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11319 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11320 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11321 can be rewritten as (X & (C2 << C1)) != 0. */
11322 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11324 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11325 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11326 return fold_build2 (code, type, tem, arg1);
11328 /* Otherwise, for signed (arithmetic) shifts,
11329 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11330 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11331 else if (!TYPE_UNSIGNED (itype))
11332 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11333 arg000, build_int_cst (itype, 0));
11334 /* Otherwise, of unsigned (logical) shifts,
11335 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11336 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11338 return omit_one_operand (type,
11339 code == EQ_EXPR ? integer_one_node
11340 : integer_zero_node,
11345 /* If this is an NE comparison of zero with an AND of one, remove the
11346 comparison since the AND will give the correct value. */
11347 if (code == NE_EXPR
11348 && integer_zerop (arg1)
11349 && TREE_CODE (arg0) == BIT_AND_EXPR
11350 && integer_onep (TREE_OPERAND (arg0, 1)))
11351 return fold_convert (type, arg0);
11353 /* If we have (A & C) == C where C is a power of 2, convert this into
11354 (A & C) != 0. Similarly for NE_EXPR. */
11355 if (TREE_CODE (arg0) == BIT_AND_EXPR
11356 && integer_pow2p (TREE_OPERAND (arg0, 1))
11357 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11358 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11359 arg0, fold_convert (TREE_TYPE (arg0),
11360 integer_zero_node));
11362 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11363 bit, then fold the expression into A < 0 or A >= 0. */
11364 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11368 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11369 Similarly for NE_EXPR. */
11370 if (TREE_CODE (arg0) == BIT_AND_EXPR
11371 && TREE_CODE (arg1) == INTEGER_CST
11372 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11374 tree notc = fold_build1 (BIT_NOT_EXPR,
11375 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11376 TREE_OPERAND (arg0, 1));
11377 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11379 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11380 if (integer_nonzerop (dandnotc))
11381 return omit_one_operand (type, rslt, arg0);
11384 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11385 Similarly for NE_EXPR. */
11386 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11387 && TREE_CODE (arg1) == INTEGER_CST
11388 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11390 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11391 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11392 TREE_OPERAND (arg0, 1), notd);
11393 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11394 if (integer_nonzerop (candnotd))
11395 return omit_one_operand (type, rslt, arg0);
11398 /* If this is a comparison of a field, we may be able to simplify it. */
11399 if ((TREE_CODE (arg0) == COMPONENT_REF
11400 || TREE_CODE (arg0) == BIT_FIELD_REF)
11401 /* Handle the constant case even without -O
11402 to make sure the warnings are given. */
11403 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11405 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11410 /* Optimize comparisons of strlen vs zero to a compare of the
11411 first character of the string vs zero. To wit,
11412 strlen(ptr) == 0 => *ptr == 0
11413 strlen(ptr) != 0 => *ptr != 0
11414 Other cases should reduce to one of these two (or a constant)
11415 due to the return value of strlen being unsigned. */
11416 if (TREE_CODE (arg0) == CALL_EXPR
11417 && integer_zerop (arg1))
11419 tree fndecl = get_callee_fndecl (arg0);
11422 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11423 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11424 && call_expr_nargs (arg0) == 1
11425 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11427 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
11428 return fold_build2 (code, type, iref,
11429 build_int_cst (TREE_TYPE (iref), 0));
11433 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11434 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11435 if (TREE_CODE (arg0) == RSHIFT_EXPR
11436 && integer_zerop (arg1)
11437 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11439 tree arg00 = TREE_OPERAND (arg0, 0);
11440 tree arg01 = TREE_OPERAND (arg0, 1);
11441 tree itype = TREE_TYPE (arg00);
11442 if (TREE_INT_CST_HIGH (arg01) == 0
11443 && TREE_INT_CST_LOW (arg01)
11444 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11446 if (TYPE_UNSIGNED (itype))
11448 itype = lang_hooks.types.signed_type (itype);
11449 arg00 = fold_convert (itype, arg00);
11451 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11452 type, arg00, build_int_cst (itype, 0));
11456 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11457 if (integer_zerop (arg1)
11458 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11459 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11460 TREE_OPERAND (arg0, 1));
11462 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11463 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11464 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11465 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11466 build_int_cst (TREE_TYPE (arg1), 0));
11467 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11468 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11469 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11470 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11471 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11472 build_int_cst (TREE_TYPE (arg1), 0));
11474 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11475 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11476 && TREE_CODE (arg1) == INTEGER_CST
11477 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11478 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11479 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11480 TREE_OPERAND (arg0, 1), arg1));
11482 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11483 (X & C) == 0 when C is a single bit. */
11484 if (TREE_CODE (arg0) == BIT_AND_EXPR
11485 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11486 && integer_zerop (arg1)
11487 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11489 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11490 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11491 TREE_OPERAND (arg0, 1));
11492 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11496 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11497 constant C is a power of two, i.e. a single bit. */
11498 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11499 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11500 && integer_zerop (arg1)
11501 && integer_pow2p (TREE_OPERAND (arg0, 1))
11502 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11503 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11505 tree arg00 = TREE_OPERAND (arg0, 0);
11506 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11507 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11510 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11511 when is C is a power of two, i.e. a single bit. */
11512 if (TREE_CODE (arg0) == BIT_AND_EXPR
11513 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11514 && integer_zerop (arg1)
11515 && integer_pow2p (TREE_OPERAND (arg0, 1))
11516 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11517 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11519 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11520 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11521 arg000, TREE_OPERAND (arg0, 1));
11522 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11523 tem, build_int_cst (TREE_TYPE (tem), 0));
11526 if (integer_zerop (arg1)
11527 && tree_expr_nonzero_p (arg0))
11529 tree res = constant_boolean_node (code==NE_EXPR, type);
11530 return omit_one_operand (type, res, arg0);
11533 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11534 if (TREE_CODE (arg0) == NEGATE_EXPR
11535 && TREE_CODE (arg1) == NEGATE_EXPR)
11536 return fold_build2 (code, type,
11537 TREE_OPERAND (arg0, 0),
11538 TREE_OPERAND (arg1, 0));
11540 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11541 if (TREE_CODE (arg0) == BIT_AND_EXPR
11542 && TREE_CODE (arg1) == BIT_AND_EXPR)
11544 tree arg00 = TREE_OPERAND (arg0, 0);
11545 tree arg01 = TREE_OPERAND (arg0, 1);
11546 tree arg10 = TREE_OPERAND (arg1, 0);
11547 tree arg11 = TREE_OPERAND (arg1, 1);
11548 tree itype = TREE_TYPE (arg0);
11550 if (operand_equal_p (arg01, arg11, 0))
11551 return fold_build2 (code, type,
11552 fold_build2 (BIT_AND_EXPR, itype,
11553 fold_build2 (BIT_XOR_EXPR, itype,
11556 build_int_cst (itype, 0));
11558 if (operand_equal_p (arg01, arg10, 0))
11559 return fold_build2 (code, type,
11560 fold_build2 (BIT_AND_EXPR, itype,
11561 fold_build2 (BIT_XOR_EXPR, itype,
11564 build_int_cst (itype, 0));
11566 if (operand_equal_p (arg00, arg11, 0))
11567 return fold_build2 (code, type,
11568 fold_build2 (BIT_AND_EXPR, itype,
11569 fold_build2 (BIT_XOR_EXPR, itype,
11572 build_int_cst (itype, 0));
11574 if (operand_equal_p (arg00, arg10, 0))
11575 return fold_build2 (code, type,
11576 fold_build2 (BIT_AND_EXPR, itype,
11577 fold_build2 (BIT_XOR_EXPR, itype,
11580 build_int_cst (itype, 0));
11583 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11584 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11586 tree arg00 = TREE_OPERAND (arg0, 0);
11587 tree arg01 = TREE_OPERAND (arg0, 1);
11588 tree arg10 = TREE_OPERAND (arg1, 0);
11589 tree arg11 = TREE_OPERAND (arg1, 1);
11590 tree itype = TREE_TYPE (arg0);
11592 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11593 operand_equal_p guarantees no side-effects so we don't need
11594 to use omit_one_operand on Z. */
11595 if (operand_equal_p (arg01, arg11, 0))
11596 return fold_build2 (code, type, arg00, arg10);
11597 if (operand_equal_p (arg01, arg10, 0))
11598 return fold_build2 (code, type, arg00, arg11);
11599 if (operand_equal_p (arg00, arg11, 0))
11600 return fold_build2 (code, type, arg01, arg10);
11601 if (operand_equal_p (arg00, arg10, 0))
11602 return fold_build2 (code, type, arg01, arg11);
11604 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11605 if (TREE_CODE (arg01) == INTEGER_CST
11606 && TREE_CODE (arg11) == INTEGER_CST)
11607 return fold_build2 (code, type,
11608 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11609 fold_build2 (BIT_XOR_EXPR, itype,
11614 /* Attempt to simplify equality/inequality comparisons of complex
11615 values. Only lower the comparison if the result is known or
11616 can be simplified to a single scalar comparison. */
11617 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11618 || TREE_CODE (arg0) == COMPLEX_CST)
11619 && (TREE_CODE (arg1) == COMPLEX_EXPR
11620 || TREE_CODE (arg1) == COMPLEX_CST))
11622 tree real0, imag0, real1, imag1;
11625 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11627 real0 = TREE_OPERAND (arg0, 0);
11628 imag0 = TREE_OPERAND (arg0, 1);
11632 real0 = TREE_REALPART (arg0);
11633 imag0 = TREE_IMAGPART (arg0);
11636 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11638 real1 = TREE_OPERAND (arg1, 0);
11639 imag1 = TREE_OPERAND (arg1, 1);
11643 real1 = TREE_REALPART (arg1);
11644 imag1 = TREE_IMAGPART (arg1);
11647 rcond = fold_binary (code, type, real0, real1);
11648 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11650 if (integer_zerop (rcond))
11652 if (code == EQ_EXPR)
11653 return omit_two_operands (type, boolean_false_node,
11655 return fold_build2 (NE_EXPR, type, imag0, imag1);
11659 if (code == NE_EXPR)
11660 return omit_two_operands (type, boolean_true_node,
11662 return fold_build2 (EQ_EXPR, type, imag0, imag1);
11666 icond = fold_binary (code, type, imag0, imag1);
11667 if (icond && TREE_CODE (icond) == INTEGER_CST)
11669 if (integer_zerop (icond))
11671 if (code == EQ_EXPR)
11672 return omit_two_operands (type, boolean_false_node,
11674 return fold_build2 (NE_EXPR, type, real0, real1);
11678 if (code == NE_EXPR)
11679 return omit_two_operands (type, boolean_true_node,
11681 return fold_build2 (EQ_EXPR, type, real0, real1);
11692 tem = fold_comparison (code, type, op0, op1);
11693 if (tem != NULL_TREE)
11696 /* Transform comparisons of the form X +- C CMP X. */
11697 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11698 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11699 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11700 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11701 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11702 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11704 tree arg01 = TREE_OPERAND (arg0, 1);
11705 enum tree_code code0 = TREE_CODE (arg0);
11708 if (TREE_CODE (arg01) == REAL_CST)
11709 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11711 is_positive = tree_int_cst_sgn (arg01);
11713 /* (X - c) > X becomes false. */
11714 if (code == GT_EXPR
11715 && ((code0 == MINUS_EXPR && is_positive >= 0)
11716 || (code0 == PLUS_EXPR && is_positive <= 0)))
11718 if (TREE_CODE (arg01) == INTEGER_CST
11719 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11720 fold_overflow_warning (("assuming signed overflow does not "
11721 "occur when assuming that (X - c) > X "
11722 "is always false"),
11723 WARN_STRICT_OVERFLOW_ALL);
11724 return constant_boolean_node (0, type);
11727 /* Likewise (X + c) < X becomes false. */
11728 if (code == LT_EXPR
11729 && ((code0 == PLUS_EXPR && is_positive >= 0)
11730 || (code0 == MINUS_EXPR && is_positive <= 0)))
11732 if (TREE_CODE (arg01) == INTEGER_CST
11733 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11734 fold_overflow_warning (("assuming signed overflow does not "
11735 "occur when assuming that "
11736 "(X + c) < X is always false"),
11737 WARN_STRICT_OVERFLOW_ALL);
11738 return constant_boolean_node (0, type);
11741 /* Convert (X - c) <= X to true. */
11742 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11744 && ((code0 == MINUS_EXPR && is_positive >= 0)
11745 || (code0 == PLUS_EXPR && is_positive <= 0)))
11747 if (TREE_CODE (arg01) == INTEGER_CST
11748 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11749 fold_overflow_warning (("assuming signed overflow does not "
11750 "occur when assuming that "
11751 "(X - c) <= X is always true"),
11752 WARN_STRICT_OVERFLOW_ALL);
11753 return constant_boolean_node (1, type);
11756 /* Convert (X + c) >= X to true. */
11757 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11759 && ((code0 == PLUS_EXPR && is_positive >= 0)
11760 || (code0 == MINUS_EXPR && is_positive <= 0)))
11762 if (TREE_CODE (arg01) == INTEGER_CST
11763 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11764 fold_overflow_warning (("assuming signed overflow does not "
11765 "occur when assuming that "
11766 "(X + c) >= X is always true"),
11767 WARN_STRICT_OVERFLOW_ALL);
11768 return constant_boolean_node (1, type);
11771 if (TREE_CODE (arg01) == INTEGER_CST)
11773 /* Convert X + c > X and X - c < X to true for integers. */
11774 if (code == GT_EXPR
11775 && ((code0 == PLUS_EXPR && is_positive > 0)
11776 || (code0 == MINUS_EXPR && is_positive < 0)))
11778 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11779 fold_overflow_warning (("assuming signed overflow does "
11780 "not occur when assuming that "
11781 "(X + c) > X is always true"),
11782 WARN_STRICT_OVERFLOW_ALL);
11783 return constant_boolean_node (1, type);
11786 if (code == LT_EXPR
11787 && ((code0 == MINUS_EXPR && is_positive > 0)
11788 || (code0 == PLUS_EXPR && is_positive < 0)))
11790 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11791 fold_overflow_warning (("assuming signed overflow does "
11792 "not occur when assuming that "
11793 "(X - c) < X is always true"),
11794 WARN_STRICT_OVERFLOW_ALL);
11795 return constant_boolean_node (1, type);
11798 /* Convert X + c <= X and X - c >= X to false for integers. */
11799 if (code == LE_EXPR
11800 && ((code0 == PLUS_EXPR && is_positive > 0)
11801 || (code0 == MINUS_EXPR && is_positive < 0)))
11803 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11804 fold_overflow_warning (("assuming signed overflow does "
11805 "not occur when assuming that "
11806 "(X + c) <= X is always false"),
11807 WARN_STRICT_OVERFLOW_ALL);
11808 return constant_boolean_node (0, type);
11811 if (code == GE_EXPR
11812 && ((code0 == MINUS_EXPR && is_positive > 0)
11813 || (code0 == PLUS_EXPR && is_positive < 0)))
11815 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11816 fold_overflow_warning (("assuming signed overflow does "
11817 "not occur when assuming that "
11818 "(X - c) >= X is always false"),
11819 WARN_STRICT_OVERFLOW_ALL);
11820 return constant_boolean_node (0, type);
11825 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11826 This transformation affects the cases which are handled in later
11827 optimizations involving comparisons with non-negative constants. */
11828 if (TREE_CODE (arg1) == INTEGER_CST
11829 && TREE_CODE (arg0) != INTEGER_CST
11830 && tree_int_cst_sgn (arg1) > 0)
11832 if (code == GE_EXPR)
11834 arg1 = const_binop (MINUS_EXPR, arg1,
11835 build_int_cst (TREE_TYPE (arg1), 1), 0);
11836 return fold_build2 (GT_EXPR, type, arg0,
11837 fold_convert (TREE_TYPE (arg0), arg1));
11839 if (code == LT_EXPR)
11841 arg1 = const_binop (MINUS_EXPR, arg1,
11842 build_int_cst (TREE_TYPE (arg1), 1), 0);
11843 return fold_build2 (LE_EXPR, type, arg0,
11844 fold_convert (TREE_TYPE (arg0), arg1));
11848 /* Comparisons with the highest or lowest possible integer of
11849 the specified precision will have known values. */
11851 tree arg1_type = TREE_TYPE (arg1);
11852 unsigned int width = TYPE_PRECISION (arg1_type);
11854 if (TREE_CODE (arg1) == INTEGER_CST
11855 && !TREE_OVERFLOW (arg1)
11856 && width <= 2 * HOST_BITS_PER_WIDE_INT
11857 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11859 HOST_WIDE_INT signed_max_hi;
11860 unsigned HOST_WIDE_INT signed_max_lo;
11861 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11863 if (width <= HOST_BITS_PER_WIDE_INT)
11865 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11870 if (TYPE_UNSIGNED (arg1_type))
11872 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11878 max_lo = signed_max_lo;
11879 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11885 width -= HOST_BITS_PER_WIDE_INT;
11886 signed_max_lo = -1;
11887 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11892 if (TYPE_UNSIGNED (arg1_type))
11894 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11899 max_hi = signed_max_hi;
11900 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11904 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11905 && TREE_INT_CST_LOW (arg1) == max_lo)
11909 return omit_one_operand (type, integer_zero_node, arg0);
11912 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11915 return omit_one_operand (type, integer_one_node, arg0);
11918 return fold_build2 (NE_EXPR, type, arg0, arg1);
11920 /* The GE_EXPR and LT_EXPR cases above are not normally
11921 reached because of previous transformations. */
11926 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11928 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11932 arg1 = const_binop (PLUS_EXPR, arg1,
11933 build_int_cst (TREE_TYPE (arg1), 1), 0);
11934 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11936 arg1 = const_binop (PLUS_EXPR, arg1,
11937 build_int_cst (TREE_TYPE (arg1), 1), 0);
11938 return fold_build2 (NE_EXPR, type, arg0, arg1);
11942 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11944 && TREE_INT_CST_LOW (arg1) == min_lo)
11948 return omit_one_operand (type, integer_zero_node, arg0);
11951 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11954 return omit_one_operand (type, integer_one_node, arg0);
11957 return fold_build2 (NE_EXPR, type, op0, op1);
11962 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11964 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11968 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11969 return fold_build2 (NE_EXPR, type, arg0, arg1);
11971 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11972 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11977 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11978 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11979 && TYPE_UNSIGNED (arg1_type)
11980 /* We will flip the signedness of the comparison operator
11981 associated with the mode of arg1, so the sign bit is
11982 specified by this mode. Check that arg1 is the signed
11983 max associated with this sign bit. */
11984 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11985 /* signed_type does not work on pointer types. */
11986 && INTEGRAL_TYPE_P (arg1_type))
11988 /* The following case also applies to X < signed_max+1
11989 and X >= signed_max+1 because previous transformations. */
11990 if (code == LE_EXPR || code == GT_EXPR)
11993 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11994 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11995 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11996 type, fold_convert (st0, arg0),
11997 build_int_cst (st1, 0));
12003 /* If we are comparing an ABS_EXPR with a constant, we can
12004 convert all the cases into explicit comparisons, but they may
12005 well not be faster than doing the ABS and one comparison.
12006 But ABS (X) <= C is a range comparison, which becomes a subtraction
12007 and a comparison, and is probably faster. */
12008 if (code == LE_EXPR
12009 && TREE_CODE (arg1) == INTEGER_CST
12010 && TREE_CODE (arg0) == ABS_EXPR
12011 && ! TREE_SIDE_EFFECTS (arg0)
12012 && (0 != (tem = negate_expr (arg1)))
12013 && TREE_CODE (tem) == INTEGER_CST
12014 && !TREE_OVERFLOW (tem))
12015 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12016 build2 (GE_EXPR, type,
12017 TREE_OPERAND (arg0, 0), tem),
12018 build2 (LE_EXPR, type,
12019 TREE_OPERAND (arg0, 0), arg1));
12021 /* Convert ABS_EXPR<x> >= 0 to true. */
12022 strict_overflow_p = false;
12023 if (code == GE_EXPR
12024 && (integer_zerop (arg1)
12025 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12026 && real_zerop (arg1)))
12027 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12029 if (strict_overflow_p)
12030 fold_overflow_warning (("assuming signed overflow does not occur "
12031 "when simplifying comparison of "
12032 "absolute value and zero"),
12033 WARN_STRICT_OVERFLOW_CONDITIONAL);
12034 return omit_one_operand (type, integer_one_node, arg0);
12037 /* Convert ABS_EXPR<x> < 0 to false. */
12038 strict_overflow_p = false;
12039 if (code == LT_EXPR
12040 && (integer_zerop (arg1) || real_zerop (arg1))
12041 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12043 if (strict_overflow_p)
12044 fold_overflow_warning (("assuming signed overflow does not occur "
12045 "when simplifying comparison of "
12046 "absolute value and zero"),
12047 WARN_STRICT_OVERFLOW_CONDITIONAL);
12048 return omit_one_operand (type, integer_zero_node, arg0);
12051 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12052 and similarly for >= into !=. */
12053 if ((code == LT_EXPR || code == GE_EXPR)
12054 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12055 && TREE_CODE (arg1) == LSHIFT_EXPR
12056 && integer_onep (TREE_OPERAND (arg1, 0)))
12057 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12058 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12059 TREE_OPERAND (arg1, 1)),
12060 build_int_cst (TREE_TYPE (arg0), 0));
12062 if ((code == LT_EXPR || code == GE_EXPR)
12063 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12064 && (TREE_CODE (arg1) == NOP_EXPR
12065 || TREE_CODE (arg1) == CONVERT_EXPR)
12066 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12067 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12069 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12070 fold_convert (TREE_TYPE (arg0),
12071 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12072 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12074 build_int_cst (TREE_TYPE (arg0), 0));
12078 case UNORDERED_EXPR:
12086 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12088 t1 = fold_relational_const (code, type, arg0, arg1);
12089 if (t1 != NULL_TREE)
12093 /* If the first operand is NaN, the result is constant. */
12094 if (TREE_CODE (arg0) == REAL_CST
12095 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12096 && (code != LTGT_EXPR || ! flag_trapping_math))
12098 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12099 ? integer_zero_node
12100 : integer_one_node;
12101 return omit_one_operand (type, t1, arg1);
12104 /* If the second operand is NaN, the result is constant. */
12105 if (TREE_CODE (arg1) == REAL_CST
12106 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12107 && (code != LTGT_EXPR || ! flag_trapping_math))
12109 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12110 ? integer_zero_node
12111 : integer_one_node;
12112 return omit_one_operand (type, t1, arg0);
12115 /* Simplify unordered comparison of something with itself. */
12116 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12117 && operand_equal_p (arg0, arg1, 0))
12118 return constant_boolean_node (1, type);
12120 if (code == LTGT_EXPR
12121 && !flag_trapping_math
12122 && operand_equal_p (arg0, arg1, 0))
12123 return constant_boolean_node (0, type);
12125 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12127 tree targ0 = strip_float_extensions (arg0);
12128 tree targ1 = strip_float_extensions (arg1);
12129 tree newtype = TREE_TYPE (targ0);
12131 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12132 newtype = TREE_TYPE (targ1);
12134 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12135 return fold_build2 (code, type, fold_convert (newtype, targ0),
12136 fold_convert (newtype, targ1));
12141 case COMPOUND_EXPR:
12142 /* When pedantic, a compound expression can be neither an lvalue
12143 nor an integer constant expression. */
12144 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12146 /* Don't let (0, 0) be null pointer constant. */
12147 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12148 : fold_convert (type, arg1);
12149 return pedantic_non_lvalue (tem);
12152 if ((TREE_CODE (arg0) == REAL_CST
12153 && TREE_CODE (arg1) == REAL_CST)
12154 || (TREE_CODE (arg0) == INTEGER_CST
12155 && TREE_CODE (arg1) == INTEGER_CST))
12156 return build_complex (type, arg0, arg1);
12160 /* An ASSERT_EXPR should never be passed to fold_binary. */
12161 gcc_unreachable ();
12165 } /* switch (code) */
12168 /* Callback for walk_tree, looking for LABEL_EXPR.
12169 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
12170 Do not check the sub-tree of GOTO_EXPR. */
/* NOTE(review): this chunk is a sampled extract — the static return type,
   braces, and the individual switch cases (including the LABEL_EXPR case
   that presumably returns *TP) are not visible here; confirm against the
   full file before editing.  */
12173 contains_label_1 (tree *tp,
12174 int *walk_subtrees,
12175 void *data ATTRIBUTE_UNUSED)
/* Dispatch on the tree code of the node being walked.  */
12177 switch (TREE_CODE (*tp))
/* Suppress recursion into this node's operands (per the header comment,
   GOTO_EXPR sub-trees are deliberately not checked).  */
12182 *walk_subtrees = 0;
12189 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12190 accessible from outside the sub-tree. Returns NULL_TREE if no
12191 addressable label is found. */
/* NOTE(review): return type and braces are elided in this extract.  */
12194 contains_label_p (tree st)
/* Walk ST with contains_label_1; a non-NULL result means a label was
   found, which the comparison converts to a boolean-style result.  */
12196 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12199 /* Fold a ternary expression of code CODE and type TYPE with operands
12200 OP0, OP1, and OP2. Return the folded expression if folding is
12201 successful. Otherwise, return NULL_TREE. */
/* NOTE(review): sampled extract — the return type, local declarations,
   several case labels (COND_EXPR, CALL_EXPR), braces, and the STRIP_NOPS
   operand setup are not visible between the lines below; verify against
   the complete file before modifying.  */
12204 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12207 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12208 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Sanity check: only genuine 3-operand expression codes belong here.  */
12210 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12211 && TREE_CODE_LENGTH (code) == 3);
12213 /* Strip any conversions that don't change the mode. This is safe
12214 for every expression, except for a comparison expression because
12215 its signedness is derived from its operands. So, in the latter
12216 case, only strip conversions that don't change the signedness.
12218 Note that this is done as an internal manipulation within the
12219 constant folder, in order to find the simplest representation of
12220 the arguments so that their form can be studied. In any cases,
12221 the appropriate type conversions should be put back in the tree
12222 that will get out of the constant folder. */
/* COMPONENT_REF of a constant CONSTRUCTOR: look the field up directly.  */
12237 case COMPONENT_REF:
12238 if (TREE_CODE (arg0) == CONSTRUCTOR
12239 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12241 unsigned HOST_WIDE_INT idx;
12243 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12250 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12251 so all simple results must be passed through pedantic_non_lvalue. */
/* Constant condition: select the taken arm statically.  */
12252 if (TREE_CODE (arg0) == INTEGER_CST)
12254 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12255 tem = integer_zerop (arg0) ? op2 : op1;
12256 /* Only optimize constant conditions when the selected branch
12257 has the same type as the COND_EXPR. This avoids optimizing
12258 away "c ? x : throw", where the throw has a void type.
12259 Avoid throwing away that operand which contains label. */
12260 if ((!TREE_SIDE_EFFECTS (unused_op)
12261 || !contains_label_p (unused_op))
12262 && (! VOID_TYPE_P (TREE_TYPE (tem))
12263 || VOID_TYPE_P (type)))
12264 return pedantic_non_lvalue (tem);
/* Both arms identical: the condition's value is irrelevant.  */
12267 if (operand_equal_p (arg1, op2, 0))
12268 return pedantic_omit_one_operand (type, arg1, arg0);
12270 /* If we have A op B ? A : C, we may be able to convert this to a
12271 simpler expression, depending on the operation and the values
12272 of B and C. Signed zeros prevent all of these transformations,
12273 for reasons given above each one.
12275 Also try swapping the arguments and inverting the conditional. */
12276 if (COMPARISON_CLASS_P (arg0)
12277 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12278 arg1, TREE_OPERAND (arg0, 1))
12279 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12281 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
/* Same transformation with the arms swapped and the condition inverted.  */
12286 if (COMPARISON_CLASS_P (arg0)
12287 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12289 TREE_OPERAND (arg0, 1))
12290 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12292 tem = fold_truth_not_expr (arg0);
12293 if (tem && COMPARISON_CLASS_P (tem))
12295 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12301 /* If the second operand is simpler than the third, swap them
12302 since that produces better jump optimization results. */
12303 if (truth_value_p (TREE_CODE (arg0))
12304 && tree_swap_operands_p (op1, op2, false))
12306 /* See if this can be inverted. If it can't, possibly because
12307 it was a floating-point inequality comparison, don't do
12309 tem = fold_truth_not_expr (arg0);
12311 return fold_build3 (code, type, tem, op2, op1);
12314 /* Convert A ? 1 : 0 to simply A. */
12315 if (integer_onep (op1)
12316 && integer_zerop (op2)
12317 /* If we try to convert OP0 to our type, the
12318 call to fold will try to move the conversion inside
12319 a COND, which will recurse. In that case, the COND_EXPR
12320 is probably the best choice, so leave it alone. */
12321 && type == TREE_TYPE (arg0))
12322 return pedantic_non_lvalue (arg0);
12324 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12325 over COND_EXPR in cases such as floating point comparisons. */
12326 if (integer_zerop (op1)
12327 && integer_onep (op2)
12328 && truth_value_p (TREE_CODE (arg0)))
12329 return pedantic_non_lvalue (fold_convert (type,
12330 invert_truthvalue (arg0)));
12332 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12333 if (TREE_CODE (arg0) == LT_EXPR
12334 && integer_zerop (TREE_OPERAND (arg0, 1))
12335 && integer_zerop (op2)
12336 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12338 /* sign_bit_p only checks ARG1 bits within A's precision.
12339 If <sign bit of A> has wider type than A, bits outside
12340 of A's precision in <sign bit of A> need to be checked.
12341 If they are all 0, this optimization needs to be done
12342 in unsigned A's type, if they are all 1 in signed A's type,
12343 otherwise this can't be done. */
12344 if (TYPE_PRECISION (TREE_TYPE (tem))
12345 < TYPE_PRECISION (TREE_TYPE (arg1))
12346 && TYPE_PRECISION (TREE_TYPE (tem))
12347 < TYPE_PRECISION (type))
12349 unsigned HOST_WIDE_INT mask_lo;
12350 HOST_WIDE_INT mask_hi;
12351 int inner_width, outer_width;
12354 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12355 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12356 if (outer_width > TYPE_PRECISION (type))
12357 outer_width = TYPE_PRECISION (type);
/* Build a double-word mask of the bits between INNER_WIDTH and
   OUTER_WIDTH (the bits outside A's precision that must be checked).
   NOTE(review): the else-branches pairing these shifts are elided in
   this extract.  */
12359 if (outer_width > HOST_BITS_PER_WIDE_INT)
12361 mask_hi = ((unsigned HOST_WIDE_INT) -1
12362 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12368 mask_lo = ((unsigned HOST_WIDE_INT) -1
12369 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12371 if (inner_width > HOST_BITS_PER_WIDE_INT)
12373 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12374 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12378 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12379 >> (HOST_BITS_PER_WIDE_INT - inner_width));
/* All masked bits set: redo the AND in A's signed type.  */
12381 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12382 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12384 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
12385 tem = fold_convert (tem_type, tem);
/* All masked bits clear: redo the AND in A's unsigned type.  */
12387 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12388 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12390 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
12391 tem = fold_convert (tem_type, tem);
12398 return fold_convert (type,
12399 fold_build2 (BIT_AND_EXPR,
12400 TREE_TYPE (tem), tem,
12401 fold_convert (TREE_TYPE (tem),
12405 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12406 already handled above. */
12407 if (TREE_CODE (arg0) == BIT_AND_EXPR
12408 && integer_onep (TREE_OPERAND (arg0, 1))
12409 && integer_zerop (op2)
12410 && integer_pow2p (arg1))
12412 tree tem = TREE_OPERAND (arg0, 0);
12414 if (TREE_CODE (tem) == RSHIFT_EXPR
12415 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12416 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12417 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12418 return fold_build2 (BIT_AND_EXPR, type,
12419 TREE_OPERAND (tem, 0), arg1);
12422 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12423 is probably obsolete because the first operand should be a
12424 truth value (that's why we have the two cases above), but let's
12425 leave it in until we can confirm this for all front-ends. */
12426 if (integer_zerop (op2)
12427 && TREE_CODE (arg0) == NE_EXPR
12428 && integer_zerop (TREE_OPERAND (arg0, 1))
12429 && integer_pow2p (arg1)
12430 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12431 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12432 arg1, OEP_ONLY_CONST))
12433 return pedantic_non_lvalue (fold_convert (type,
12434 TREE_OPERAND (arg0, 0)));
12436 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12437 if (integer_zerop (op2)
12438 && truth_value_p (TREE_CODE (arg0))
12439 && truth_value_p (TREE_CODE (arg1)))
12440 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12441 fold_convert (type, arg0),
12444 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12445 if (integer_onep (op2)
12446 && truth_value_p (TREE_CODE (arg0))
12447 && truth_value_p (TREE_CODE (arg1)))
12449 /* Only perform transformation if ARG0 is easily inverted. */
12450 tem = fold_truth_not_expr (arg0);
12452 return fold_build2 (TRUTH_ORIF_EXPR, type,
12453 fold_convert (type, tem),
12457 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12458 if (integer_zerop (arg1)
12459 && truth_value_p (TREE_CODE (arg0))
12460 && truth_value_p (TREE_CODE (op2)))
12462 /* Only perform transformation if ARG0 is easily inverted. */
12463 tem = fold_truth_not_expr (arg0);
12465 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12466 fold_convert (type, tem),
12470 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12471 if (integer_onep (arg1)
12472 && truth_value_p (TREE_CODE (arg0))
12473 && truth_value_p (TREE_CODE (op2)))
12474 return fold_build2 (TRUTH_ORIF_EXPR, type,
12475 fold_convert (type, arg0),
12481 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12482 of fold_ternary on them. */
12483 gcc_unreachable ();
/* BIT_FIELD_REF of a constant vector: extract the selected element.  */
12485 case BIT_FIELD_REF:
12486 if ((TREE_CODE (arg0) == VECTOR_CST
12487 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
12488 && type == TREE_TYPE (TREE_TYPE (arg0))
12489 && host_integerp (arg1, 1)
12490 && host_integerp (op2, 1))
12492 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12493 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
/* Only a whole, aligned element inside the vector qualifies; note the
   intentional assignment (idx = idx / width) inside the condition.  */
12496 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12497 && (idx % width) == 0
12498 && (idx = idx / width)
12499 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12501 tree elements = NULL_TREE;
12503 if (TREE_CODE (arg0) == VECTOR_CST)
12504 elements = TREE_VECTOR_CST_ELTS (arg0);
12507 unsigned HOST_WIDE_INT idx;
/* CONSTRUCTOR case: collect the values into a reversed TREE_LIST.  */
12510 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
12511 elements = tree_cons (NULL_TREE, value, elements);
/* Advance IDX links down the chain to the requested element.  */
12513 while (idx-- > 0 && elements)
12514 elements = TREE_CHAIN (elements);
12516 return TREE_VALUE (elements);
/* Trailing elements past the chain are implicitly zero.  */
12518 return fold_convert (type, integer_zero_node);
12525 } /* switch (code) */
12528 /* Perform constant folding and related simplification of EXPR.
12529 The related simplifications include x*1 => x, x*0 => 0, etc.,
12530 and application of the associative law.
12531 NOP_EXPR conversions may be removed freely (as long as we
12532 are careful not to change the type of the overall expression).
12533 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12534 but we can constant-fold them if they have constant operands. */
/* Under --enable-checking=fold the real worker is renamed fold_1 and
   wrapped by a checksumming fold () defined further below.  */
12536 #ifdef ENABLE_FOLD_CHECKING
12537 # define fold(x) fold_1 (x)
12538 static tree fold_1 (tree);
/* NOTE(review): the function signature, opening brace, and the local
   `tem' declaration are elided in this extract.  */
12544 const tree t = expr;
12545 enum tree_code code = TREE_CODE (t);
12546 enum tree_code_class kind = TREE_CODE_CLASS (code);
12549 /* Return right away if a constant. */
12550 if (kind == tcc_constant)
12553 /* CALL_EXPR-like objects with variable numbers of operands are
12554 treated specially. */
12555 if (kind == tcc_vl_exp)
12557 if (code == CALL_EXPR)
12559 tem = fold_call_expr (expr, false);
12560 return tem ? tem : expr;
/* Fixed-arity expressions: dispatch to fold_unary/binary/ternary by
   operand count.  */
12565 if (IS_EXPR_CODE_CLASS (kind)
12566 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12568 tree type = TREE_TYPE (t);
12569 tree op0, op1, op2;
12571 switch (TREE_CODE_LENGTH (code))
12574 op0 = TREE_OPERAND (t, 0);
12575 tem = fold_unary (code, type, op0);
12576 return tem ? tem : expr;
12578 op0 = TREE_OPERAND (t, 0);
12579 op1 = TREE_OPERAND (t, 1);
12580 tem = fold_binary (code, type, op0, op1);
12581 return tem ? tem : expr;
12583 op0 = TREE_OPERAND (t, 0);
12584 op1 = TREE_OPERAND (t, 1);
12585 op2 = TREE_OPERAND (t, 2);
12586 tem = fold_ternary (code, type, op0, op1, op2);
12587 return tem ? tem : expr;
/* NOTE(review): the enclosing case label for this recursion into a
   DECL_INITIAL is not visible in this extract — presumably CONST_DECL;
   confirm against the full file.  */
12596 return fold (DECL_INITIAL (t));
12600 } /* switch (code) */
12603 #ifdef ENABLE_FOLD_CHECKING
12606 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12607 static void fold_check_failed (tree, tree);
12608 void print_fold_checksum (tree);
12610 /* When --enable-checking=fold, compute a digest of expr before
12611 and after actual fold call to see if fold did not accidentally
12612 change original expr. */
/* NOTE(review): the checking fold's signature, the `ret'/`ht'
   declarations, the htab_delete/htab_empty calls and the final return
   are elided in this extract.  */
12618 struct md5_ctx ctx;
12619 unsigned char checksum_before[16], checksum_after[16];
/* Pointer-hashed table used to avoid revisiting shared subtrees.  */
12622 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12623 md5_init_ctx (&ctx);
12624 fold_checksum_tree (expr, &ctx, ht);
12625 md5_finish_ctx (&ctx, checksum_before);
/* Do the real folding via the renamed worker.  */
12628 ret = fold_1 (expr);
12630 md5_init_ctx (&ctx);
12631 fold_checksum_tree (expr, &ctx, ht);
12632 md5_finish_ctx (&ctx, checksum_after);
/* Any digest change means fold_1 mutated its input in place — abort.  */
12635 if (memcmp (checksum_before, checksum_after, 16))
12636 fold_check_failed (expr, ret);
/* Debugging aid: print the md5 digest of EXPR to stderr as 32 hex
   digits followed by a newline.  NOTE(review): return type and the
   `ht' declaration are elided in this extract.  */
12642 print_fold_checksum (tree expr)
12644 struct md5_ctx ctx;
12645 unsigned char checksum[16], cnt;
12648 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12649 md5_init_ctx (&ctx);
12650 fold_checksum_tree (expr, &ctx, ht);
12651 md5_finish_ctx (&ctx, checksum);
12653 for (cnt = 0; cnt < 16; ++cnt)
12654 fprintf (stderr, "%02x", checksum[cnt]);
12655 putc ('\n', stderr);
/* Report a fold-checking failure (fold modified its input tree) as an
   internal compiler error.  Both arguments exist only for debugger
   inspection.  */
12659 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12661 internal_error ("fold check: original tree changed by fold");
/* Recursively feed the bytes of EXPR and everything it references into
   the md5 context CTX, using hash table HT to visit each node only
   once.  Fields that fold is allowed to modify (assembler names,
   cached type values, etc.) are masked out by checksumming a scrubbed
   stack copy instead of the real node.
   NOTE(review): sampled extract — the early-return on a NULL/invalid
   EXPR, the recursive_label, several case labels, and closing braces
   are elided between the lines below.  */
12665 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12668 enum tree_code code;
12669 struct tree_function_decl buf;
/* BUF must be large enough to hold a scrubbed copy of any node we
   copy below; tree_function_decl is the biggest candidate.  */
12674 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12675 <= sizeof (struct tree_function_decl))
12676 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
/* Deduplicate: a node already in HT has been checksummed before.  */
12679 slot = htab_find_slot (ht, expr, INSERT);
12683 code = TREE_CODE (expr);
12684 if (TREE_CODE_CLASS (code) == tcc_declaration
12685 && DECL_ASSEMBLER_NAME_SET_P (expr))
12687 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12688 memcpy ((char *) &buf, expr, tree_size (expr));
12689 expr = (tree) &buf;
12690 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12692 else if (TREE_CODE_CLASS (code) == tcc_type
12693 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12694 || TYPE_CACHED_VALUES_P (expr)
12695 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12697 /* Allow these fields to be modified. */
12698 memcpy ((char *) &buf, expr, tree_size (expr));
12699 expr = (tree) &buf;
12700 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12701 TYPE_POINTER_TO (expr) = NULL;
12702 TYPE_REFERENCE_TO (expr) = NULL;
12703 if (TYPE_CACHED_VALUES_P (expr))
12705 TYPE_CACHED_VALUES_P (expr) = 0;
12706 TYPE_CACHED_VALUES (expr) = NULL;
/* Checksum the (possibly scrubbed) node bytes, then recurse into the
   trees it points at.  */
12709 md5_process_bytes (expr, tree_size (expr), ctx);
12710 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12711 if (TREE_CODE_CLASS (code) != tcc_type
12712 && TREE_CODE_CLASS (code) != tcc_declaration
12713 && code != TREE_LIST)
12714 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12715 switch (TREE_CODE_CLASS (code))
12721 md5_process_bytes (TREE_STRING_POINTER (expr),
12722 TREE_STRING_LENGTH (expr), ctx);
12725 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12726 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12729 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12735 case tcc_exceptional:
/* TREE_LIST: checksum purpose/value, then iterate down the chain
   (tail call replaced by the goto below).  */
12739 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12740 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12741 expr = TREE_CHAIN (expr);
12742 goto recursive_label;
12745 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12746 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12752 case tcc_expression:
12753 case tcc_reference:
12754 case tcc_comparison:
12757 case tcc_statement:
12759 len = TREE_OPERAND_LENGTH (expr);
12760 for (i = 0; i < len; ++i)
12761 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12763 case tcc_declaration:
12764 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12765 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12766 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12768 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12769 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12770 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12771 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12772 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12774 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12775 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12777 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12779 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12780 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12781 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
/* tcc_type: checksum the type's constituent trees.  */
12785 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12786 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12787 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12788 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12789 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12790 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12791 if (INTEGRAL_TYPE_P (expr)
12792 || SCALAR_FLOAT_TYPE_P (expr))
12794 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12795 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12797 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12798 if (TREE_CODE (expr) == RECORD_TYPE
12799 || TREE_CODE (expr) == UNION_TYPE
12800 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12801 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12802 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12811 /* Fold a unary tree expression with code CODE of type TYPE with an
12812 operand OP0. Return a folded expression if successful. Otherwise,
12813 return a tree expression with code CODE of type TYPE with an
/* NOTE(review): sampled extract — return type, `tem'/`ht' declarations,
   the htab_delete call and final return are elided below.  */
12817 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
/* With fold checking enabled, digest OP0 before and after to prove
   fold_unary did not mutate it.  */
12820 #ifdef ENABLE_FOLD_CHECKING
12821 unsigned char checksum_before[16], checksum_after[16];
12822 struct md5_ctx ctx;
12825 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12826 md5_init_ctx (&ctx);
12827 fold_checksum_tree (op0, &ctx, ht);
12828 md5_finish_ctx (&ctx, checksum_before);
/* Try to fold; if folding declines, build the plain expression.  */
12832 tem = fold_unary (code, type, op0);
12834 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12836 #ifdef ENABLE_FOLD_CHECKING
12837 md5_init_ctx (&ctx);
12838 fold_checksum_tree (op0, &ctx, ht);
12839 md5_finish_ctx (&ctx, checksum_after);
12842 if (memcmp (checksum_before, checksum_after, 16))
12843 fold_check_failed (op0, tem);
12848 /* Fold a binary tree expression with code CODE of type TYPE with
12849 operands OP0 and OP1. Return a folded expression if successful.
12850 Otherwise, return a tree expression with code CODE of type TYPE
12851 with operands OP0 and OP1. */
/* NOTE(review): sampled extract — return type, `tem'/`ht' declarations,
   htab cleanup and the final return are elided below.  */
12854 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
/* Checking mode: digest each operand separately before the fold ...  */
12858 #ifdef ENABLE_FOLD_CHECKING
12859 unsigned char checksum_before_op0[16],
12860 checksum_before_op1[16],
12861 checksum_after_op0[16],
12862 checksum_after_op1[16];
12863 struct md5_ctx ctx;
12866 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12867 md5_init_ctx (&ctx);
12868 fold_checksum_tree (op0, &ctx, ht);
12869 md5_finish_ctx (&ctx, checksum_before_op0);
12872 md5_init_ctx (&ctx);
12873 fold_checksum_tree (op1, &ctx, ht);
12874 md5_finish_ctx (&ctx, checksum_before_op1);
/* Try to fold; if folding declines, build the plain expression.  */
12878 tem = fold_binary (code, type, op0, op1);
12880 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
/* ... and re-digest afterwards; any difference is an ICE.  */
12882 #ifdef ENABLE_FOLD_CHECKING
12883 md5_init_ctx (&ctx);
12884 fold_checksum_tree (op0, &ctx, ht);
12885 md5_finish_ctx (&ctx, checksum_after_op0);
12888 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12889 fold_check_failed (op0, tem);
12891 md5_init_ctx (&ctx);
12892 fold_checksum_tree (op1, &ctx, ht);
12893 md5_finish_ctx (&ctx, checksum_after_op1);
12896 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12897 fold_check_failed (op1, tem);
12902 /* Fold a ternary tree expression with code CODE of type TYPE with
12903 operands OP0, OP1, and OP2. Return a folded expression if
12904 successful. Otherwise, return a tree expression with code CODE of
12905 type TYPE with operands OP0, OP1, and OP2. */
/* NOTE(review): sampled extract — return type, `tem'/`ht' declarations,
   htab cleanup and the final return are elided below.  */
12908 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
/* Checking mode: digest all three operands before the fold ...  */
12912 #ifdef ENABLE_FOLD_CHECKING
12913 unsigned char checksum_before_op0[16],
12914 checksum_before_op1[16],
12915 checksum_before_op2[16],
12916 checksum_after_op0[16],
12917 checksum_after_op1[16],
12918 checksum_after_op2[16];
12919 struct md5_ctx ctx;
12922 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12923 md5_init_ctx (&ctx);
12924 fold_checksum_tree (op0, &ctx, ht);
12925 md5_finish_ctx (&ctx, checksum_before_op0);
12928 md5_init_ctx (&ctx);
12929 fold_checksum_tree (op1, &ctx, ht);
12930 md5_finish_ctx (&ctx, checksum_before_op1);
12933 md5_init_ctx (&ctx);
12934 fold_checksum_tree (op2, &ctx, ht);
12935 md5_finish_ctx (&ctx, checksum_before_op2);
/* Variable-arity codes (CALL_EXPR) must not come through here.  */
12939 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12940 tem = fold_ternary (code, type, op0, op1, op2);
12942 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
/* ... and re-digest afterwards; any difference is an ICE.  */
12944 #ifdef ENABLE_FOLD_CHECKING
12945 md5_init_ctx (&ctx);
12946 fold_checksum_tree (op0, &ctx, ht);
12947 md5_finish_ctx (&ctx, checksum_after_op0);
12950 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12951 fold_check_failed (op0, tem);
12953 md5_init_ctx (&ctx);
12954 fold_checksum_tree (op1, &ctx, ht);
12955 md5_finish_ctx (&ctx, checksum_after_op1);
12958 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12959 fold_check_failed (op1, tem);
12961 md5_init_ctx (&ctx);
12962 fold_checksum_tree (op2, &ctx, ht);
12963 md5_finish_ctx (&ctx, checksum_after_op2);
12966 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12967 fold_check_failed (op2, tem);
12972 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12973 arguments in ARGARRAY, and a null static chain.
12974 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12975 of type TYPE from the given operands as constructed by build_call_array. */
/* NOTE(review): sampled extract — return type, `tem'/`ht'/`i'
   declarations, htab cleanup and the final return are elided below.  */
12978 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
/* Checking mode: digest FN and the whole argument list before ...  */
12981 #ifdef ENABLE_FOLD_CHECKING
12982 unsigned char checksum_before_fn[16],
12983 checksum_before_arglist[16],
12984 checksum_after_fn[16],
12985 checksum_after_arglist[16];
12986 struct md5_ctx ctx;
12990 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12991 md5_init_ctx (&ctx);
12992 fold_checksum_tree (fn, &ctx, ht);
12993 md5_finish_ctx (&ctx, checksum_before_fn);
12996 md5_init_ctx (&ctx);
12997 for (i = 0; i < nargs; i++)
12998 fold_checksum_tree (argarray[i], &ctx, ht);
12999 md5_finish_ctx (&ctx, checksum_before_arglist);
/* Delegate the actual folding/building.  */
13003 tem = fold_builtin_call_array (type, fn, nargs, argarray);
/* ... and after, failing hard if anything was mutated.  */
13005 #ifdef ENABLE_FOLD_CHECKING
13006 md5_init_ctx (&ctx);
13007 fold_checksum_tree (fn, &ctx, ht);
13008 md5_finish_ctx (&ctx, checksum_after_fn);
13011 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13012 fold_check_failed (fn, tem);
13014 md5_init_ctx (&ctx);
13015 for (i = 0; i < nargs; i++)
13016 fold_checksum_tree (argarray[i], &ctx, ht);
13017 md5_finish_ctx (&ctx, checksum_after_arglist);
13020 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13021 fold_check_failed (NULL_TREE, tem);
13026 /* Perform constant folding and related simplification of initializer
13027 expression EXPR. These behave identically to "fold_buildN" but ignore
13028 potential run-time traps and exceptions that fold must preserve. */
/* Save the trap-sensitivity flags, then clear them so folding of
   static initializers is unrestricted.  NOTE(review): the
   `flag_trapv = 0;' continuation line appears to be elided from this
   extract — confirm against the full file.  */
13030 #define START_FOLD_INIT \
13031 int saved_signaling_nans = flag_signaling_nans;\
13032 int saved_trapping_math = flag_trapping_math;\
13033 int saved_rounding_math = flag_rounding_math;\
13034 int saved_trapv = flag_trapv;\
13035 int saved_folding_initializer = folding_initializer;\
13036 flag_signaling_nans = 0;\
13037 flag_trapping_math = 0;\
13038 flag_rounding_math = 0;\
13040 folding_initializer = 1;
/* Restore every flag saved by START_FOLD_INIT.  */
13042 #define END_FOLD_INIT \
13043 flag_signaling_nans = saved_signaling_nans;\
13044 flag_trapping_math = saved_trapping_math;\
13045 flag_rounding_math = saved_rounding_math;\
13046 flag_trapv = saved_trapv;\
13047 folding_initializer = saved_folding_initializer;
/* fold_build1 with trap-sensitivity flags suspended (initializer
   context).  NOTE(review): the START_FOLD_INIT/END_FOLD_INIT bracketing
   and the return are elided in this extract.  */
13050 fold_build1_initializer (enum tree_code code, tree type, tree op)
13055 result = fold_build1 (code, type, op);
/* fold_build2 with trap-sensitivity flags suspended (initializer
   context).  NOTE(review): the START_FOLD_INIT/END_FOLD_INIT bracketing
   and the return are elided in this extract.  */
13062 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13067 result = fold_build2 (code, type, op0, op1);
/* fold_build3 with trap-sensitivity flags suspended (initializer
   context).  NOTE(review): the START_FOLD_INIT/END_FOLD_INIT bracketing
   and the return are elided in this extract.  */
13074 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13080 result = fold_build3 (code, type, op0, op1, op2);
/* fold_build_call_array with trap-sensitivity flags suspended
   (initializer context).  NOTE(review): the START_FOLD_INIT/
   END_FOLD_INIT bracketing and the return are elided in this
   extract.  */
13087 fold_build_call_array_initializer (tree type, tree fn,
13088 int nargs, tree *argarray)
13093 result = fold_build_call_array (type, fn, nargs, argarray);
13099 #undef START_FOLD_INIT
13100 #undef END_FOLD_INIT
13102 /* Determine if first argument is a multiple of second argument. Return 0 if
13103 it is not, or we cannot easily determined it to be.
13105 An example of the sort of thing we care about (at this point; this routine
13106 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13107 fold cases do now) is discovering that
13109 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13115 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13117 This code also handles discovering that
13119 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13121 is a multiple of 8 so we don't have to worry about dealing with a
13122 possible remainder.
13124 Note that we *look* inside a SAVE_EXPR only to determine how it was
13125 calculated; it is not safe for fold to do much of anything else with the
13126 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13127 at run time. For example, the latter example above *cannot* be implemented
13128 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13129 evaluation time of the original SAVE_EXPR is not necessarily the same at
13130 the time the new expression is evaluated. The only optimization of this
13131 sort that would be valid is changing
13133 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13137 SAVE_EXPR (I) * SAVE_EXPR (J)
13139 (where the same SAVE_EXPR (J) is used in the original and the
13140 transformed version). */
/* NOTE(review): sampled extract — return type, several case labels
   (MULT/PLUS/LSHIFT/NOP/INTEGER_CST, presumably), braces and some
   returns are elided between the lines below.  */
13143 multiple_of_p (tree type, tree top, tree bottom)
/* Trivially, anything is a multiple of itself.  */
13145 if (operand_equal_p (top, bottom, 0))
/* Only integer types are handled.  */
13148 if (TREE_CODE (type) != INTEGER_TYPE)
13151 switch (TREE_CODE (top))
13154 /* Bitwise and provides a power of two multiple. If the mask is
13155 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13156 if (!integer_pow2p (bottom))
/* Multiplication: either factor being a multiple suffices.  */
13161 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13162 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Addition/subtraction: both operands must be multiples.  */
13166 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13167 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Left shift by a constant: rewrite as an equivalent multiplication
   and recurse, guarding against shift counts too large for the
   host word and against overflow in the constant fold.  */
13170 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13174 op1 = TREE_OPERAND (top, 1);
13175 /* const_binop may not detect overflow correctly,
13176 so check for it explicitly here. */
13177 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13178 > TREE_INT_CST_LOW (op1)
13179 && TREE_INT_CST_HIGH (op1) == 0
13180 && 0 != (t1 = fold_convert (type,
13181 const_binop (LSHIFT_EXPR,
13184 && !TREE_OVERFLOW (t1))
13185 return multiple_of_p (type, t1, bottom);
13190 /* Can't handle conversions from non-integral or wider integral type. */
13191 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13192 || (TYPE_PRECISION (type)
13193 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13196 /* .. fall through ... */
/* SAVE_EXPR/conversion: look through to the computed value.  */
13199 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Constant case: reject sign-problematic unsigned comparisons, then
   test the remainder directly.  */
13202 if (TREE_CODE (bottom) != INTEGER_CST
13203 || (TYPE_UNSIGNED (type)
13204 && (tree_int_cst_sgn (top) < 0
13205 || tree_int_cst_sgn (bottom) < 0)))
13207 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13215 /* Return true if `t' is known to be non-negative. If the return
13216 value is based on the assumption that signed overflow is undefined,
13217 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13218 *STRICT_OVERFLOW_P. */
13221 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13223 if (t == error_mark_node)
13226 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13229 switch (TREE_CODE (t))
13232 /* Query VRP to see if it has recorded any information about
13233 the range of this object. */
13234 return ssa_name_nonnegative_p (t);
13237 /* We can't return 1 if flag_wrapv is set because
13238 ABS_EXPR<INT_MIN> = INT_MIN. */
13239 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13241 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
13243 *strict_overflow_p = true;
13249 return tree_int_cst_sgn (t) >= 0;
13252 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13255 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13256 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13258 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13259 strict_overflow_p));
13261 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13262 both unsigned and at least 2 bits shorter than the result. */
13263 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13264 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13265 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13267 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13268 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13269 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13270 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13272 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13273 TYPE_PRECISION (inner2)) + 1;
13274 return prec < TYPE_PRECISION (TREE_TYPE (t));
13280 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13282 /* x * x for floating point x is always non-negative. */
13283 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
13285 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13287 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13288 strict_overflow_p));
13291 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13292 both unsigned and their total bits is shorter than the result. */
13293 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13294 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13295 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13297 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13298 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13299 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13300 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13301 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13302 < TYPE_PRECISION (TREE_TYPE (t));
13308 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13310 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13311 strict_overflow_p));
13317 case TRUNC_DIV_EXPR:
13318 case CEIL_DIV_EXPR:
13319 case FLOOR_DIV_EXPR:
13320 case ROUND_DIV_EXPR:
13321 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13323 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13324 strict_overflow_p));
13326 case TRUNC_MOD_EXPR:
13327 case CEIL_MOD_EXPR:
13328 case FLOOR_MOD_EXPR:
13329 case ROUND_MOD_EXPR:
13331 case NON_LVALUE_EXPR:
13333 case FIX_TRUNC_EXPR:
13334 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13335 strict_overflow_p);
13337 case COMPOUND_EXPR:
13339 case GIMPLE_MODIFY_STMT:
13340 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13341 strict_overflow_p);
13344 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13345 strict_overflow_p);
13348 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13350 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13351 strict_overflow_p));
13355 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13356 tree outer_type = TREE_TYPE (t);
13358 if (TREE_CODE (outer_type) == REAL_TYPE)
13360 if (TREE_CODE (inner_type) == REAL_TYPE)
13361 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13362 strict_overflow_p);
13363 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13365 if (TYPE_UNSIGNED (inner_type))
13367 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13368 strict_overflow_p);
13371 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13373 if (TREE_CODE (inner_type) == REAL_TYPE)
13374 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
13375 strict_overflow_p);
13376 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13377 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13378 && TYPE_UNSIGNED (inner_type);
13385 tree temp = TARGET_EXPR_SLOT (t);
13386 t = TARGET_EXPR_INITIAL (t);
13388 /* If the initializer is non-void, then it's a normal expression
13389 that will be assigned to the slot. */
13390 if (!VOID_TYPE_P (t))
13391 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13393 /* Otherwise, the initializer sets the slot in some way. One common
13394 way is an assignment statement at the end of the initializer. */
13397 if (TREE_CODE (t) == BIND_EXPR)
13398 t = expr_last (BIND_EXPR_BODY (t));
13399 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13400 || TREE_CODE (t) == TRY_CATCH_EXPR)
13401 t = expr_last (TREE_OPERAND (t, 0));
13402 else if (TREE_CODE (t) == STATEMENT_LIST)
13407 if ((TREE_CODE (t) == MODIFY_EXPR
13408 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13409 && GENERIC_TREE_OPERAND (t, 0) == temp)
13410 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13411 strict_overflow_p);
13418 tree fndecl = get_callee_fndecl (t);
13419 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13420 switch (DECL_FUNCTION_CODE (fndecl))
13422 CASE_FLT_FN (BUILT_IN_ACOS):
13423 CASE_FLT_FN (BUILT_IN_ACOSH):
13424 CASE_FLT_FN (BUILT_IN_CABS):
13425 CASE_FLT_FN (BUILT_IN_COSH):
13426 CASE_FLT_FN (BUILT_IN_ERFC):
13427 CASE_FLT_FN (BUILT_IN_EXP):
13428 CASE_FLT_FN (BUILT_IN_EXP10):
13429 CASE_FLT_FN (BUILT_IN_EXP2):
13430 CASE_FLT_FN (BUILT_IN_FABS):
13431 CASE_FLT_FN (BUILT_IN_FDIM):
13432 CASE_FLT_FN (BUILT_IN_HYPOT):
13433 CASE_FLT_FN (BUILT_IN_POW10):
13434 CASE_INT_FN (BUILT_IN_FFS):
13435 CASE_INT_FN (BUILT_IN_PARITY):
13436 CASE_INT_FN (BUILT_IN_POPCOUNT):
13437 case BUILT_IN_BSWAP32:
13438 case BUILT_IN_BSWAP64:
13442 CASE_FLT_FN (BUILT_IN_SQRT):
13443 /* sqrt(-0.0) is -0.0. */
13444 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
13446 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13447 strict_overflow_p);
13449 CASE_FLT_FN (BUILT_IN_ASINH):
13450 CASE_FLT_FN (BUILT_IN_ATAN):
13451 CASE_FLT_FN (BUILT_IN_ATANH):
13452 CASE_FLT_FN (BUILT_IN_CBRT):
13453 CASE_FLT_FN (BUILT_IN_CEIL):
13454 CASE_FLT_FN (BUILT_IN_ERF):
13455 CASE_FLT_FN (BUILT_IN_EXPM1):
13456 CASE_FLT_FN (BUILT_IN_FLOOR):
13457 CASE_FLT_FN (BUILT_IN_FMOD):
13458 CASE_FLT_FN (BUILT_IN_FREXP):
13459 CASE_FLT_FN (BUILT_IN_LCEIL):
13460 CASE_FLT_FN (BUILT_IN_LDEXP):
13461 CASE_FLT_FN (BUILT_IN_LFLOOR):
13462 CASE_FLT_FN (BUILT_IN_LLCEIL):
13463 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13464 CASE_FLT_FN (BUILT_IN_LLRINT):
13465 CASE_FLT_FN (BUILT_IN_LLROUND):
13466 CASE_FLT_FN (BUILT_IN_LRINT):
13467 CASE_FLT_FN (BUILT_IN_LROUND):
13468 CASE_FLT_FN (BUILT_IN_MODF):
13469 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13470 CASE_FLT_FN (BUILT_IN_RINT):
13471 CASE_FLT_FN (BUILT_IN_ROUND):
13472 CASE_FLT_FN (BUILT_IN_SCALB):
13473 CASE_FLT_FN (BUILT_IN_SCALBLN):
13474 CASE_FLT_FN (BUILT_IN_SCALBN):
13475 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13476 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13477 CASE_FLT_FN (BUILT_IN_SINH):
13478 CASE_FLT_FN (BUILT_IN_TANH):
13479 CASE_FLT_FN (BUILT_IN_TRUNC):
13480 /* True if the 1st argument is nonnegative. */
13481 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13482 strict_overflow_p);
13484 CASE_FLT_FN (BUILT_IN_FMAX):
13485 /* True if the 1st OR 2nd arguments are nonnegative. */
13486 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13488 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13489 strict_overflow_p)));
13491 CASE_FLT_FN (BUILT_IN_FMIN):
13492 /* True if the 1st AND 2nd arguments are nonnegative. */
13493 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13495 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13496 strict_overflow_p)));
13498 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13499 /* True if the 2nd argument is nonnegative. */
13500 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13501 strict_overflow_p);
13503 CASE_FLT_FN (BUILT_IN_POWI):
13504 /* True if the 1st argument is nonnegative or the second
13505 argument is an even integer. */
13506 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
13508 tree arg1 = CALL_EXPR_ARG (t, 1);
13509 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
13512 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13513 strict_overflow_p);
13515 CASE_FLT_FN (BUILT_IN_POW):
13516 /* True if the 1st argument is nonnegative or the second
13517 argument is an even integer valued real. */
13518 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
13523 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
13524 n = real_to_integer (&c);
13527 REAL_VALUE_TYPE cint;
13528 real_from_integer (&cint, VOIDmode, n,
13529 n < 0 ? -1 : 0, 0);
13530 if (real_identical (&c, &cint))
13534 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13535 strict_overflow_p);
13542 /* ... fall through ... */
13545 if (truth_value_p (TREE_CODE (t)))
13546 /* Truth values evaluate to 0 or 1, which is nonnegative. */
13550 /* We don't know sign of `t', so be conservative and return false. */
13554 /* Return true if `t' is known to be non-negative. Handle warnings
13555 about undefined signed overflow. */
13558 tree_expr_nonnegative_p (tree t)
13560 bool ret, strict_overflow_p;
13562 strict_overflow_p = false;
13563 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13564 if (strict_overflow_p)
13565 fold_overflow_warning (("assuming signed overflow does not occur when "
13566 "determining that expression is always "
13568 WARN_STRICT_OVERFLOW_MISC);
13572 /* Return true when T is an address and is known to be nonzero.
13573 For floating point we further ensure that T is not denormal.
13574 Similar logic is present in nonzero_address in rtlanal.h.
13576 If the return value is based on the assumption that signed overflow
13577 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13578 change *STRICT_OVERFLOW_P. */
13581 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13583 tree type = TREE_TYPE (t);
13584 bool sub_strict_overflow_p;
13586 /* Doing something useful for floating point would need more work. */
13587 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
13590 switch (TREE_CODE (t))
13593 /* Query VRP to see if it has recorded any information about
13594 the range of this object. */
13595 return ssa_name_nonzero_p (t);
13598 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13599 strict_overflow_p);
13602 return !integer_zerop (t);
13605 if (TYPE_OVERFLOW_UNDEFINED (type))
13607 /* With the presence of negative values it is hard
13608 to say something. */
13609 sub_strict_overflow_p = false;
13610 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13611 &sub_strict_overflow_p)
13612 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13613 &sub_strict_overflow_p))
13615 /* One of operands must be positive and the other non-negative. */
13616 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13617 overflows, on a twos-complement machine the sum of two
13618 nonnegative numbers can never be zero. */
13619 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13621 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13622 strict_overflow_p));
13627 if (TYPE_OVERFLOW_UNDEFINED (type))
13629 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13631 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13632 strict_overflow_p))
13634 *strict_overflow_p = true;
13642 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13643 tree outer_type = TREE_TYPE (t);
13645 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13646 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13647 strict_overflow_p));
13653 tree base = get_base_address (TREE_OPERAND (t, 0));
13658 /* Weak declarations may link to NULL. */
13659 if (VAR_OR_FUNCTION_DECL_P (base))
13660 return !DECL_WEAK (base);
13662 /* Constants are never weak. */
13663 if (CONSTANT_CLASS_P (base))
13670 sub_strict_overflow_p = false;
13671 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13672 &sub_strict_overflow_p)
13673 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13674 &sub_strict_overflow_p))
13676 if (sub_strict_overflow_p)
13677 *strict_overflow_p = true;
13683 sub_strict_overflow_p = false;
13684 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13685 &sub_strict_overflow_p)
13686 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13687 &sub_strict_overflow_p))
13689 if (sub_strict_overflow_p)
13690 *strict_overflow_p = true;
13695 sub_strict_overflow_p = false;
13696 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13697 &sub_strict_overflow_p))
13699 if (sub_strict_overflow_p)
13700 *strict_overflow_p = true;
13702 /* When both operands are nonzero, then MAX must be too. */
13703 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13704 strict_overflow_p))
13707 /* MAX where operand 0 is positive is positive. */
13708 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13709 strict_overflow_p);
13711 /* MAX where operand 1 is positive is positive. */
13712 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13713 &sub_strict_overflow_p)
13714 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13715 &sub_strict_overflow_p))
13717 if (sub_strict_overflow_p)
13718 *strict_overflow_p = true;
13723 case COMPOUND_EXPR:
13725 case GIMPLE_MODIFY_STMT:
13727 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13728 strict_overflow_p);
13731 case NON_LVALUE_EXPR:
13732 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13733 strict_overflow_p);
13736 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13738 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13739 strict_overflow_p));
13742 return alloca_call_p (t);
13750 /* Return true when T is an address and is known to be nonzero.
13751 Handle warnings about undefined signed overflow. */
13754 tree_expr_nonzero_p (tree t)
13756 bool ret, strict_overflow_p;
13758 strict_overflow_p = false;
13759 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
13760 if (strict_overflow_p)
13761 fold_overflow_warning (("assuming signed overflow does not occur when "
13762 "determining that expression is always "
13764 WARN_STRICT_OVERFLOW_MISC);
13768 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13769 attempt to fold the expression to a constant without modifying TYPE,
13772 If the expression could be simplified to a constant, then return
13773 the constant. If the expression would not be simplified to a
13774 constant, then return NULL_TREE. */
13777 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13779 tree tem = fold_binary (code, type, op0, op1);
13780 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13783 /* Given the components of a unary expression CODE, TYPE and OP0,
13784 attempt to fold the expression to a constant without modifying
13787 If the expression could be simplified to a constant, then return
13788 the constant. If the expression would not be simplified to a
13789 constant, then return NULL_TREE. */
13792 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13794 tree tem = fold_unary (code, type, op0);
13795 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13798 /* If EXP represents referencing an element in a constant string
13799 (either via pointer arithmetic or array indexing), return the
13800 tree representing the value accessed, otherwise return NULL. */
13803 fold_read_from_constant_string (tree exp)
13805 if ((TREE_CODE (exp) == INDIRECT_REF
13806 || TREE_CODE (exp) == ARRAY_REF)
13807 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13809 tree exp1 = TREE_OPERAND (exp, 0);
13813 if (TREE_CODE (exp) == INDIRECT_REF)
13814 string = string_constant (exp1, &index);
13817 tree low_bound = array_ref_low_bound (exp);
13818 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
13820 /* Optimize the special-case of a zero lower bound.
13822 We convert the low_bound to sizetype to avoid some problems
13823 with constant folding. (E.g. suppose the lower bound is 1,
13824 and its mode is QI. Without the conversion,l (ARRAY
13825 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13826 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
13827 if (! integer_zerop (low_bound))
13828 index = size_diffop (index, fold_convert (sizetype, low_bound));
13834 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13835 && TREE_CODE (string) == STRING_CST
13836 && TREE_CODE (index) == INTEGER_CST
13837 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13838 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13840 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13841 return fold_convert (TREE_TYPE (exp),
13842 build_int_cst (NULL_TREE,
13843 (TREE_STRING_POINTER (string)
13844 [TREE_INT_CST_LOW (index)])));
13849 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13850 an integer constant or real constant.
13852 TYPE is the type of the result. */
13855 fold_negate_const (tree arg0, tree type)
13857 tree t = NULL_TREE;
13859 switch (TREE_CODE (arg0))
13863 unsigned HOST_WIDE_INT low;
13864 HOST_WIDE_INT high;
13865 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13866 TREE_INT_CST_HIGH (arg0),
13868 t = force_fit_type_double (type, low, high, 1,
13869 (overflow | TREE_OVERFLOW (arg0))
13870 && !TYPE_UNSIGNED (type));
13875 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13879 gcc_unreachable ();
13885 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13886 an integer constant or real constant.
13888 TYPE is the type of the result. */
13891 fold_abs_const (tree arg0, tree type)
13893 tree t = NULL_TREE;
13895 switch (TREE_CODE (arg0))
13898 /* If the value is unsigned, then the absolute value is
13899 the same as the ordinary value. */
13900 if (TYPE_UNSIGNED (type))
13902 /* Similarly, if the value is non-negative. */
13903 else if (INT_CST_LT (integer_minus_one_node, arg0))
13905 /* If the value is negative, then the absolute value is
13909 unsigned HOST_WIDE_INT low;
13910 HOST_WIDE_INT high;
13911 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13912 TREE_INT_CST_HIGH (arg0),
13914 t = force_fit_type_double (type, low, high, -1,
13915 overflow | TREE_OVERFLOW (arg0));
13920 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13921 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13927 gcc_unreachable ();
13933 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13934 constant. TYPE is the type of the result. */
13937 fold_not_const (tree arg0, tree type)
13939 tree t = NULL_TREE;
13941 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13943 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13944 ~TREE_INT_CST_HIGH (arg0), 0,
13945 TREE_OVERFLOW (arg0));
13950 /* Given CODE, a relational operator, the target type, TYPE and two
13951 constant operands OP0 and OP1, return the result of the
13952 relational operation. If the result is not a compile time
13953 constant, then return NULL_TREE. */
13956 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13958 int result, invert;
13960 /* From here on, the only cases we handle are when the result is
13961 known to be a constant. */
13963 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13965 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13966 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13968 /* Handle the cases where either operand is a NaN. */
13969 if (real_isnan (c0) || real_isnan (c1))
13979 case UNORDERED_EXPR:
13993 if (flag_trapping_math)
13999 gcc_unreachable ();
14002 return constant_boolean_node (result, type);
14005 return constant_boolean_node (real_compare (code, c0, c1), type);
14008 /* Handle equality/inequality of complex constants. */
14009 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14011 tree rcond = fold_relational_const (code, type,
14012 TREE_REALPART (op0),
14013 TREE_REALPART (op1));
14014 tree icond = fold_relational_const (code, type,
14015 TREE_IMAGPART (op0),
14016 TREE_IMAGPART (op1));
14017 if (code == EQ_EXPR)
14018 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14019 else if (code == NE_EXPR)
14020 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14025 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14027 To compute GT, swap the arguments and do LT.
14028 To compute GE, do LT and invert the result.
14029 To compute LE, swap the arguments, do LT and invert the result.
14030 To compute NE, do EQ and invert the result.
14032 Therefore, the code below must handle only EQ and LT. */
14034 if (code == LE_EXPR || code == GT_EXPR)
14039 code = swap_tree_comparison (code);
14042 /* Note that it is safe to invert for real values here because we
14043 have already handled the one case that it matters. */
14046 if (code == NE_EXPR || code == GE_EXPR)
14049 code = invert_tree_comparison (code, false);
14052 /* Compute a result for LT or EQ if args permit;
14053 Otherwise return T. */
14054 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14056 if (code == EQ_EXPR)
14057 result = tree_int_cst_equal (op0, op1);
14058 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14059 result = INT_CST_LT_UNSIGNED (op0, op1);
14061 result = INT_CST_LT (op0, op1);
14068 return constant_boolean_node (result, type);
14071 /* Build an expression for the a clean point containing EXPR with type TYPE.
14072 Don't build a cleanup point expression for EXPR which don't have side
14076 fold_build_cleanup_point_expr (tree type, tree expr)
14078 /* If the expression does not have side effects then we don't have to wrap
14079 it with a cleanup point expression. */
14080 if (!TREE_SIDE_EFFECTS (expr))
14083 /* If the expression is a return, check to see if the expression inside the
14084 return has no side effects or the right hand side of the modify expression
14085 inside the return. If either don't have side effects set we don't need to
14086 wrap the expression in a cleanup point expression. Note we don't check the
14087 left hand side of the modify because it should always be a return decl. */
14088 if (TREE_CODE (expr) == RETURN_EXPR)
14090 tree op = TREE_OPERAND (expr, 0);
14091 if (!op || !TREE_SIDE_EFFECTS (op))
14093 op = TREE_OPERAND (op, 1);
14094 if (!TREE_SIDE_EFFECTS (op))
14098 return build1 (CLEANUP_POINT_EXPR, type, expr);
14101 /* Build an expression for the address of T. Folds away INDIRECT_REF to
14102 avoid confusing the gimplify process. */
14105 build_fold_addr_expr_with_type (tree t, tree ptrtype)
14107 /* The size of the object is not relevant when talking about its address. */
14108 if (TREE_CODE (t) == WITH_SIZE_EXPR)
14109 t = TREE_OPERAND (t, 0);
14111 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
14112 if (TREE_CODE (t) == INDIRECT_REF
14113 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
14115 t = TREE_OPERAND (t, 0);
14116 if (TREE_TYPE (t) != ptrtype)
14117 t = build1 (NOP_EXPR, ptrtype, t);
14123 while (handled_component_p (base))
14124 base = TREE_OPERAND (base, 0);
14126 TREE_ADDRESSABLE (base) = 1;
14128 t = build1 (ADDR_EXPR, ptrtype, t);
14135 build_fold_addr_expr (tree t)
14137 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
14140 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14141 of an indirection through OP0, or NULL_TREE if no simplification is
14145 fold_indirect_ref_1 (tree type, tree op0)
14151 subtype = TREE_TYPE (sub);
14152 if (!POINTER_TYPE_P (subtype))
14155 if (TREE_CODE (sub) == ADDR_EXPR)
14157 tree op = TREE_OPERAND (sub, 0);
14158 tree optype = TREE_TYPE (op);
14159 /* *&CONST_DECL -> to the value of the const decl. */
14160 if (TREE_CODE (op) == CONST_DECL)
14161 return DECL_INITIAL (op);
14162 /* *&p => p; make sure to handle *&"str"[cst] here. */
14163 if (type == optype)
14165 tree fop = fold_read_from_constant_string (op);
14171 /* *(foo *)&fooarray => fooarray[0] */
14172 else if (TREE_CODE (optype) == ARRAY_TYPE
14173 && type == TREE_TYPE (optype))
14175 tree type_domain = TYPE_DOMAIN (optype);
14176 tree min_val = size_zero_node;
14177 if (type_domain && TYPE_MIN_VALUE (type_domain))
14178 min_val = TYPE_MIN_VALUE (type_domain);
14179 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14181 /* *(foo *)&complexfoo => __real__ complexfoo */
14182 else if (TREE_CODE (optype) == COMPLEX_TYPE
14183 && type == TREE_TYPE (optype))
14184 return fold_build1 (REALPART_EXPR, type, op);
14185 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14186 else if (TREE_CODE (optype) == VECTOR_TYPE
14187 && type == TREE_TYPE (optype))
14189 tree part_width = TYPE_SIZE (type);
14190 tree index = bitsize_int (0);
14191 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14195 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14196 if (TREE_CODE (sub) == PLUS_EXPR
14197 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14199 tree op00 = TREE_OPERAND (sub, 0);
14200 tree op01 = TREE_OPERAND (sub, 1);
14204 op00type = TREE_TYPE (op00);
14205 if (TREE_CODE (op00) == ADDR_EXPR
14206 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14207 && type == TREE_TYPE (TREE_TYPE (op00type)))
14209 tree size = TYPE_SIZE_UNIT (type);
14210 if (tree_int_cst_equal (size, op01))
14211 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14215 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14216 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14217 && type == TREE_TYPE (TREE_TYPE (subtype)))
14220 tree min_val = size_zero_node;
14221 sub = build_fold_indirect_ref (sub);
14222 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14223 if (type_domain && TYPE_MIN_VALUE (type_domain))
14224 min_val = TYPE_MIN_VALUE (type_domain);
14225 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
14231 /* Builds an expression for an indirection through T, simplifying some
14235 build_fold_indirect_ref (tree t)
14237 tree type = TREE_TYPE (TREE_TYPE (t));
14238 tree sub = fold_indirect_ref_1 (type, t);
14243 return build1 (INDIRECT_REF, type, t);
14246 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14249 fold_indirect_ref (tree t)
14251 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
14259 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14260 whose result is ignored. The type of the returned tree need not be
14261 the same as the original expression. */
14264 fold_ignored_result (tree t)
14266 if (!TREE_SIDE_EFFECTS (t))
14267 return integer_zero_node;
14270 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14273 t = TREE_OPERAND (t, 0);
14277 case tcc_comparison:
14278 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14279 t = TREE_OPERAND (t, 0);
14280 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14281 t = TREE_OPERAND (t, 1);
14286 case tcc_expression:
14287 switch (TREE_CODE (t))
14289 case COMPOUND_EXPR:
14290 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14292 t = TREE_OPERAND (t, 0);
14296 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14297 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14299 t = TREE_OPERAND (t, 0);
14312 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
14313 This can only be applied to objects of a sizetype. */
14316 round_up (tree value, int divisor)
14318 tree div = NULL_TREE;
14320 gcc_assert (divisor > 0);
14324 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14325 have to do anything. Only do this when we are not given a const,
14326 because in that case, this check is more expensive than just
14328 if (TREE_CODE (value) != INTEGER_CST)
14330 div = build_int_cst (TREE_TYPE (value), divisor);
14332 if (multiple_of_p (TREE_TYPE (value), value, div))
14336 /* If divisor is a power of two, simplify this to bit manipulation. */
14337 if (divisor == (divisor & -divisor))
14339 if (TREE_CODE (value) == INTEGER_CST)
14341 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
14342 unsigned HOST_WIDE_INT high;
14345 if ((low & (divisor - 1)) == 0)
14348 overflow_p = TREE_OVERFLOW (value);
14349 high = TREE_INT_CST_HIGH (value);
14350 low &= ~(divisor - 1);
14359 return force_fit_type_double (TREE_TYPE (value), low, high,
14366 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14367 value = size_binop (PLUS_EXPR, value, t);
14368 t = build_int_cst (TREE_TYPE (value), -divisor);
14369 value = size_binop (BIT_AND_EXPR, value, t);
14375 div = build_int_cst (TREE_TYPE (value), divisor);
14376 value = size_binop (CEIL_DIV_EXPR, value, div);
14377 value = size_binop (MULT_EXPR, value, div);
14383 /* Likewise, but round down. */
14386 round_down (tree value, int divisor)
14388 tree div = NULL_TREE;
14390 gcc_assert (divisor > 0);
14394 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14395 have to do anything. Only do this when we are not given a const,
14396 because in that case, this check is more expensive than just
14398 if (TREE_CODE (value) != INTEGER_CST)
14400 div = build_int_cst (TREE_TYPE (value), divisor);
14402 if (multiple_of_p (TREE_TYPE (value), value, div))
14406 /* If divisor is a power of two, simplify this to bit manipulation. */
14407 if (divisor == (divisor & -divisor))
14411 t = build_int_cst (TREE_TYPE (value), -divisor);
14412 value = size_binop (BIT_AND_EXPR, value, t);
14417 div = build_int_cst (TREE_TYPE (value), divisor);
14418 value = size_binop (FLOOR_DIV_EXPR, value, div);
14419 value = size_binop (MULT_EXPR, value, div);
14425 /* Returns the pointer to the base of the object addressed by EXP and
14426 extracts the information about the offset of the access, storing it
14427 to PBITPOS and POFFSET. */
14430 split_address_to_core_and_offset (tree exp,
14431 HOST_WIDE_INT *pbitpos, tree *poffset)
14434 enum machine_mode mode;
14435 int unsignedp, volatilep;
14436 HOST_WIDE_INT bitsize;
14438 if (TREE_CODE (exp) == ADDR_EXPR)
14440 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14441 poffset, &mode, &unsignedp, &volatilep,
14443 core = build_fold_addr_expr (core);
14449 *poffset = NULL_TREE;
14455 /* Returns true if addresses of E1 and E2 differ by a constant, false
14456 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14459 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14462 HOST_WIDE_INT bitpos1, bitpos2;
14463 tree toffset1, toffset2, tdiff, type;
14465 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14466 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14468 if (bitpos1 % BITS_PER_UNIT != 0
14469 || bitpos2 % BITS_PER_UNIT != 0
14470 || !operand_equal_p (core1, core2, 0))
14473 if (toffset1 && toffset2)
14475 type = TREE_TYPE (toffset1);
14476 if (type != TREE_TYPE (toffset2))
14477 toffset2 = fold_convert (type, toffset2);
14479 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14480 if (!cst_and_fits_in_hwi (tdiff))
14483 *diff = int_cst_value (tdiff);
14485 else if (toffset1 || toffset2)
14487 /* If only one of the offsets is non-constant, the difference cannot
14494 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14498 /* Simplify the floating point expression EXP when the sign of the
14499 result is not significant. Return NULL_TREE if no simplification
14503 fold_strip_sign_ops (tree exp)
14507 switch (TREE_CODE (exp))
14511 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14512 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
14516 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
14518 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14519 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14520 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
14521 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
14522 arg0 ? arg0 : TREE_OPERAND (exp, 0),
14523 arg1 ? arg1 : TREE_OPERAND (exp, 1));
14526 case COMPOUND_EXPR:
14527 arg0 = TREE_OPERAND (exp, 0);
14528 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14530 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
14534 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14535 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
14537 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
14538 arg0 ? arg0 : TREE_OPERAND (exp, 1),
14539 arg1 ? arg1 : TREE_OPERAND (exp, 2));
14544 const enum built_in_function fcode = builtin_mathfn_code (exp);
14547 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14548 /* Strip copysign function call, return the 1st argument. */
14549 arg0 = CALL_EXPR_ARG (exp, 0);
14550 arg1 = CALL_EXPR_ARG (exp, 1);
14551 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
14554 /* Strip sign ops from the argument of "odd" math functions. */
14555 if (negate_mathfn_p (fcode))
14557 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
14559 return build_call_expr (get_callee_fndecl (exp), 1, arg0);