1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 /*@@ This file should be rewritten to use an arbitrary precision
24 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
25 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
26 @@ The routines that translate from the ap rep should
27 @@ warn if precision et al. is lost.
28 @@ This would also make life easier when this technology is used
29 @@ for cross-compilers. */
31 /* The entry points in this file are fold, size_int_wide, size_binop
32 and force_fit_type_double.
34 fold takes a tree as argument and returns a simplified tree.
36 size_binop takes a tree code for an arithmetic operation
37 and two operands that are trees, and produces a tree for the
38 result, assuming the type comes from `sizetype'.
40 size_int takes an integer value, and creates a tree constant
41 with type from `sizetype'.
43 force_fit_type_double takes a constant, an overflowable flag and a
44 prior overflow indicator. It forces the value to fit the type and
47 Note: Since the folders get called on non-gimple code as well as
48 gimple code, we need to handle GIMPLE tuples as well as their
49 corresponding tree equivalents. */
53 #include "coretypes.h"
65 #include "langhooks.h"
/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
72 /* The following constants represent a bit based encoding of GCC's
73 comparison operators. This encoding simplifies transformations
74 on relational comparison operators, such as AND and OR. */
75 enum comparison_code {
94 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
95 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
96 static bool negate_mathfn_p (enum built_in_function);
97 static bool negate_expr_p (tree);
98 static tree negate_expr (tree);
99 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
100 static tree associate_trees (tree, tree, enum tree_code, tree);
101 static tree const_binop (enum tree_code, tree, tree, int);
102 static enum comparison_code comparison_to_compcode (enum tree_code);
103 static enum tree_code compcode_to_comparison (enum comparison_code);
104 static tree combine_comparisons (enum tree_code, enum tree_code,
105 enum tree_code, tree, tree, tree);
106 static int truth_value_p (enum tree_code);
107 static int operand_equal_for_comparison_p (tree, tree, tree);
108 static int twoval_comparison_p (tree, tree *, tree *, int *);
109 static tree eval_subst (tree, tree, tree, tree, tree);
110 static tree pedantic_omit_one_operand (tree, tree, tree);
111 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
112 static tree make_bit_field_ref (tree, tree, int, int, int);
113 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
114 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
115 enum machine_mode *, int *, int *,
117 static int all_ones_mask_p (tree, int);
118 static tree sign_bit_p (tree, tree);
119 static int simple_operand_p (tree);
120 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
121 static tree range_predecessor (tree);
122 static tree range_successor (tree);
123 static tree make_range (tree, int *, tree *, tree *, bool *);
124 static tree build_range_check (tree, tree, int, tree, tree);
125 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
127 static tree fold_range_test (enum tree_code, tree, tree, tree);
128 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
129 static tree unextend (tree, int, int, tree);
130 static tree fold_truthop (enum tree_code, tree, tree, tree);
131 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
132 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
133 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
134 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
137 static bool fold_real_zero_addition_p (tree, tree, int);
138 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
140 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (tree, tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static int native_encode_expr (tree, unsigned char *, int);
147 static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

/* NOTE(review): the "#define LOWPART(x)" header line was missing here;
   restored so the macro body below is attached to a definition.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
171 /* Unpack a two-word integer into 4 words.
172 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
173 WORDS points to the array of HOST_WIDE_INTs. */
176 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
178 words[0] = LOWPART (low);
179 words[1] = HIGHPART (low);
180 words[2] = LOWPART (hi);
181 words[3] = HIGHPART (hi);
184 /* Pack an array of 4 words into a two-word integer.
185 WORDS points to the array of words.
186 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
189 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
192 *low = words[0] + words[1] * BASE;
193 *hi = words[2] + words[3] * BASE;
196 /* Force the double-word integer L1, H1 to be within the range of the
197 integer type TYPE. Stores the properly truncated and sign-extended
198 double-word integer in *LV, *HV. Returns true if the operation
199 overflows, that is, argument and result are different. */
202 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
203 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
205 unsigned HOST_WIDE_INT low0 = l1;
206 HOST_WIDE_INT high0 = h1;
208 int sign_extended_type;
210 if (POINTER_TYPE_P (type)
211 || TREE_CODE (type) == OFFSET_TYPE)
214 prec = TYPE_PRECISION (type);
216 /* Size types *are* sign extended. */
217 sign_extended_type = (!TYPE_UNSIGNED (type)
218 || (TREE_CODE (type) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (type)));
221 /* First clear all bits that are beyond the type's precision. */
222 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
224 else if (prec > HOST_BITS_PER_WIDE_INT)
225 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
229 if (prec < HOST_BITS_PER_WIDE_INT)
230 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
233 /* Then do sign extension if necessary. */
234 if (!sign_extended_type)
235 /* No sign extension */;
236 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
237 /* Correct width already. */;
238 else if (prec > HOST_BITS_PER_WIDE_INT)
240 /* Sign extend top half? */
241 if (h1 & ((unsigned HOST_WIDE_INT)1
242 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
245 else if (prec == HOST_BITS_PER_WIDE_INT)
247 if ((HOST_WIDE_INT)l1 < 0)
252 /* Sign extend bottom half? */
253 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
256 l1 |= (HOST_WIDE_INT)(-1) << prec;
263 /* If the value didn't fit, signal overflow. */
264 return l1 != low0 || h1 != high0;
267 /* We force the double-int HIGH:LOW to the range of the type TYPE by
268 sign or zero extending it.
269 OVERFLOWABLE indicates if we are interested
270 in overflow of the value, when >0 we are only interested in signed
271 overflow, for <0 we are interested in any overflow. OVERFLOWED
272 indicates whether overflow has already occurred. CONST_OVERFLOWED
273 indicates whether constant overflow has already occurred. We force
274 T's value to be within range of T's type (by setting to 0 or 1 all
275 the bits outside the type's range). We set TREE_OVERFLOWED if,
276 OVERFLOWED is nonzero,
277 or OVERFLOWABLE is >0 and signed overflow occurs
278 or OVERFLOWABLE is <0 and any overflow occurs
279 We return a new tree node for the extended double-int. The node
280 is shared if no overflow flags are set. */
283 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
284 HOST_WIDE_INT high, int overflowable,
287 int sign_extended_type;
290 /* Size types *are* sign extended. */
291 sign_extended_type = (!TYPE_UNSIGNED (type)
292 || (TREE_CODE (type) == INTEGER_TYPE
293 && TYPE_IS_SIZETYPE (type)));
295 overflow = fit_double_type (low, high, &low, &high, type);
297 /* If we need to set overflow flags, return a new unshared node. */
298 if (overflowed || overflow)
302 || (overflowable > 0 && sign_extended_type))
304 tree t = make_node (INTEGER_CST);
305 TREE_INT_CST_LOW (t) = low;
306 TREE_INT_CST_HIGH (t) = high;
307 TREE_TYPE (t) = type;
308 TREE_OVERFLOW (t) = 1;
313 /* Else build a shared node. */
314 return build_int_cst_wide (type, low, high);
317 /* Add two doubleword integers with doubleword result.
318 Return nonzero if the operation overflows according to UNSIGNED_P.
319 Each argument is given as two `HOST_WIDE_INT' pieces.
320 One argument is L1 and H1; the other, L2 and H2.
321 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
324 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
325 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
326 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
329 unsigned HOST_WIDE_INT l;
333 h = h1 + h2 + (l < l1);
339 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
341 return OVERFLOW_SUM_SIGN (h1, h2, h);
344 /* Negate a doubleword integer with doubleword result.
345 Return nonzero if the operation overflows, assuming it's signed.
346 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
347 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
350 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
351 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
357 return (*hv & h1) < 0;
367 /* Multiply two doubleword integers with doubleword result.
368 Return nonzero if the operation overflows according to UNSIGNED_P.
369 Each argument is given as two `HOST_WIDE_INT' pieces.
370 One argument is L1 and H1; the other, L2 and H2.
371 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
374 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
375 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
376 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
379 HOST_WIDE_INT arg1[4];
380 HOST_WIDE_INT arg2[4];
381 HOST_WIDE_INT prod[4 * 2];
382 unsigned HOST_WIDE_INT carry;
384 unsigned HOST_WIDE_INT toplow, neglow;
385 HOST_WIDE_INT tophigh, neghigh;
387 encode (arg1, l1, h1);
388 encode (arg2, l2, h2);
390 memset (prod, 0, sizeof prod);
392 for (i = 0; i < 4; i++)
395 for (j = 0; j < 4; j++)
398 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
399 carry += arg1[i] * arg2[j];
400 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
402 prod[k] = LOWPART (carry);
403 carry = HIGHPART (carry);
408 decode (prod, lv, hv);
409 decode (prod + 4, &toplow, &tophigh);
411 /* Unsigned overflow is immediate. */
413 return (toplow | tophigh) != 0;
415 /* Check for signed overflow by calculating the signed representation of the
416 top half of the result; it should agree with the low half's sign bit. */
419 neg_double (l2, h2, &neglow, &neghigh);
420 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
424 neg_double (l1, h1, &neglow, &neghigh);
425 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
427 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
430 /* Shift the doubleword integer in L1, H1 left by COUNT places
431 keeping only PREC bits of result.
432 Shift right if COUNT is negative.
433 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
434 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
437 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
438 HOST_WIDE_INT count, unsigned int prec,
439 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
441 unsigned HOST_WIDE_INT signmask;
445 rshift_double (l1, h1, -count, prec, lv, hv, arith);
449 if (SHIFT_COUNT_TRUNCATED)
452 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
454 /* Shifting by the host word size is undefined according to the
455 ANSI standard, so we must handle this as a special case. */
459 else if (count >= HOST_BITS_PER_WIDE_INT)
461 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
466 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
467 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
471 /* Sign extend all bits that are beyond the precision. */
473 signmask = -((prec > HOST_BITS_PER_WIDE_INT
474 ? ((unsigned HOST_WIDE_INT) *hv
475 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
476 : (*lv >> (prec - 1))) & 1);
478 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
480 else if (prec >= HOST_BITS_PER_WIDE_INT)
482 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
483 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
488 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
489 *lv |= signmask << prec;
493 /* Shift the doubleword integer in L1, H1 right by COUNT places
494 keeping only PREC bits of result. COUNT must be positive.
495 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
496 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
499 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
500 HOST_WIDE_INT count, unsigned int prec,
501 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
504 unsigned HOST_WIDE_INT signmask;
507 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
510 if (SHIFT_COUNT_TRUNCATED)
513 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
515 /* Shifting by the host word size is undefined according to the
516 ANSI standard, so we must handle this as a special case. */
520 else if (count >= HOST_BITS_PER_WIDE_INT)
523 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
527 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
529 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
532 /* Zero / sign extend all bits that are beyond the precision. */
534 if (count >= (HOST_WIDE_INT)prec)
539 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
541 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
543 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
544 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
549 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
550 *lv |= signmask << (prec - count);
554 /* Rotate the doubleword integer in L1, H1 left by COUNT places
555 keeping only PREC bits of result.
556 Rotate right if COUNT is negative.
557 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
560 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
561 HOST_WIDE_INT count, unsigned int prec,
562 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
564 unsigned HOST_WIDE_INT s1l, s2l;
565 HOST_WIDE_INT s1h, s2h;
571 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
572 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
577 /* Rotate the doubleword integer in L1, H1 left by COUNT places
578 keeping only PREC bits of result. COUNT must be positive.
579 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
582 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
583 HOST_WIDE_INT count, unsigned int prec,
584 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
586 unsigned HOST_WIDE_INT s1l, s2l;
587 HOST_WIDE_INT s1h, s2h;
593 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
594 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
599 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
600 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
601 CODE is a tree code for a kind of division, one of
602 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
604 It controls how the quotient is rounded to an integer.
605 Return nonzero if the operation overflows.
606 UNS nonzero says do unsigned division. */
609 div_and_round_double (enum tree_code code, int uns,
610 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
611 HOST_WIDE_INT hnum_orig,
612 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
613 HOST_WIDE_INT hden_orig,
614 unsigned HOST_WIDE_INT *lquo,
615 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
619 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
620 HOST_WIDE_INT den[4], quo[4];
622 unsigned HOST_WIDE_INT work;
623 unsigned HOST_WIDE_INT carry = 0;
624 unsigned HOST_WIDE_INT lnum = lnum_orig;
625 HOST_WIDE_INT hnum = hnum_orig;
626 unsigned HOST_WIDE_INT lden = lden_orig;
627 HOST_WIDE_INT hden = hden_orig;
630 if (hden == 0 && lden == 0)
631 overflow = 1, lden = 1;
633 /* Calculate quotient sign and convert operands to unsigned. */
639 /* (minimum integer) / (-1) is the only overflow case. */
640 if (neg_double (lnum, hnum, &lnum, &hnum)
641 && ((HOST_WIDE_INT) lden & hden) == -1)
647 neg_double (lden, hden, &lden, &hden);
651 if (hnum == 0 && hden == 0)
652 { /* single precision */
654 /* This unsigned division rounds toward zero. */
660 { /* trivial case: dividend < divisor */
661 /* hden != 0 already checked. */
668 memset (quo, 0, sizeof quo);
670 memset (num, 0, sizeof num); /* to zero 9th element */
671 memset (den, 0, sizeof den);
673 encode (num, lnum, hnum);
674 encode (den, lden, hden);
676 /* Special code for when the divisor < BASE. */
677 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
679 /* hnum != 0 already checked. */
680 for (i = 4 - 1; i >= 0; i--)
682 work = num[i] + carry * BASE;
683 quo[i] = work / lden;
689 /* Full double precision division,
690 with thanks to Don Knuth's "Seminumerical Algorithms". */
691 int num_hi_sig, den_hi_sig;
692 unsigned HOST_WIDE_INT quo_est, scale;
694 /* Find the highest nonzero divisor digit. */
695 for (i = 4 - 1;; i--)
702 /* Insure that the first digit of the divisor is at least BASE/2.
703 This is required by the quotient digit estimation algorithm. */
705 scale = BASE / (den[den_hi_sig] + 1);
707 { /* scale divisor and dividend */
709 for (i = 0; i <= 4 - 1; i++)
711 work = (num[i] * scale) + carry;
712 num[i] = LOWPART (work);
713 carry = HIGHPART (work);
718 for (i = 0; i <= 4 - 1; i++)
720 work = (den[i] * scale) + carry;
721 den[i] = LOWPART (work);
722 carry = HIGHPART (work);
723 if (den[i] != 0) den_hi_sig = i;
730 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
732 /* Guess the next quotient digit, quo_est, by dividing the first
733 two remaining dividend digits by the high order quotient digit.
734 quo_est is never low and is at most 2 high. */
735 unsigned HOST_WIDE_INT tmp;
737 num_hi_sig = i + den_hi_sig + 1;
738 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
739 if (num[num_hi_sig] != den[den_hi_sig])
740 quo_est = work / den[den_hi_sig];
744 /* Refine quo_est so it's usually correct, and at most one high. */
745 tmp = work - quo_est * den[den_hi_sig];
747 && (den[den_hi_sig - 1] * quo_est
748 > (tmp * BASE + num[num_hi_sig - 2])))
751 /* Try QUO_EST as the quotient digit, by multiplying the
752 divisor by QUO_EST and subtracting from the remaining dividend.
753 Keep in mind that QUO_EST is the I - 1st digit. */
756 for (j = 0; j <= den_hi_sig; j++)
758 work = quo_est * den[j] + carry;
759 carry = HIGHPART (work);
760 work = num[i + j] - LOWPART (work);
761 num[i + j] = LOWPART (work);
762 carry += HIGHPART (work) != 0;
765 /* If quo_est was high by one, then num[i] went negative and
766 we need to correct things. */
767 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
770 carry = 0; /* add divisor back in */
771 for (j = 0; j <= den_hi_sig; j++)
773 work = num[i + j] + den[j] + carry;
774 carry = HIGHPART (work);
775 num[i + j] = LOWPART (work);
778 num [num_hi_sig] += carry;
781 /* Store the quotient digit. */
786 decode (quo, lquo, hquo);
789 /* If result is negative, make it so. */
791 neg_double (*lquo, *hquo, lquo, hquo);
793 /* Compute trial remainder: rem = num - (quo * den) */
794 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
795 neg_double (*lrem, *hrem, lrem, hrem);
796 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
801 case TRUNC_MOD_EXPR: /* round toward zero */
802 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
806 case FLOOR_MOD_EXPR: /* round toward negative infinity */
807 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
810 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
818 case CEIL_MOD_EXPR: /* round toward positive infinity */
819 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
821 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
829 case ROUND_MOD_EXPR: /* round to closest integer */
831 unsigned HOST_WIDE_INT labs_rem = *lrem;
832 HOST_WIDE_INT habs_rem = *hrem;
833 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
834 HOST_WIDE_INT habs_den = hden, htwice;
836 /* Get absolute values. */
838 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
840 neg_double (lden, hden, &labs_den, &habs_den);
842 /* If (2 * abs (lrem) >= abs (lden)) */
843 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
844 labs_rem, habs_rem, <wice, &htwice);
846 if (((unsigned HOST_WIDE_INT) habs_den
847 < (unsigned HOST_WIDE_INT) htwice)
848 || (((unsigned HOST_WIDE_INT) habs_den
849 == (unsigned HOST_WIDE_INT) htwice)
850 && (labs_den < ltwice)))
854 add_double (*lquo, *hquo,
855 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
858 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
870 /* Compute true remainder: rem = num - (quo * den) */
871 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
872 neg_double (*lrem, *hrem, lrem, hrem);
873 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
877 /* If ARG2 divides ARG1 with zero remainder, carries out the division
878 of type CODE and returns the quotient.
879 Otherwise returns NULL_TREE. */
882 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
884 unsigned HOST_WIDE_INT int1l, int2l;
885 HOST_WIDE_INT int1h, int2h;
886 unsigned HOST_WIDE_INT quol, reml;
887 HOST_WIDE_INT quoh, remh;
888 tree type = TREE_TYPE (arg1);
889 int uns = TYPE_UNSIGNED (type);
891 int1l = TREE_INT_CST_LOW (arg1);
892 int1h = TREE_INT_CST_HIGH (arg1);
893 int2l = TREE_INT_CST_LOW (arg2);
894 int2h = TREE_INT_CST_HIGH (arg2);
896 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
897 &quol, &quoh, &reml, &remh);
898 if (remh != 0 || reml != 0)
901 return build_int_cst_wide (type, quol, quoh);
904 /* This is non-zero if we should defer warnings about undefined
905 overflow. This facility exists because these warnings are a
906 special case. The code to estimate loop iterations does not want
907 to issue any warnings, since it works with expressions which do not
908 occur in user code. Various bits of cleanup code call fold(), but
909 only use the result if it has certain characteristics (e.g., is a
910 constant); that code only wants to issue a warning if the result is
913 static int fold_deferring_overflow_warnings;
915 /* If a warning about undefined overflow is deferred, this is the
916 warning. Note that this may cause us to turn two warnings into
917 one, but that is fine since it is sufficient to only give one
918 warning per expression. */
920 static const char* fold_deferred_overflow_warning;
922 /* If a warning about undefined overflow is deferred, this is the
923 level at which the warning should be emitted. */
925 static enum warn_strict_overflow_code fold_deferred_overflow_code;
927 /* Start deferring overflow warnings. We could use a stack here to
928 permit nested calls, but at present it is not necessary. */
931 fold_defer_overflow_warnings (void)
933 ++fold_deferring_overflow_warnings;
936 /* Stop deferring overflow warnings. If there is a pending warning,
937 and ISSUE is true, then issue the warning if appropriate. STMT is
938 the statement with which the warning should be associated (used for
939 location information); STMT may be NULL. CODE is the level of the
940 warning--a warn_strict_overflow_code value. This function will use
941 the smaller of CODE and the deferred code when deciding whether to
942 issue the warning. CODE may be zero to mean to always use the
946 fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
951 gcc_assert (fold_deferring_overflow_warnings > 0);
952 --fold_deferring_overflow_warnings;
953 if (fold_deferring_overflow_warnings > 0)
955 if (fold_deferred_overflow_warning != NULL
957 && code < (int) fold_deferred_overflow_code)
958 fold_deferred_overflow_code = code;
962 warnmsg = fold_deferred_overflow_warning;
963 fold_deferred_overflow_warning = NULL;
965 if (!issue || warnmsg == NULL)
968 /* Use the smallest code level when deciding to issue the
970 if (code == 0 || code > (int) fold_deferred_overflow_code)
971 code = fold_deferred_overflow_code;
973 if (!issue_strict_overflow_warning (code))
976 if (stmt == NULL_TREE || !expr_has_location (stmt))
977 locus = input_location;
979 locus = expr_location (stmt);
980 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
983 /* Stop deferring overflow warnings, ignoring any deferred
987 fold_undefer_and_ignore_overflow_warnings (void)
989 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
992 /* Whether we are deferring overflow warnings. */
995 fold_deferring_overflow_warnings_p (void)
997 return fold_deferring_overflow_warnings > 0;
1000 /* This is called when we fold something based on the fact that signed
1001 overflow is undefined. */
1004 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1006 gcc_assert (!flag_wrapv && !flag_trapv);
1007 if (fold_deferring_overflow_warnings > 0)
1009 if (fold_deferred_overflow_warning == NULL
1010 || wc < fold_deferred_overflow_code)
1012 fold_deferred_overflow_warning = gmsgid;
1013 fold_deferred_overflow_code = wc;
1016 else if (issue_strict_overflow_warning (wc))
1017 warning (OPT_Wstrict_overflow, gmsgid);
1020 /* Return true if the built-in mathematical function specified by CODE
1021 is odd, i.e. -f(x) == f(-x). */
1024 negate_mathfn_p (enum built_in_function code)
1028 CASE_FLT_FN (BUILT_IN_ASIN):
1029 CASE_FLT_FN (BUILT_IN_ASINH):
1030 CASE_FLT_FN (BUILT_IN_ATAN):
1031 CASE_FLT_FN (BUILT_IN_ATANH):
1032 CASE_FLT_FN (BUILT_IN_CASIN):
1033 CASE_FLT_FN (BUILT_IN_CASINH):
1034 CASE_FLT_FN (BUILT_IN_CATAN):
1035 CASE_FLT_FN (BUILT_IN_CATANH):
1036 CASE_FLT_FN (BUILT_IN_CBRT):
1037 CASE_FLT_FN (BUILT_IN_CPROJ):
1038 CASE_FLT_FN (BUILT_IN_CSIN):
1039 CASE_FLT_FN (BUILT_IN_CSINH):
1040 CASE_FLT_FN (BUILT_IN_CTAN):
1041 CASE_FLT_FN (BUILT_IN_CTANH):
1042 CASE_FLT_FN (BUILT_IN_ERF):
1043 CASE_FLT_FN (BUILT_IN_LLROUND):
1044 CASE_FLT_FN (BUILT_IN_LROUND):
1045 CASE_FLT_FN (BUILT_IN_ROUND):
1046 CASE_FLT_FN (BUILT_IN_SIN):
1047 CASE_FLT_FN (BUILT_IN_SINH):
1048 CASE_FLT_FN (BUILT_IN_TAN):
1049 CASE_FLT_FN (BUILT_IN_TANH):
1050 CASE_FLT_FN (BUILT_IN_TRUNC):
1053 CASE_FLT_FN (BUILT_IN_LLRINT):
1054 CASE_FLT_FN (BUILT_IN_LRINT):
1055 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1056 CASE_FLT_FN (BUILT_IN_RINT):
1057 return !flag_rounding_math;
1065 /* Check whether we may negate an integer constant T without causing
1069 may_negate_without_overflow_p (tree t)
1071 unsigned HOST_WIDE_INT val;
1075 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1077 type = TREE_TYPE (t);
1078 if (TYPE_UNSIGNED (type))
1081 prec = TYPE_PRECISION (type);
1082 if (prec > HOST_BITS_PER_WIDE_INT)
1084 if (TREE_INT_CST_LOW (t) != 0)
1086 prec -= HOST_BITS_PER_WIDE_INT;
1087 val = TREE_INT_CST_HIGH (t);
1090 val = TREE_INT_CST_LOW (t);
1091 if (prec < HOST_BITS_PER_WIDE_INT)
1092 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1093 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1096 /* Determine whether an expression T can be cheaply negated using
1097 the function negate_expr without introducing undefined overflow. */
1100 negate_expr_p (tree t)
1107 type = TREE_TYPE (t);
1109 STRIP_SIGN_NOPS (t);
1110 switch (TREE_CODE (t))
1113 if (TYPE_OVERFLOW_WRAPS (type))
1116 /* Check that -CST will not overflow type. */
1117 return may_negate_without_overflow_p (t);
1119 return (INTEGRAL_TYPE_P (type)
1120 && TYPE_OVERFLOW_WRAPS (type));
1127 return negate_expr_p (TREE_REALPART (t))
1128 && negate_expr_p (TREE_IMAGPART (t));
1131 return negate_expr_p (TREE_OPERAND (t, 0))
1132 && negate_expr_p (TREE_OPERAND (t, 1));
1135 return negate_expr_p (TREE_OPERAND (t, 0));
1138 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1139 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1141 /* -(A + B) -> (-B) - A. */
1142 if (negate_expr_p (TREE_OPERAND (t, 1))
1143 && reorder_operands_p (TREE_OPERAND (t, 0),
1144 TREE_OPERAND (t, 1)))
1146 /* -(A + B) -> (-A) - B. */
1147 return negate_expr_p (TREE_OPERAND (t, 0));
1150 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1151 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1152 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1153 && reorder_operands_p (TREE_OPERAND (t, 0),
1154 TREE_OPERAND (t, 1));
1157 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1163 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1164 return negate_expr_p (TREE_OPERAND (t, 1))
1165 || negate_expr_p (TREE_OPERAND (t, 0));
1168 case TRUNC_DIV_EXPR:
1169 case ROUND_DIV_EXPR:
1170 case FLOOR_DIV_EXPR:
1172 case EXACT_DIV_EXPR:
1173 /* In general we can't negate A / B, because if A is INT_MIN and
1174 B is 1, we may turn this into INT_MIN / -1 which is undefined
1175 and actually traps on some architectures. But if overflow is
1176 undefined, we can negate, because - (INT_MIN / 1) is an
1178 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1179 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1181 return negate_expr_p (TREE_OPERAND (t, 1))
1182 || negate_expr_p (TREE_OPERAND (t, 0));
1185 /* Negate -((double)float) as (double)(-float). */
1186 if (TREE_CODE (type) == REAL_TYPE)
1188 tree tem = strip_float_extensions (t);
1190 return negate_expr_p (tem);
1195 /* Negate -f(x) as f(-x). */
1196 if (negate_mathfn_p (builtin_mathfn_code (t)))
1197 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1201 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1202 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1204 tree op1 = TREE_OPERAND (t, 1);
1205 if (TREE_INT_CST_HIGH (op1) == 0
1206 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1207 == TREE_INT_CST_LOW (op1))
1218 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1219 simplification is possible.
1220 If negate_expr_p would return true for T, NULL_TREE will never be
/* Worker for negate_expr: dispatches on TREE_CODE (t) and builds the
   simplified negation, or returns NULL_TREE when none is known.
   NOTE(review): several original lines (case labels, braces) are elided
   in this excerpt; comments below describe only what is visible.  */
1224 fold_negate_expr (tree t)
1226 tree type = TREE_TYPE (t);
1229 switch (TREE_CODE (t))
1231 /* Convert - (~A) to A + 1. */
1233 if (INTEGRAL_TYPE_P (type))
1234 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1235 build_int_cst (type, 1));
/* Constant operand: fold the negation numerically, but only keep the
   result if it does not introduce a new overflow on a trapping type.  */
1239 tem = fold_negate_const (t, type);
1240 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1241 || !TYPE_OVERFLOW_TRAPS (type))
1246 tem = fold_negate_const (t, type);
1247 /* Two's complement FP formats, such as c4x, may overflow. */
1248 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
/* Complex constant: negate the two parts; only fold when both parts
   folded down to constants again.  */
1254 tree rpart = negate_expr (TREE_REALPART (t));
1255 tree ipart = negate_expr (TREE_IMAGPART (t));
1257 if ((TREE_CODE (rpart) == REAL_CST
1258 && TREE_CODE (ipart) == REAL_CST)
1259 || (TREE_CODE (rpart) == INTEGER_CST
1260 && TREE_CODE (ipart) == INTEGER_CST))
1261 return build_complex (type, rpart, ipart);
1266 if (negate_expr_p (t))
1267 return fold_build2 (COMPLEX_EXPR, type,
1268 fold_negate_expr (TREE_OPERAND (t, 0)),
1269 fold_negate_expr (TREE_OPERAND (t, 1)));
1273 if (negate_expr_p (t))
1274 return fold_build1 (CONJ_EXPR, type,
1275 fold_negate_expr (TREE_OPERAND (t, 0)));
/* Double negation: - (-A) is just A.  */
1279 return TREE_OPERAND (t, 0);
1282 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1283 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1285 /* -(A + B) -> (-B) - A. */
1286 if (negate_expr_p (TREE_OPERAND (t, 1))
1287 && reorder_operands_p (TREE_OPERAND (t, 0),
1288 TREE_OPERAND (t, 1)))
1290 tem = negate_expr (TREE_OPERAND (t, 1));
1291 return fold_build2 (MINUS_EXPR, type,
1292 tem, TREE_OPERAND (t, 0));
1295 /* -(A + B) -> (-A) - B. */
1296 if (negate_expr_p (TREE_OPERAND (t, 0)))
1298 tem = negate_expr (TREE_OPERAND (t, 0));
1299 return fold_build2 (MINUS_EXPR, type,
1300 tem, TREE_OPERAND (t, 1));
1306 /* - (A - B) -> B - A */
1307 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1308 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1309 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1310 return fold_build2 (MINUS_EXPR, type,
1311 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1315 if (TYPE_UNSIGNED (type))
1321 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
/* Push the negation into one operand: try the second operand first,
   then the first, keeping the other operand in place.  */
1323 tem = TREE_OPERAND (t, 1);
1324 if (negate_expr_p (tem))
1325 return fold_build2 (TREE_CODE (t), type,
1326 TREE_OPERAND (t, 0), negate_expr (tem));
1327 tem = TREE_OPERAND (t, 0);
1328 if (negate_expr_p (tem))
1329 return fold_build2 (TREE_CODE (t), type,
1330 negate_expr (tem), TREE_OPERAND (t, 1));
1334 case TRUNC_DIV_EXPR:
1335 case ROUND_DIV_EXPR:
1336 case FLOOR_DIV_EXPR:
1338 case EXACT_DIV_EXPR:
1339 /* In general we can't negate A / B, because if A is INT_MIN and
1340 B is 1, we may turn this into INT_MIN / -1 which is undefined
1341 and actually traps on some architectures. But if overflow is
1342 undefined, we can negate, because - (INT_MIN / 1) is an
1344 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
/* When we rely on undefined signed overflow, emit the strict-overflow
   warning unless the operand is a harmless constant.  */
1346 const char * const warnmsg = G_("assuming signed overflow does not "
1347 "occur when negating a division");
1348 tem = TREE_OPERAND (t, 1);
1349 if (negate_expr_p (tem))
1351 if (INTEGRAL_TYPE_P (type)
1352 && (TREE_CODE (tem) != INTEGER_CST
1353 || integer_onep (tem)))
1354 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1355 return fold_build2 (TREE_CODE (t), type,
1356 TREE_OPERAND (t, 0), negate_expr (tem));
1358 tem = TREE_OPERAND (t, 0);
1359 if (negate_expr_p (tem))
1361 if (INTEGRAL_TYPE_P (type)
1362 && (TREE_CODE (tem) != INTEGER_CST
1363 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1364 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1365 return fold_build2 (TREE_CODE (t), type,
1366 negate_expr (tem), TREE_OPERAND (t, 1));
1372 /* Convert -((double)float) into (double)(-float). */
1373 if (TREE_CODE (type) == REAL_TYPE)
1375 tem = strip_float_extensions (t);
1376 if (tem != t && negate_expr_p (tem))
1377 return negate_expr (tem);
1382 /* Negate -f(x) as f(-x). */
1383 if (negate_mathfn_p (builtin_mathfn_code (t))
1384 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1388 fndecl = get_callee_fndecl (t);
1389 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1390 return build_call_expr (fndecl, 1, arg);
1395 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1396 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1398 tree op1 = TREE_OPERAND (t, 1);
/* Only valid when the shift count is exactly precision - 1, i.e. the
   result is 0 or all-ones; flipping signedness then negates it.  */
1399 if (TREE_INT_CST_HIGH (op1) == 0
1400 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1401 == TREE_INT_CST_LOW (op1))
1403 tree ntype = TYPE_UNSIGNED (type)
1404 ? lang_hooks.types.signed_type (type)
1405 : lang_hooks.types.unsigned_type (type);
1406 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1407 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1408 return fold_convert (type, temp);
1420 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1421 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1422 return NULL_TREE. */
1425 negate_expr (tree t)
1432 type = TREE_TYPE (t);
/* Strip sign-preserving conversions before folding; TYPE records the
   original type so we can convert the result back below.  */
1433 STRIP_SIGN_NOPS (t);
1435 tem = fold_negate_expr (t);
/* Fallback (presumably guarded by a NULL-check elided in this excerpt):
   build an explicit NEGATE_EXPR around the stripped operand.  */
1437 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1438 return fold_convert (type, tem);
1441 /* Split a tree IN into a constant, literal and variable parts that could be
1442 combined with CODE to make IN. "constant" means an expression with
1443 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1444 commutative arithmetic operation. Store the constant part into *CONP,
1445 the literal in *LITP and return the variable part. If a part isn't
1446 present, set it to null. If the tree does not decompose in this way,
1447 return the entire tree as the variable part and the other parts as null.
1449 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1450 case, we negate an operand that was subtracted. Except if it is a
1451 literal for which we use *MINUS_LITP instead.
1453 If NEGATE_P is true, we are negating all of IN, again except a literal
1454 for which we use *MINUS_LITP instead.
1456 If IN is itself a literal or constant, return it as appropriate.
1458 Note that we do not guarantee that any of the three values will be the
1459 same type as IN, but they will have the same signedness and mode. */
1462 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1463 tree *minus_litp, int negate_p)
1471 /* Strip any conversions that don't change the machine mode or signedness. */
1472 STRIP_SIGN_NOPS (in);
/* A bare literal is handled directly (branch body elided here).  */
1474 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1476 else if (TREE_CODE (in) == code
1477 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1478 /* We can associate addition and subtraction together (even
1479 though the C standard doesn't say so) for integers because
1480 the value is not affected. For reals, the value might be
1481 affected, so we can't. */
1482 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1483 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1485 tree op0 = TREE_OPERAND (in, 0);
1486 tree op1 = TREE_OPERAND (in, 1);
/* NEG1_P records whether op1 is subtracted; the NEG_*_P flags remember
   which extracted part must be negated later.  */
1487 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1488 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1490 /* First see if either of the operands is a literal, then a constant. */
1491 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1492 *litp = op0, op0 = 0;
1493 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1494 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1496 if (op0 != 0 && TREE_CONSTANT (op0))
1497 *conp = op0, op0 = 0;
1498 else if (op1 != 0 && TREE_CONSTANT (op1))
1499 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1501 /* If we haven't dealt with either operand, this is not a case we can
1502 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1503 if (op0 != 0 && op1 != 0)
1508 var = op1, neg_var_p = neg1_p;
1510 /* Now do any needed negations. */
1512 *minus_litp = *litp, *litp = 0;
1514 *conp = negate_expr (*conp);
1516 var = negate_expr (var);
1518 else if (TREE_CONSTANT (in))
/* NEGATE_P handling: negate each returned part, swapping *LITP and
   *MINUS_LITP rather than negating the literal itself.  */
1526 *minus_litp = *litp, *litp = 0;
1527 else if (*minus_litp)
1528 *litp = *minus_litp, *minus_litp = 0;
1529 *conp = negate_expr (*conp);
1530 var = negate_expr (var);
1536 /* Re-associate trees split by the above function. T1 and T2 are either
1537 expressions to associate or null. Return the new expression, if any. If
1538 we build an operation, do it in TYPE and with CODE. */
1541 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1548 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1549 try to fold this since we will have infinite recursion. But do
1550 deal with any NEGATE_EXPRs. */
1551 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1552 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1554 if (code == PLUS_EXPR)
/* A + (-B) and (-A) + B are rewritten as subtractions so the
   NEGATE_EXPR disappears.  */
1556 if (TREE_CODE (t1) == NEGATE_EXPR)
1557 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1558 fold_convert (type, TREE_OPERAND (t1, 0)));
1559 else if (TREE_CODE (t2) == NEGATE_EXPR)
1560 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1561 fold_convert (type, TREE_OPERAND (t2, 0)));
1562 else if (integer_zerop (t2))
1563 return fold_convert (type, t1);
1565 else if (code == MINUS_EXPR)
1567 if (integer_zerop (t2))
1568 return fold_convert (type, t1);
/* Use the non-folding build2 here to avoid the recursion noted above.  */
1571 return build2 (code, type, fold_convert (type, t1),
1572 fold_convert (type, t2));
/* Safe to fold in the general case.  */
1575 return fold_build2 (code, type, fold_convert (type, t1),
1576 fold_convert (type, t2));
1579 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1580 for use in int_const_binop, size_binop and size_diffop. */
1583 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
/* Both types must be integer or pointer types (the failing return
   statements are elided in this excerpt) ...  */
1585 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1587 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
/* ... and must agree in signedness, precision and machine mode.  */
1602 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1603 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1604 && TYPE_MODE (type1) == TYPE_MODE (type2);
1608 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1609 to produce a new constant. Return NULL_TREE if we don't know how
1610 to evaluate CODE at compile-time.
1612 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1615 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Each double-word constant is kept as a (low, high) pair of
   HOST_WIDE_INTs; the low word is unsigned, the high word signed.  */
1617 unsigned HOST_WIDE_INT int1l, int2l;
1618 HOST_WIDE_INT int1h, int2h;
1619 unsigned HOST_WIDE_INT low;
1621 unsigned HOST_WIDE_INT garbagel;
1622 HOST_WIDE_INT garbageh;
1624 tree type = TREE_TYPE (arg1);
1625 int uns = TYPE_UNSIGNED (type);
1627 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1630 int1l = TREE_INT_CST_LOW (arg1);
1631 int1h = TREE_INT_CST_HIGH (arg1);
1632 int2l = TREE_INT_CST_LOW (arg2);
1633 int2h = TREE_INT_CST_HIGH (arg2);
1638 low = int1l | int2l, hi = int1h | int2h;
1642 low = int1l ^ int2l, hi = int1h ^ int2h;
1646 low = int1l & int2l, hi = int1h & int2h;
1652 /* It's unclear from the C standard whether shifts can overflow.
1653 The following code ignores overflow; perhaps a C standard
1654 interpretation ruling is needed. */
1655 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1662 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1667 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction is implemented as addition of the negation;
   OVERFLOW_SUM_SIGN applies the sign rule to detect overflow.  */
1671 neg_double (int2l, int2h, &low, &hi);
1672 add_double (int1l, int1h, low, hi, &low, &hi);
1673 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1677 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1680 case TRUNC_DIV_EXPR:
1681 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1682 case EXACT_DIV_EXPR:
1683 /* This is a shortcut for a common special case. */
1684 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1685 && !TREE_OVERFLOW (arg1)
1686 && !TREE_OVERFLOW (arg2)
1687 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1689 if (code == CEIL_DIV_EXPR)
/* Both operands fit in a single non-negative word: plain C division.  */
1692 low = int1l / int2l, hi = 0;
1696 /* ... fall through ... */
1698 case ROUND_DIV_EXPR:
1699 if (int2h == 0 && int2l == 0)
/* Division by 1 is the identity.  */
1701 if (int2h == 0 && int2l == 1)
1703 low = int1l, hi = int1h;
/* Equal non-zero operands (result lines elided in this excerpt).  */
1706 if (int1l == int2l && int1h == int2h
1707 && ! (int1l == 0 && int1h == 0))
1712 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1713 &low, &hi, &garbagel, &garbageh);
1716 case TRUNC_MOD_EXPR:
1717 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1718 /* This is a shortcut for a common special case. */
1719 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1720 && !TREE_OVERFLOW (arg1)
1721 && !TREE_OVERFLOW (arg2)
1722 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1724 if (code == CEIL_MOD_EXPR)
1726 low = int1l % int2l, hi = 0;
1730 /* ... fall through ... */
1732 case ROUND_MOD_EXPR;
1733 if (int2h == 0 && int2l == 0)
/* For MOD we keep the remainder outputs and discard the quotient.  */
1735 overflow = div_and_round_double (code, uns,
1736 int1l, int1h, int2l, int2h,
1737 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compare as unsigned or signed double-words, then pick.  */
1743 low = (((unsigned HOST_WIDE_INT) int1h
1744 < (unsigned HOST_WIDE_INT) int2h)
1745 || (((unsigned HOST_WIDE_INT) int1h
1746 == (unsigned HOST_WIDE_INT) int2h)
1749 low = (int1h < int2h
1750 || (int1h == int2h && int1l < int2l));
1752 if (low == (code == MIN_EXPR))
1753 low = int1l, hi = int1h;
1755 low = int2l, hi = int2h;
1764 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1766 /* Propagate overflow flags ourselves. */
1767 if (((!uns || is_sizetype) && overflow)
1768 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1771 TREE_OVERFLOW (t) = 1;
/* Truncating path: let force_fit_type_double fit the value and set
   the overflow bit.  */
1775 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1776 ((!uns || is_sizetype) && overflow)
1777 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1782 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1783 constant. We assume ARG1 and ARG2 have the same data type, or at least
1784 are the same kind of constant and the same machine mode. Return zero if
1785 combining the constants is not allowed in the current operating mode.
1787 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1790 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1792 /* Sanity check for the recursive cases. */
/* Integer constants are delegated to int_const_binop.  */
1799 if (TREE_CODE (arg1) == INTEGER_CST)
1800 return int_const_binop (code, arg1, arg2, notrunc);
1802 if (TREE_CODE (arg1) == REAL_CST)
1804 enum machine_mode mode;
1807 REAL_VALUE_TYPE value;
1808 REAL_VALUE_TYPE result;
1812 /* The following codes are handled by real_arithmetic. */
1827 d1 = TREE_REAL_CST (arg1);
1828 d2 = TREE_REAL_CST (arg2);
1830 type = TREE_TYPE (arg1);
1831 mode = TYPE_MODE (type);
1833 /* Don't perform operation if we honor signaling NaNs and
1834 either operand is a NaN. */
1835 if (HONOR_SNANS (mode)
1836 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1839 /* Don't perform operation if it would raise a division
1840 by zero exception. */
1841 if (code == RDIV_EXPR
1842 && REAL_VALUES_EQUAL (d2, dconst0)
1843 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1846 /* If either operand is a NaN, just return it. Otherwise, set up
1847 for floating-point trap; we return an overflow. */
1848 if (REAL_VALUE_ISNAN (d1))
1850 else if (REAL_VALUE_ISNAN (d2))
/* Compute in infinite precision, then round to the target mode.  */
1853 inexact = real_arithmetic (&value, code, &d1, &d2);
1854 real_convert (&result, mode, &value);
1856 /* Don't constant fold this floating point operation if
1857 the result has overflowed and flag_trapping_math. */
1858 if (flag_trapping_math
1859 && MODE_HAS_INFINITIES (mode)
1860 && REAL_VALUE_ISINF (result)
1861 && !REAL_VALUE_ISINF (d1)
1862 && !REAL_VALUE_ISINF (d2))
1865 /* Don't constant fold this floating point operation if the
1866 result may dependent upon the run-time rounding mode and
1867 flag_rounding_math is set, or if GCC's software emulation
1868 is unable to accurately represent the result. */
1869 if ((flag_rounding_math
1870 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1871 && !flag_unsafe_math_optimizations))
1872 && (inexact || !real_identical (&result, &value)))
1875 t = build_real (type, result);
1877 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1881 if (TREE_CODE (arg1) == COMPLEX_CST)
1883 tree type = TREE_TYPE (arg1);
1884 tree r1 = TREE_REALPART (arg1);
1885 tree i1 = TREE_IMAGPART (arg1);
1886 tree r2 = TREE_REALPART (arg2);
1887 tree i2 = TREE_IMAGPART (arg2);
/* Component-wise operations (e.g. addition) fold each part directly.  */
1894 real = const_binop (code, r1, r2, notrunc);
1895 imag = const_binop (code, i1, i2, notrunc);
/* Complex multiplication: (r1 + i1*i)(r2 + i2*i).  */
1899 real = const_binop (MINUS_EXPR,
1900 const_binop (MULT_EXPR, r1, r2, notrunc),
1901 const_binop (MULT_EXPR, i1, i2, notrunc),
1903 imag = const_binop (PLUS_EXPR,
1904 const_binop (MULT_EXPR, r1, i2, notrunc),
1905 const_binop (MULT_EXPR, i1, r2, notrunc),
/* Complex division: multiply by the conjugate of the divisor and
   divide both parts by |r2 + i2*i|^2 (MAGSQUARED below).  */
1912 = const_binop (PLUS_EXPR,
1913 const_binop (MULT_EXPR, r2, r2, notrunc),
1914 const_binop (MULT_EXPR, i2, i2, notrunc),
1917 = const_binop (PLUS_EXPR,
1918 const_binop (MULT_EXPR, r1, r2, notrunc),
1919 const_binop (MULT_EXPR, i1, i2, notrunc),
1922 = const_binop (MINUS_EXPR,
1923 const_binop (MULT_EXPR, i1, r2, notrunc),
1924 const_binop (MULT_EXPR, r1, i2, notrunc),
/* Integral complex parts divide with truncation.  */
1927 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1928 code = TRUNC_DIV_EXPR;
1930 real = const_binop (code, t1, magsquared, notrunc);
1931 imag = const_binop (code, t2, magsquared, notrunc);
1940 return build_complex (type, real, imag);
1946 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1947 indicates which particular sizetype to create. */
1950 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
/* KIND indexes sizetype_tab to select the particular sizetype flavor.  */
1952 return build_int_cst (sizetype_tab[(int) kind], number);
1955 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1956 is a tree code. The type of the result is taken from the operands.
1957 Both must be equivalent integer types, ala int_binop_types_match_p.
1958 If the operands are constant, so is the result. */
1961 size_binop (enum tree_code code, tree arg0, tree arg1)
1963 tree type = TREE_TYPE (arg0);
1965 if (arg0 == error_mark_node || arg1 == error_mark_node)
1966 return error_mark_node;
/* The operand types must be equivalent per int_binop_types_match_p.  */
1968 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1971 /* Handle the special case of two integer constants faster. */
1972 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1974 /* And some specific cases even faster than that. */
1975 if (code == PLUS_EXPR)
1977 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1979 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1982 else if (code == MINUS_EXPR)
1984 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1987 else if (code == MULT_EXPR)
1989 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1993 /* Handle general case of two integer constants. */
1994 return int_const_binop (code, arg0, arg1, 0);
/* Non-constant operands: build a (possibly folded) expression.  */
1997 return fold_build2 (code, type, arg0, arg1);
2000 /* Given two values, either both of sizetype or both of bitsizetype,
2001 compute the difference between the two values. Return the value
2002 in signed type corresponding to the type of the operands. */
2005 size_diffop (tree arg0, tree arg1)
2007 tree type = TREE_TYPE (arg0);
2010 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2013 /* If the type is already signed, just do the simple thing. */
2014 if (!TYPE_UNSIGNED (type))
2015 return size_binop (MINUS_EXPR, arg0, arg1);
/* Select the signed counterpart of TYPE (the sizetype assignment is
   elided in this excerpt).  */
2017 if (type == sizetype)
2019 else if (type == bitsizetype)
2020 ctype = sbitsizetype;
2022 ctype = lang_hooks.types.signed_type (type);
2024 /* If either operand is not a constant, do the conversions to the signed
2025 type and subtract. The hardware will do the right thing with any
2026 overflow in the subtraction. */
2027 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2028 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2029 fold_convert (ctype, arg1));
2031 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2032 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2033 overflow) and negate (which can't either). Special-case a result
2034 of zero while we're here. */
2035 if (tree_int_cst_equal (arg0, arg1))
2036 return build_int_cst (ctype, 0);
2037 else if (tree_int_cst_lt (arg1, arg0))
2038 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2040 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2041 fold_convert (ctype, size_binop (MINUS_EXPR,
2045 /* A subroutine of fold_convert_const handling conversions of an
2046 INTEGER_CST to another integer type. */
2049 fold_convert_const_int_from_int (tree type, tree arg1)
2053 /* Given an integer constant, make new constant with new type,
2054 appropriately sign-extended or truncated. */
/* Overflow is flagged when the (sign-extended) value is negative and we
   convert from an unsigned type to a signed one, or when ARG1 itself
   already carried TREE_OVERFLOW.  */
2055 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2056 TREE_INT_CST_HIGH (arg1),
2057 /* Don't set the overflow when
2058 converting a pointer */
2059 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2060 (TREE_INT_CST_HIGH (arg1) < 0
2061 && (TYPE_UNSIGNED (type)
2062 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2063 | TREE_OVERFLOW (arg1));
2068 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2069 to an integer type. */
2072 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
2077 /* The following code implements the floating point to integer
2078 conversion rules required by the Java Language Specification,
2079 that IEEE NaNs are mapped to zero and values that overflow
2080 the target precision saturate, i.e. values greater than
2081 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2082 are mapped to INT_MIN. These semantics are allowed by the
2083 C and C++ standards that simply state that the behavior of
2084 FP-to-integer conversion is unspecified upon overflow. */
2086 HOST_WIDE_INT high, low;
2088 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* CODE selects the rounding used before the conversion; only the
   truncating case is visible in this excerpt.  */
2092 case FIX_TRUNC_EXPR:
2093 real_trunc (&r, VOIDmode, &x);
2100 /* If R is NaN, return zero and show we have an overflow. */
2101 if (REAL_VALUE_ISNAN (r))
2108 /* See if R is less than the lower bound or greater than the
/* Saturate at TYPE's minimum when R underflows the range.  */
2113 tree lt = TYPE_MIN_VALUE (type);
2114 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2115 if (REAL_VALUES_LESS (r, l))
2118 high = TREE_INT_CST_HIGH (lt);
2119 low = TREE_INT_CST_LOW (lt);
/* Saturate at TYPE's maximum when R overflows the range.  */
2125 tree ut = TYPE_MAX_VALUE (type);
2128 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2129 if (REAL_VALUES_LESS (u, r))
2132 high = TREE_INT_CST_HIGH (ut);
2133 low = TREE_INT_CST_LOW (ut);
/* In range: convert R to a double-word integer.  */
2139 REAL_VALUE_TO_INT (&low, &high, r);
2141 t = force_fit_type_double (type, low, high, -1,
2142 overflow | TREE_OVERFLOW (arg1));
2146 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2147 to another floating point type. */
2150 fold_convert_const_real_from_real (tree type, tree arg1)
2152 REAL_VALUE_TYPE value;
/* Round the value to the target mode and carry the overflow flag over
   from the operand.  */
2155 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2156 t = build_real (type, value);
2158 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2162 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2163 type TYPE. If no simplification can be done return NULL_TREE. */
2166 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* Trivial case: same type needs no conversion.  */
2168 if (TREE_TYPE (arg1) == type)
/* Integer/pointer target: dispatch on the kind of constant operand.  */
2171 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2173 if (TREE_CODE (arg1) == INTEGER_CST)
2174 return fold_convert_const_int_from_int (type, arg1);
2175 else if (TREE_CODE (arg1) == REAL_CST)
2176 return fold_convert_const_int_from_real (code, type, arg1);
/* Floating-point target.  */
2178 else if (TREE_CODE (type) == REAL_TYPE)
2180 if (TREE_CODE (arg1) == INTEGER_CST)
2181 return build_real_from_int_cst (type, arg1);
2182 if (TREE_CODE (arg1) == REAL_CST)
2183 return fold_convert_const_real_from_real (type, arg1);
2188 /* Construct a vector of zero elements of vector type TYPE. */
2191 build_zero_vector (tree type)
/* Fold zero to the element type, replicate it UNITS times into a
   TREE_LIST, and wrap the list in a vector constant.  */
2196 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2197 units = TYPE_VECTOR_SUBPARTS (type);
2200 for (i = 0; i < units; i++)
2201 list = tree_cons (NULL_TREE, elem, list);
2202 return build_vector (type, list);
2205 /* Convert expression ARG to type TYPE. Used by the middle-end for
2206 simple conversions in preference to calling the front-end's convert. */
2209 fold_convert (tree type, tree arg)
2211 tree orig = TREE_TYPE (arg);
/* Propagate errors rather than converting them.  */
2217 if (TREE_CODE (arg) == ERROR_MARK
2218 || TREE_CODE (type) == ERROR_MARK
2219 || TREE_CODE (orig) == ERROR_MARK)
2220 return error_mark_node;
/* Identical or language-compatible types: a simple NOP conversion.  */
2222 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2223 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2224 TYPE_MAIN_VARIANT (orig)))
2225 return fold_build1 (NOP_EXPR, type, arg);
/* Otherwise dispatch on the target type's tree code.  */
2227 switch (TREE_CODE (type))
2229 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2230 case POINTER_TYPE: case REFERENCE_TYPE:
2232 if (TREE_CODE (arg) == INTEGER_CST)
2234 tem = fold_convert_const (NOP_EXPR, type, arg);
2235 if (tem != NULL_TREE)
2238 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2239 || TREE_CODE (orig) == OFFSET_TYPE)
2240 return fold_build1 (NOP_EXPR, type, arg);
/* Complex source: convert just its real part.  */
2241 if (TREE_CODE (orig) == COMPLEX_TYPE)
2243 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2244 return fold_convert (type, tem);
2246 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2247 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2248 return fold_build1 (NOP_EXPR, type, arg);
/* Floating-point target (case label elided in this excerpt): fold
   constant operands eagerly, otherwise build the conversion.  */
2251 if (TREE_CODE (arg) == INTEGER_CST)
2253 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2254 if (tem != NULL_TREE)
2257 else if (TREE_CODE (arg) == REAL_CST)
2259 tem = fold_convert_const (NOP_EXPR, type, arg);
2260 if (tem != NULL_TREE)
2264 switch (TREE_CODE (orig))
2267 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2268 case POINTER_TYPE: case REFERENCE_TYPE:
2269 return fold_build1 (FLOAT_EXPR, type, arg);
2272 return fold_build1 (NOP_EXPR, type, arg);
2275 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2276 return fold_convert (type, tem);
/* Complex target (case label elided): build (re, 0) from a scalar,
   or convert each part of a complex operand.  */
2283 switch (TREE_CODE (orig))
2286 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2287 case POINTER_TYPE: case REFERENCE_TYPE:
2289 return build2 (COMPLEX_EXPR, type,
2290 fold_convert (TREE_TYPE (type), arg),
2291 fold_convert (TREE_TYPE (type), integer_zero_node));
2296 if (TREE_CODE (arg) == COMPLEX_EXPR)
2298 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2299 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2300 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* ARG is used twice below, so wrap it in a SAVE_EXPR to avoid
   evaluating it more than once.  */
2303 arg = save_expr (arg);
2304 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2305 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2306 rpart = fold_convert (TREE_TYPE (type), rpart);
2307 ipart = fold_convert (TREE_TYPE (type), ipart);
2308 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* Vector target (case label elided): a same-size bit reinterpretation.  */
2316 if (integer_zerop (arg))
2317 return build_zero_vector (type);
2318 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2319 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2320 || TREE_CODE (orig) == VECTOR_TYPE);
2321 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
/* Void target (case label elided): keep side effects, drop the value.  */
2324 tem = fold_ignored_result (arg);
2325 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2327 return fold_build1 (NOP_EXPR, type, tem);
2334 /* Return false if expr can be assumed not to be an lvalue, true
/* Predicate used by non_lvalue: true for any tree code that might
   denote an lvalue, so callers know when a NON_LVALUE_EXPR wrapper
   is needed.  */
2338 maybe_lvalue_p (tree x)
2340 /* We only need to wrap lvalue tree codes. */
2341 switch (TREE_CODE (x))
2352 case ALIGN_INDIRECT_REF:
2353 case MISALIGNED_INDIRECT_REF:
2355 case ARRAY_RANGE_REF:
2361 case PREINCREMENT_EXPR:
2362 case PREDECREMENT_EXPR:
2364 case TRY_CATCH_EXPR:
2365 case WITH_CLEANUP_EXPR:
2368 case GIMPLE_MODIFY_STMT:
2377 /* Assume the worst for front-end tree codes. */
/* Codes beyond the middle end's known set come from front ends;
   conservatively treat them as possible lvalues.  */
2378 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2386 /* Return an expr equal to X but certainly not valid as an lvalue. */
2391 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* X that cannot be an lvalue is returned unchanged (return elided in
   this excerpt); otherwise wrap it so it cannot be assigned to.  */
2396 if (! maybe_lvalue_p (x))
2398 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2401 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2402 Zero means allow extended lvalues. */
2404 int pedantic_lvalues;
2406 /* When pedantic, return an expr equal to X but certainly not valid as a
2407 pedantic lvalue. Otherwise, return X. */
2410 pedantic_non_lvalue (tree x)
/* Only wrap when the global pedantic_lvalues flag is set.  */
2412 if (pedantic_lvalues)
2413 return non_lvalue (x);
2418 /* Given a tree comparison code, return the code that is the logical inverse
2419 of the given code. It is not safe to do this for floating-point
2420 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2421 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2424 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With NaNs honored and trapping math, inversion is unsafe
   (failure return elided in this excerpt).  */
2426 if (honor_nans && flag_trapping_math)
/* When NaNs are honored, the inverse of an ordered comparison is the
   corresponding unordered one.  */
2436 return honor_nans ? UNLE_EXPR : LE_EXPR;
2438 return honor_nans ? UNLT_EXPR : LT_EXPR;
2440 return honor_nans ? UNGE_EXPR : GE_EXPR;
2442 return honor_nans ? UNGT_EXPR : GT_EXPR;
2456 return UNORDERED_EXPR;
2457 case UNORDERED_EXPR:
2458 return ORDERED_EXPR;
2464 /* Similar, but return the comparison that results if the operands are
2465 swapped. This is safe for floating-point. */
2468 swap_tree_comparison (enum tree_code code)
/* NOTE(review): the case bodies are elided in this excerpt; symmetric
   codes such as UNORDERED_EXPR map to themselves.  */
2475 case UNORDERED_EXPR:
2501 /* Convert a comparison tree code from an enum tree_code representation
2502 into a compcode bit-based encoding. This function is the inverse of
2503 compcode_to_comparison. */
2505 static enum comparison_code
2506 comparison_to_compcode (enum tree_code code)
/* Map each comparison tree code to its COMPCODE_* bit pattern so that
   boolean combinations can be computed with bitwise AND/OR.  */
2523 return COMPCODE_ORD;
2524 case UNORDERED_EXPR:
2525 return COMPCODE_UNORD;
2527 return COMPCODE_UNLT;
2529 return COMPCODE_UNEQ;
2531 return COMPCODE_UNLE;
2533 return COMPCODE_UNGT;
2535 return COMPCODE_LTGT;
2537 return COMPCODE_UNGE;
2543 /* Convert a compcode bit-based encoding of a comparison operator back
2544 to GCC's enum tree_code representation. This function is the
2545 inverse of comparison_to_compcode. */
2547 static enum tree_code
2548 compcode_to_comparison (enum comparison_code code)
/* Most case arms are elided in this excerpt; only the ordered /
   unordered pair is visible.  */
2565 return ORDERED_EXPR;
2566 case COMPCODE_UNORD:
2567 return UNORDERED_EXPR;
2585 /* Return a tree for the comparison which is the combination of
2586 doing the AND or OR (depending on CODE) of the two operations LCODE
2587 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2588 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2589 if this makes the transformation invalid. */
2592 combine_comparisons (enum tree_code code, enum tree_code lcode,
2593 enum tree_code rcode, tree truth_type,
2594 tree ll_arg, tree lr_arg)
2596 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2597 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2598 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2599 enum comparison_code compcode;
/* In the bit encoding, AND of conditions is bitwise intersection and
   OR is bitwise union.  */
2603 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2604 compcode = lcompcode & rcompcode;
2607 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2608 compcode = lcompcode | rcompcode;
2617 /* Eliminate unordered comparisons, as well as LTGT and ORD
2618 which are not used unless the mode has NaNs. */
2619 compcode &= ~COMPCODE_UNORD;
2620 if (compcode == COMPCODE_LTGT)
2621 compcode = COMPCODE_NE;
2622 else if (compcode == COMPCODE_ORD)
2623 compcode = COMPCODE_TRUE;
2625 else if (flag_trapping_math)
2627 /* Check that the original operation and the optimized ones will trap
2628 under the same condition. */
2629 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2630 && (lcompcode != COMPCODE_EQ)
2631 && (lcompcode != COMPCODE_ORD);
2632 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2633 && (rcompcode != COMPCODE_EQ)
2634 && (rcompcode != COMPCODE_ORD);
2635 bool trap = (compcode & COMPCODE_UNORD) == 0
2636 && (compcode != COMPCODE_EQ)
2637 && (compcode != COMPCODE_ORD);
2639 /* In a short-circuited boolean expression the LHS might be
2640 such that the RHS, if evaluated, will never trap. For
2641 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2642 if neither x nor y is NaN. (This is a mixed blessing: for
2643 example, the expression above will never trap, hence
2644 optimizing it to x < y would be invalid). */
2645 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2646 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2649 /* If the comparison was short-circuited, and only the RHS
2650 trapped, we may now generate a spurious trap. */
2652 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2655 /* If we changed the conditions that cause a trap, we lose. */
2656 if ((ltrap || rtrap) != trap)
/* Degenerate combinations fold to a boolean constant.  */
2660 if (compcode == COMPCODE_TRUE)
2661 return constant_boolean_node (true, truth_type);
2662 else if (compcode == COMPCODE_FALSE)
2663 return constant_boolean_node (false, truth_type);
2665 return fold_build2 (compcode_to_comparison (compcode),
2666 truth_type, ll_arg, lr_arg);
2669 /* Return nonzero if CODE is a tree code that represents a truth value. */
2672 truth_value_p (enum tree_code code)
/* Any comparison-class code, plus the six logical TRUTH_* operators,
   yields a truth (0/1) value. */
2674 return (TREE_CODE_CLASS (code) == tcc_comparison
2675 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2676 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2677 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2680 /* Return nonzero if two operands (typically of the same tree node)
2681 are necessarily equal. If either argument has side-effects this
2682 function returns zero. FLAGS modifies behavior as follows:
2684 If OEP_ONLY_CONST is set, only return nonzero for constants.
2685 This function tests whether the operands are indistinguishable;
2686 it does not test whether they are equal using C's == operation.
2687 The distinction is important for IEEE floating point, because
2688 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2689 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2691 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2692 even though it may hold multiple values during a function.
2693 This is because a GCC tree node guarantees that nothing else is
2694 executed between the evaluation of its "operands" (which may often
2695 be evaluated in arbitrary order). Hence if the operands themselves
2696 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2697 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2698 unset means assuming isochronic (or instantaneous) tree equivalence.
2699 Unless comparing arbitrary expression trees, such as from different
2700 statements, this flag can usually be left unset.
2702 If OEP_PURE_SAME is set, then pure functions with identical arguments
2703 are considered the same. It is used when the caller has other ways
2704 to ensure that global memory is unchanged in between. */
2707 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2709 /* If either is ERROR_MARK, they aren't equal. */
2710 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2713 /* If both types don't have the same signedness, then we can't consider
2714 them equal. We must check this before the STRIP_NOPS calls
2715 because they may change the signedness of the arguments. */
2716 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2719 /* If both types don't have the same precision, then it is not safe
2721 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2727 /* In case both args are comparisons but with different comparison
2728 code, try to swap the comparison operands of one arg to produce
2729 a match and compare that variant. */
2730 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2731 && COMPARISON_CLASS_P (arg0)
2732 && COMPARISON_CLASS_P (arg1))
2734 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2736 if (TREE_CODE (arg0) == swap_code)
2737 return operand_equal_p (TREE_OPERAND (arg0, 0),
2738 TREE_OPERAND (arg1, 1), flags)
2739 && operand_equal_p (TREE_OPERAND (arg0, 1),
2740 TREE_OPERAND (arg1, 0), flags);
2743 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2744 /* This is needed for conversions and for COMPONENT_REF.
2745 Might as well play it safe and always test this. */
2746 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2747 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2748 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2751 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2752 We don't care about side effects in that case because the SAVE_EXPR
2753 takes care of that for us. In all other cases, two expressions are
2754 equal if they have no side effects. If we have two identical
2755 expressions with side effects that should be treated the same due
2756 to the only side effects being identical SAVE_EXPR's, that will
2757 be detected in the recursive calls below. */
2758 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2759 && (TREE_CODE (arg0) == SAVE_EXPR
2760 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2763 /* Next handle constant cases, those for which we can return 1 even
2764 if ONLY_CONST is set. */
2765 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2766 switch (TREE_CODE (arg0))
2769 return tree_int_cst_equal (arg0, arg1);
/* REAL_CST: bitwise-identical reals compare equal; +0.0/-0.0 are
   also treated as equal when signed zeros need not be honored. */
2772 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2773 TREE_REAL_CST (arg1)))
2777 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2779 /* If we do not distinguish between signed and unsigned zero,
2780 consider them equal. */
2781 if (real_zerop (arg0) && real_zerop (arg1))
/* VECTOR_CST: walk both element chains in lockstep, comparing
   element by element. */
2790 v1 = TREE_VECTOR_CST_ELTS (arg0);
2791 v2 = TREE_VECTOR_CST_ELTS (arg1);
2794 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2797 v1 = TREE_CHAIN (v1);
2798 v2 = TREE_CHAIN (v2);
2805 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2807 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2811 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2812 && ! memcmp (TREE_STRING_POINTER (arg0),
2813 TREE_STRING_POINTER (arg1),
2814 TREE_STRING_LENGTH (arg0)));
2817 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2823 if (flags & OEP_ONLY_CONST)
2826 /* Define macros to test an operand from arg0 and arg1 for equality and a
2827 variant that allows null and views null as being different from any
2828 non-null value. In the latter case, if either is null, then both
2829 must be; otherwise, do the normal comparison. */
2830 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2831 TREE_OPERAND (arg1, N), flags)
2833 #define OP_SAME_WITH_NULL(N) \
2834 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2835 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2837 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2840 /* Two conversions are equal only if signedness and modes match. */
2841 switch (TREE_CODE (arg0))
2845 case FIX_TRUNC_EXPR:
2846 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2847 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2857 case tcc_comparison:
2859 if (OP_SAME (0) && OP_SAME (1))
2862 /* For commutative ops, allow the other order. */
2863 return (commutative_tree_code (TREE_CODE (arg0))
2864 && operand_equal_p (TREE_OPERAND (arg0, 0),
2865 TREE_OPERAND (arg1, 1), flags)
2866 && operand_equal_p (TREE_OPERAND (arg0, 1),
2867 TREE_OPERAND (arg1, 0), flags));
2870 /* If either of the pointer (or reference) expressions we are
2871 dereferencing contain a side effect, these cannot be equal. */
2872 if (TREE_SIDE_EFFECTS (arg0)
2873 || TREE_SIDE_EFFECTS (arg1))
2876 switch (TREE_CODE (arg0))
2879 case ALIGN_INDIRECT_REF:
2880 case MISALIGNED_INDIRECT_REF:
2886 case ARRAY_RANGE_REF:
2887 /* Operands 2 and 3 may be null. */
2890 && OP_SAME_WITH_NULL (2)
2891 && OP_SAME_WITH_NULL (3));
2894 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2895 may be NULL when we're called to compare MEM_EXPRs. */
2896 return OP_SAME_WITH_NULL (0)
2898 && OP_SAME_WITH_NULL (2);
2901 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2907 case tcc_expression:
2908 switch (TREE_CODE (arg0))
2911 case TRUTH_NOT_EXPR:
2914 case TRUTH_ANDIF_EXPR:
2915 case TRUTH_ORIF_EXPR:
2916 return OP_SAME (0) && OP_SAME (1);
2918 case TRUTH_AND_EXPR:
2920 case TRUTH_XOR_EXPR:
2921 if (OP_SAME (0) && OP_SAME (1))
2924 /* Otherwise take into account this is a commutative operation. */
2925 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2926 TREE_OPERAND (arg1, 1), flags)
2927 && operand_equal_p (TREE_OPERAND (arg0, 1),
2928 TREE_OPERAND (arg1, 0), flags));
2935 switch (TREE_CODE (arg0))
2938 /* If the CALL_EXPRs call different functions, then they
2939 clearly can not be equal. */
2940 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
/* Only calls to const (or, under OEP_PURE_SAME, pure) functions may
   be considered equal; cef collects the call's ECF_* flags. */
2945 unsigned int cef = call_expr_flags (arg0);
2946 if (flags & OEP_PURE_SAME)
2947 cef &= ECF_CONST | ECF_PURE;
2954 /* Now see if all the arguments are the same. */
2956 call_expr_arg_iterator iter0, iter1;
2958 for (a0 = first_call_expr_arg (arg0, &iter0),
2959 a1 = first_call_expr_arg (arg1, &iter1);
2961 a0 = next_call_expr_arg (&iter0),
2962 a1 = next_call_expr_arg (&iter1))
2963 if (! operand_equal_p (a0, a1, flags))
2966 /* If we get here and both argument lists are exhausted
2967 then the CALL_EXPRs are equal. */
2968 return ! (a0 || a1);
2974 case tcc_declaration:
2975 /* Consider __builtin_sqrt equal to sqrt. */
2976 return (TREE_CODE (arg0) == FUNCTION_DECL
2977 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2978 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2979 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2986 #undef OP_SAME_WITH_NULL
2989 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2990 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2992 When in doubt, return 0. */
2995 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2997 int unsignedp1, unsignedpo;
2998 tree primarg0, primarg1, primother;
2999 unsigned int correct_width;
/* Fast path: exact operand equality. */
3001 if (operand_equal_p (arg0, arg1, 0))
/* Only integral types can have been shortened by shorten_compare. */
3004 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3005 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3008 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3009 and see if the inner values are the same. This removes any
3010 signedness comparison, which doesn't matter here. */
3011 primarg0 = arg0, primarg1 = arg1;
3012 STRIP_NOPS (primarg0);
3013 STRIP_NOPS (primarg1);
3014 if (operand_equal_p (primarg0, primarg1, 0))
3017 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3018 actual comparison operand, ARG0.
3020 First throw away any conversions to wider types
3021 already present in the operands. */
3023 primarg1 = get_narrower (arg1, &unsignedp1);
3024 primother = get_narrower (other, &unsignedpo);
3026 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3027 if (unsignedp1 == unsignedpo
3028 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3029 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3031 tree type = TREE_TYPE (arg0);
3033 /* Make sure shorter operand is extended the right way
3034 to match the longer operand. */
3035 primarg1 = fold_convert (get_signed_or_unsigned_type
3036 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3038 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3045 /* See if ARG is an expression that is either a comparison or is performing
3046 arithmetic on comparisons. The comparisons must only be comparing
3047 two different values, which will be stored in *CVAL1 and *CVAL2; if
3048 they are nonzero it means that some operands have already been found.
3049 No variables may be used anywhere else in the expression except in the
3050 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3051 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3053 If this is true, return 1. Otherwise, return zero. */
3056 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3058 enum tree_code code = TREE_CODE (arg);
3059 enum tree_code_class class = TREE_CODE_CLASS (code);
3061 /* We can handle some of the tcc_expression cases here. */
3062 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3064 else if (class == tcc_expression
3065 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3066 || code == COMPOUND_EXPR))
3069 else if (class == tcc_expression && code == SAVE_EXPR
3070 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3072 /* If we've already found a CVAL1 or CVAL2, this expression is
3073 too complex to handle. */
3074 if (*cval1 || *cval2)
/* Unary/binary arithmetic: recurse into each operand. */
3084 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3087 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3088 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3089 cval1, cval2, save_p));
3094 case tcc_expression:
3095 if (code == COND_EXPR)
3096 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3097 cval1, cval2, save_p)
3098 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3099 cval1, cval2, save_p)
3100 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3101 cval1, cval2, save_p));
3104 case tcc_comparison:
3105 /* First see if we can handle the first operand, then the second. For
3106 the second operand, we know *CVAL1 can't be zero. It must be that
3107 one side of the comparison is each of the values; test for the
3108 case where this isn't true by failing if the two operands
3111 if (operand_equal_p (TREE_OPERAND (arg, 0),
3112 TREE_OPERAND (arg, 1), 0))
/* Record operand 0 into whichever of *CVAL1/*CVAL2 is free, or accept
   it if it matches one already recorded; otherwise fail. */
3116 *cval1 = TREE_OPERAND (arg, 0);
3117 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3119 else if (*cval2 == 0)
3120 *cval2 = TREE_OPERAND (arg, 0);
3121 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Same bookkeeping for operand 1. */
3126 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3128 else if (*cval2 == 0)
3129 *cval2 = TREE_OPERAND (arg, 1);
3130 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3142 /* ARG is a tree that is known to contain just arithmetic operations and
3143 comparisons. Evaluate the operations in the tree substituting NEW0 for
3144 any occurrence of OLD0 as an operand of a comparison and likewise for
3148 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3150 tree type = TREE_TYPE (arg);
3151 enum tree_code code = TREE_CODE (arg);
3152 enum tree_code_class class = TREE_CODE_CLASS (code);
3154 /* We can handle some of the tcc_expression cases here. */
3155 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3157 else if (class == tcc_expression
3158 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary: rebuild the node around the recursively substituted operand. */
3164 return fold_build1 (code, type,
3165 eval_subst (TREE_OPERAND (arg, 0),
3166 old0, new0, old1, new1));
/* Binary: substitute in both operands and refold. */
3169 return fold_build2 (code, type,
3170 eval_subst (TREE_OPERAND (arg, 0),
3171 old0, new0, old1, new1),
3172 eval_subst (TREE_OPERAND (arg, 1),
3173 old0, new0, old1, new1));
3175 case tcc_expression:
3179 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3182 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* COND_EXPR-style ternary: substitute in all three operands. */
3185 return fold_build3 (code, type,
3186 eval_subst (TREE_OPERAND (arg, 0),
3187 old0, new0, old1, new1),
3188 eval_subst (TREE_OPERAND (arg, 1),
3189 old0, new0, old1, new1),
3190 eval_subst (TREE_OPERAND (arg, 2),
3191 old0, new0, old1, new1));
3195 /* Fall through - ??? */
3197 case tcc_comparison:
3199 tree arg0 = TREE_OPERAND (arg, 0);
3200 tree arg1 = TREE_OPERAND (arg, 1);
3202 /* We need to check both for exact equality and tree equality. The
3203 former will be true if the operand has a side-effect. In that
3204 case, we know the operand occurred exactly once. */
3206 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3208 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3211 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3213 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3216 return fold_build2 (code, type, arg0, arg1);
3224 /* Return a tree for the case when the result of an expression is RESULT
3225 converted to TYPE and OMITTED was previously an operand of the expression
3226 but is now not needed (e.g., we folded OMITTED * 0).
3228 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3229 the conversion of RESULT to TYPE. */
3232 omit_one_operand (tree type, tree result, tree omitted)
3234 tree t = fold_convert (type, result);
/* Preserve side effects of the dropped operand via a COMPOUND_EXPR. */
3236 if (TREE_SIDE_EFFECTS (omitted))
3237 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3239 return non_lvalue (t);
3242 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3245 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3247 tree t = fold_convert (type, result);
/* As in omit_one_operand: keep OMITTED's side effects alive. */
3249 if (TREE_SIDE_EFFECTS (omitted))
3250 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3252 return pedantic_non_lvalue (t);
3255 /* Return a tree for the case when the result of an expression is RESULT
3256 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3257 of the expression but are now not needed.
3259 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3260 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3261 evaluated before OMITTED2. Otherwise, if neither has side effects,
3262 just do the conversion of RESULT to TYPE. */
3265 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3267 tree t = fold_convert (type, result);
/* Wrap innermost-last: OMITTED2 first so OMITTED1 ends up outermost
   and is therefore evaluated before OMITTED2. */
3269 if (TREE_SIDE_EFFECTS (omitted2))
3270 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3271 if (TREE_SIDE_EFFECTS (omitted1))
3272 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3274 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3278 /* Return a simplified tree node for the truth-negation of ARG. This
3279 never alters ARG itself. We assume that ARG is an operation that
3280 returns a truth value (0 or 1).
3282 FIXME: one would think we would fold the result, but it causes
3283 problems with the dominator optimizer. */
3286 fold_truth_not_expr (tree arg)
3288 tree type = TREE_TYPE (arg);
3289 enum tree_code code = TREE_CODE (arg);
3291 /* If this is a comparison, we can simply invert it, except for
3292 floating-point non-equality comparisons, in which case we just
3293 enclose a TRUTH_NOT_EXPR around what we have. */
3295 if (TREE_CODE_CLASS (code) == tcc_comparison)
3297 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3298 if (FLOAT_TYPE_P (op_type)
3299 && flag_trapping_math
3300 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3301 && code != NE_EXPR && code != EQ_EXPR)
/* invert_tree_comparison yields ERROR_MARK when no valid inverse
   exists (e.g. for some FP comparisons). */
3305 code = invert_tree_comparison (code,
3306 HONOR_NANS (TYPE_MODE (op_type)));
3307 if (code == ERROR_MARK)
3310 return build2 (code, type,
3311 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant truth value: negate it directly. */
3318 return constant_boolean_node (integer_zerop (arg), type);
/* De Morgan: !(a && b) == !a || !b, and dually for OR. */
3320 case TRUTH_AND_EXPR:
3321 return build2 (TRUTH_OR_EXPR, type,
3322 invert_truthvalue (TREE_OPERAND (arg, 0)),
3323 invert_truthvalue (TREE_OPERAND (arg, 1)));
3326 return build2 (TRUTH_AND_EXPR, type,
3327 invert_truthvalue (TREE_OPERAND (arg, 0)),
3328 invert_truthvalue (TREE_OPERAND (arg, 1)));
3330 case TRUTH_XOR_EXPR:
3331 /* Here we can invert either operand. We invert the first operand
3332 unless the second operand is a TRUTH_NOT_EXPR in which case our
3333 result is the XOR of the first operand with the inside of the
3334 negation of the second operand. */
3336 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3337 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3338 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3340 return build2 (TRUTH_XOR_EXPR, type,
3341 invert_truthvalue (TREE_OPERAND (arg, 0)),
3342 TREE_OPERAND (arg, 1));
3344 case TRUTH_ANDIF_EXPR:
3345 return build2 (TRUTH_ORIF_EXPR, type,
3346 invert_truthvalue (TREE_OPERAND (arg, 0)),
3347 invert_truthvalue (TREE_OPERAND (arg, 1)));
3349 case TRUTH_ORIF_EXPR:
3350 return build2 (TRUTH_ANDIF_EXPR, type,
3351 invert_truthvalue (TREE_OPERAND (arg, 0)),
3352 invert_truthvalue (TREE_OPERAND (arg, 1)));
3354 case TRUTH_NOT_EXPR:
3355 return TREE_OPERAND (arg, 0);
3359 tree arg1 = TREE_OPERAND (arg, 1);
3360 tree arg2 = TREE_OPERAND (arg, 2);
3361 /* A COND_EXPR may have a throw as one operand, which
3362 then has void type. Just leave void operands
3364 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3365 VOID_TYPE_P (TREE_TYPE (arg1))
3366 ? arg1 : invert_truthvalue (arg1),
3367 VOID_TYPE_P (TREE_TYPE (arg2))
3368 ? arg2 : invert_truthvalue (arg2));
/* COMPOUND_EXPR: keep the first (side-effect) operand, invert the
   value-producing second operand. */
3372 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3373 invert_truthvalue (TREE_OPERAND (arg, 1)));
3375 case NON_LVALUE_EXPR:
3376 return invert_truthvalue (TREE_OPERAND (arg, 0));
3379 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3380 return build1 (TRUTH_NOT_EXPR, type, arg);
/* Conversions: push the negation inside the conversion. */
3384 return build1 (TREE_CODE (arg), type,
3385 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* BIT_AND with mask 1: !(x & 1) is (x & 1) == 0. */
3388 if (!integer_onep (TREE_OPERAND (arg, 1)))
3390 return build2 (EQ_EXPR, type, arg,
3391 build_int_cst (type, 0));
3394 return build1 (TRUTH_NOT_EXPR, type, arg);
3396 case CLEANUP_POINT_EXPR:
3397 return build1 (CLEANUP_POINT_EXPR, type,
3398 invert_truthvalue (TREE_OPERAND (arg, 0)));
3407 /* Return a simplified tree node for the truth-negation of ARG. This
3408 never alters ARG itself. We assume that ARG is an operation that
3409 returns a truth value (0 or 1).
3411 FIXME: one would think we would fold the result, but it causes
3412 problems with the dominator optimizer. */
3415 invert_truthvalue (tree arg)
3419 if (TREE_CODE (arg) == ERROR_MARK)
/* Try the simplifying helper first; fall back to wrapping ARG in an
   explicit TRUTH_NOT_EXPR (presumably when the helper yields null --
   the guard between these two lines is not visible here). */
3422 tem = fold_truth_not_expr (arg);
3424 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3429 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3430 operands are another bit-wise operation with a common input. If so,
3431 distribute the bit operations to save an operation and possibly two if
3432 constants are involved. For example, convert
3433 (A | B) & (A | C) into A | (B & C)
3434 Further simplification will occur if B and C are constants.
3436 If this optimization cannot be done, 0 will be returned. */
3439 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must share the same inner code, which must differ from
   CODE and be one of BIT_AND/BIT_IOR. */
3444 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3445 || TREE_CODE (arg0) == code
3446 || (TREE_CODE (arg0) != BIT_AND_EXPR
3447 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the common operand among the four possible pairings. */
3450 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3452 common = TREE_OPERAND (arg0, 0);
3453 left = TREE_OPERAND (arg0, 1);
3454 right = TREE_OPERAND (arg1, 1);
3456 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3458 common = TREE_OPERAND (arg0, 0);
3459 left = TREE_OPERAND (arg0, 1);
3460 right = TREE_OPERAND (arg1, 0);
3462 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3464 common = TREE_OPERAND (arg0, 1);
3465 left = TREE_OPERAND (arg0, 0);
3466 right = TREE_OPERAND (arg1, 1);
3468 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3470 common = TREE_OPERAND (arg0, 1);
3471 left = TREE_OPERAND (arg0, 0);
3472 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON inner-op (LEFT code RIGHT). */
3477 return fold_build2 (TREE_CODE (arg0), type, common,
3478 fold_build2 (code, type, left, right));
3481 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3482 with code CODE. This optimization is unsafe. */
3484 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3486 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3487 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3489 /* (A / C) +- (B / C) -> (A +- B) / C. */
3491 && operand_equal_p (TREE_OPERAND (arg0, 1),
3492 TREE_OPERAND (arg1, 1), 0))
3493 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3494 fold_build2 (code, type,
3495 TREE_OPERAND (arg0, 0),
3496 TREE_OPERAND (arg1, 0)),
3497 TREE_OPERAND (arg0, 1));
3499 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3500 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3501 TREE_OPERAND (arg1, 0), 0)
3502 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3503 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3505 REAL_VALUE_TYPE r0, r1;
3506 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3507 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
/* Take reciprocals of divisor constants, then combine with CODE
   (guards for the MULT_EXPR cases are elided here). */
3509 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3511 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3512 real_arithmetic (&r0, code, &r0, &r1);
3513 return fold_build2 (MULT_EXPR, type,
3514 TREE_OPERAND (arg0, 0),
3515 build_real (type, r0));
3521 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3522 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3525 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
/* If the reference covers the whole integral/pointer object, a plain
   conversion suffices -- no BIT_FIELD_REF needed. */
3532 tree size = TYPE_SIZE (TREE_TYPE (inner));
3533 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3534 || POINTER_TYPE_P (TREE_TYPE (inner)))
3535 && host_integerp (size, 0)
3536 && tree_low_cst (size, 0) == bitsize)
3537 return fold_convert (type, inner);
3540 result = build3 (BIT_FIELD_REF, type, inner,
3541 size_int (bitsize), bitsize_int (bitpos));
3543 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3548 /* Optimize a bit-field compare.
3550 There are two cases: First is a compare against a constant and the
3551 second is a comparison of two items where the fields are at the same
3552 bit position relative to the start of a chunk (byte, halfword, word)
3553 large enough to contain it. In these cases we can avoid the shift
3554 implicit in bitfield extractions.
3556 For constants, we emit a compare of the shifted constant with the
3557 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3558 compared. For two fields at the same position, we do the ANDs with the
3559 similar mask and compare the result of the ANDs.
3561 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3562 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3563 are the left and right operands of the comparison, respectively.
3565 If the optimization described above can be done, we return the resulting
3566 tree. Otherwise we return zero. */
3569 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3572 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3573 tree type = TREE_TYPE (lhs);
3574 tree signed_type, unsigned_type;
3575 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3576 enum machine_mode lmode, rmode, nmode;
3577 int lunsignedp, runsignedp;
3578 int lvolatilep = 0, rvolatilep = 0;
3579 tree linner, rinner = NULL_TREE;
3583 /* Get all the information about the extractions being done. If the bit size
3584 is the same as the size of the underlying object, we aren't doing an
3585 extraction at all and so can do nothing. We also don't want to
3586 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3587 then will no longer be able to replace it. */
3588 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3589 &lunsignedp, &lvolatilep, false);
3590 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3591 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3596 /* If this is not a constant, we can only do something if bit positions,
3597 sizes, and signedness are the same. */
3598 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3599 &runsignedp, &rvolatilep, false);
3601 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3602 || lunsignedp != runsignedp || offset != 0
3603 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3607 /* See if we can find a mode to refer to this field. We should be able to,
3608 but fail if we can't. */
3609 nmode = get_best_mode (lbitsize, lbitpos,
3610 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3611 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3612 TYPE_ALIGN (TREE_TYPE (rinner))),
3613 word_mode, lvolatilep || rvolatilep);
3614 if (nmode == VOIDmode)
3617 /* Set signed and unsigned types of the precision of this mode for the
3619 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3620 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3622 /* Compute the bit position and size for the new reference and our offset
3623 within it. If the new reference is the same size as the original, we
3624 won't optimize anything, so return zero. */
3625 nbitsize = GET_MODE_BITSIZE (nmode);
3626 nbitpos = lbitpos & ~ (nbitsize - 1);
3628 if (nbitsize == lbitsize)
3631 if (BYTES_BIG_ENDIAN)
3632 lbitpos = nbitsize - lbitsize - lbitpos;
3634 /* Make the mask to be used against the extracted field. */
/* All-ones, shifted left then right to leave lbitsize ones at
   position lbitpos. */
3635 mask = build_int_cst_type (unsigned_type, -1);
3636 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3637 mask = const_binop (RSHIFT_EXPR, mask,
3638 size_int (nbitsize - lbitsize - lbitpos), 0);
3641 /* If not comparing with constant, just rework the comparison
3643 return fold_build2 (code, compare_type,
3644 fold_build2 (BIT_AND_EXPR, unsigned_type,
3645 make_bit_field_ref (linner,
3650 fold_build2 (BIT_AND_EXPR, unsigned_type,
3651 make_bit_field_ref (rinner,
3657 /* Otherwise, we are handling the constant case. See if the constant is too
3658 big for the field. Warn and return a tree for 0 (false) if so. We do
3659 this not only for its own sake, but to avoid having to test for this
3660 error case below. If we didn't, we might generate wrong code.
3662 For unsigned fields, the constant shifted right by the field length should
3663 be all zero. For signed fields, the high-order bits should agree with
3668 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3669 fold_convert (unsigned_type, rhs),
3670 size_int (lbitsize), 0)))
3672 warning (0, "comparison is always %d due to width of bit-field",
3674 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed case: high-order bits must all match the sign bit. */
3679 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3680 size_int (lbitsize - 1), 0);
3681 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3683 warning (0, "comparison is always %d due to width of bit-field",
3685 return constant_boolean_node (code == NE_EXPR, compare_type);
3689 /* Single-bit compares should always be against zero. */
3690 if (lbitsize == 1 && ! integer_zerop (rhs))
3692 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3693 rhs = build_int_cst (type, 0);
3696 /* Make a new bitfield reference, shift the constant over the
3697 appropriate number of bits and mask it with the computed mask
3698 (in case this was a signed field). If we changed it, make a new one. */
3699 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3702 TREE_SIDE_EFFECTS (lhs) = 1;
3703 TREE_THIS_VOLATILE (lhs) = 1;
3706 rhs = const_binop (BIT_AND_EXPR,
3707 const_binop (LSHIFT_EXPR,
3708 fold_convert (unsigned_type, rhs),
3709 size_int (lbitpos), 0),
3712 return build2 (code, compare_type,
3713 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3717 /* Subroutine for fold_truthop: decode a field reference.
3719 If EXP is a comparison reference, we return the innermost reference.
3721 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3722 set to the starting bit number.
3724 If the innermost field can be completely contained in a mode-sized
3725 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3727 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3728 otherwise it is not changed.
3730 *PUNSIGNEDP is set to the signedness of the field.
3732 *PMASK is set to the mask used. This is either contained in a
3733 BIT_AND_EXPR or derived from the width of the field.
3735 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3737 Return 0 if this is not a component reference or is one that we can't
3738 do anything with. */
3741 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3742 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3743 int *punsignedp, int *pvolatilep,
3744 tree *pmask, tree *pand_mask)
3746 tree outer_type = 0;
3748 tree mask, inner, offset;
3750 unsigned int precision;
3752 /* All the optimizations using this function assume integer fields.
3753 There are problems with FP fields since the type_for_size call
3754 below can fail for, e.g., XFmode. */
3755 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3758 /* We are interested in the bare arrangement of bits, so strip everything
3759 that doesn't affect the machine mode. However, record the type of the
3760 outermost expression if it may matter below. */
3761 if (TREE_CODE (exp) == NOP_EXPR
3762 || TREE_CODE (exp) == CONVERT_EXPR
3763 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3764 outer_type = TREE_TYPE (exp);
3767 if (TREE_CODE (exp) == BIT_AND_EXPR)
3769 and_mask = TREE_OPERAND (exp, 1);
3770 exp = TREE_OPERAND (exp, 0);
3771 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3772 if (TREE_CODE (and_mask) != INTEGER_CST)
3776 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3777 punsignedp, pvolatilep, false);
3778 if ((inner == exp && and_mask == 0)
3779 || *pbitsize < 0 || offset != 0
3780 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3783 /* If the number of bits in the reference is the same as the bitsize of
3784 the outer type, then the outer type gives the signedness. Otherwise
3785 (in case of a small bitfield) the signedness is unchanged. */
3786 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3787 *punsignedp = TYPE_UNSIGNED (outer_type);
3789 /* Compute the mask to access the bitfield. */
3790 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3791 precision = TYPE_PRECISION (unsigned_type);
/* All-ones shifted left then right, leaving *pbitsize low-order ones. */
3793 mask = build_int_cst_type (unsigned_type, -1);
3795 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3796 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3798 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3800 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3801 fold_convert (unsigned_type, and_mask), mask);
3804 *pand_mask = and_mask;
3808 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3812 all_ones_mask_p (tree mask, int size)
3814 tree type = TREE_TYPE (mask);
3815 unsigned int precision = TYPE_PRECISION (type);
/* Build a signed all-ones constant, shift it left and then (arithmetic)
   right by (precision - size): MASK is a run of SIZE low-order ones iff
   it equals that result.  NOTE(review): the declaration of tmask and the
   surrounding return expression are elided in this excerpt.  */
3818 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3821 tree_int_cst_equal (mask,
3822 const_binop (RSHIFT_EXPR,
3823 const_binop (LSHIFT_EXPR, tmask,
3824 size_int (precision - size),
3826 size_int (precision - size), 0));
3829 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3830 represents the sign bit of EXP's type. If EXP represents a sign
3831 or zero extension, also test VAL against the unextended type.
3832 The return value is the (sub)expression whose sign bit is VAL,
3833 or NULL_TREE otherwise. */
3836 sign_bit_p (tree exp, tree val)
3838 unsigned HOST_WIDE_INT mask_lo, lo;
3839 HOST_WIDE_INT mask_hi, hi;
3843 /* Tree EXP must have an integral type. */
3844 t = TREE_TYPE (exp);
3845 if (! INTEGRAL_TYPE_P (t))
3848 /* Tree VAL must be an integer constant. */
3849 if (TREE_CODE (val) != INTEGER_CST
3850 || TREE_OVERFLOW (val))
/* The constant is stored in two host words (TREE_INT_CST_LOW/HIGH).
   Place the expected sign-bit pattern in the high word when the type's
   precision exceeds one host word, otherwise in the low word; MASK_LO
   and MASK_HI cover only the bits of the type.  NOTE(review): the else
   branch zeroing hi/mask_hi (and the matching lo path) is partially
   elided in this excerpt.  */
3853 width = TYPE_PRECISION (t);
3854 if (width > HOST_BITS_PER_WIDE_INT)
3856 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3859 mask_hi = ((unsigned HOST_WIDE_INT) -1
3860 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3866 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3869 mask_lo = ((unsigned HOST_WIDE_INT) -1
3870 >> (HOST_BITS_PER_WIDE_INT - width));
3873 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3874 treat VAL as if it were unsigned. */
3875 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3876 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3879 /* Handle extension from a narrower type. */
3880 if (TREE_CODE (exp) == NOP_EXPR
3881 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3882 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3887 /* Subroutine for fold_truthop: determine if an operand is simple enough
3888 to be evaluated unconditionally. */
3891 simple_operand_p (tree exp)
3893 /* Strip any conversions that don't change the machine mode. */
/* An operand is "simple" (safe and cheap to evaluate unconditionally)
   when it is a constant, an SSA name, or a declaration that is local,
   non-addressable, non-volatile, and not an expensive static load.
   NOTE(review): a DECL_P-style test preceding the accessor chain is
   elided in this excerpt.  */
3896 return (CONSTANT_CLASS_P (exp)
3897 || TREE_CODE (exp) == SSA_NAME
3899 && ! TREE_ADDRESSABLE (exp)
3900 && ! TREE_THIS_VOLATILE (exp)
3901 && ! DECL_NONLOCAL (exp)
3902 /* Don't regard global variables as simple. They may be
3903 allocated in ways unknown to the compiler (shared memory,
3904 #pragma weak, etc). */
3905 && ! TREE_PUBLIC (exp)
3906 && ! DECL_EXTERNAL (exp)
3907 /* Loading a static variable is unduly expensive, but global
3908 registers aren't expensive. */
3909 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3912 /* The following functions are subroutines to fold_range_test and allow it to
3913 try to change a logical combination of comparisons into a range test.
3916 X == 2 || X == 3 || X == 4 || X == 5
3920 (unsigned) (X - 2) <= 3
3922 We describe each set of comparisons as being either inside or outside
3923 a range, using a variable named like IN_P, and then describe the
3924 range with a lower and upper bound. If one of the bounds is omitted,
3925 it represents either the highest or lowest value of the type.
3927 In the comments below, we represent a range by two numbers in brackets
3928 preceded by a "+" to designate being inside that range, or a "-" to
3929 designate being outside that range, so the condition can be inverted by
3930 flipping the prefix. An omitted bound is represented by a "-". For
3931 example, "- [-, 10]" means being outside the range starting at the lowest
3932 possible value and ending at 10, in other words, being greater than 10.
3933 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3936 We set up things so that the missing bounds are handled in a consistent
3937 manner so neither a missing bound nor "true" and "false" need to be
3938 handled using a special case. */
3940 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3941 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3942 and UPPER1_P are nonzero if the respective argument is an upper bound
3943 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3944 must be specified for a comparison. ARG1 will be converted to ARG0's
3945 type if both are specified. */
3948 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3949 tree arg1, int upper1_p)
3955 /* If neither arg represents infinity, do the normal operation.
3956 Else, if not a comparison, return infinity. Else handle the special
3957 comparison rules. Note that most of the cases below won't occur, but
3958 are handled for consistency. */
3960 if (arg0 != 0 && arg1 != 0)
3962 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3963 arg0, fold_convert (TREE_TYPE (arg0), arg1));
/* Only a result folded all the way to a constant is useful here.  */
3965 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3968 if (TREE_CODE_CLASS (code) != tcc_comparison)
3971 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3972 for neither. In real maths, we cannot assume open ended ranges are
3973 the same. But, this is computer arithmetic, where numbers are finite.
3974 We can therefore make the transformation of any unbounded range with
3975 the value Z, Z being greater than any representable number. This permits
3976 us to treat unbounded ranges as equal. */
3977 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3978 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Each comparison reduces to an integer test on the "infinity classes".
   NOTE(review): the switch statement and its case labels dispatching on
   CODE are elided in this excerpt; only the assignments remain.  */
3982 result = sgn0 == sgn1;
3985 result = sgn0 != sgn1;
3988 result = sgn0 < sgn1;
3991 result = sgn0 <= sgn1;
3994 result = sgn0 > sgn1;
3997 result = sgn0 >= sgn1;
4003 return constant_boolean_node (result, type);
4006 /* Given EXP, a logical expression, set the range it is testing into
4007 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4008 actually being tested. *PLOW and *PHIGH will be made of the same
4009 type as the returned expression. If EXP is not a comparison, we
4010 will most likely not be returning a useful value and range. Set
4011 *STRICT_OVERFLOW_P to true if the return value is only valid
4012 because signed overflow is undefined; otherwise, do not change
4013 *STRICT_OVERFLOW_P. */
4016 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4017 bool *strict_overflow_p)
/* NOTE(review): this excerpt elides numerous original lines of this
   function (the enclosing while loop, several case labels, break and
   return statements); the leading numbers are original-file line
   numbers.  Comments below describe only what the visible code shows.  */
4019 enum tree_code code;
4020 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4021 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4023 tree low, high, n_low, n_high;
4025 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4026 and see if we can refine the range. Some of the cases below may not
4027 happen, but it doesn't seem worth worrying about this. We "continue"
4028 the outer loop when we've changed something; otherwise we "break"
4029 the switch, which will "break" the while. */
4032 low = high = build_int_cst (TREE_TYPE (exp), 0);
4036 code = TREE_CODE (exp);
4037 exp_type = TREE_TYPE (exp);
/* Pick out the operands relevant for this code class before dispatching.  */
4039 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4041 if (TREE_OPERAND_LENGTH (exp) > 0)
4042 arg0 = TREE_OPERAND (exp, 0);
4043 if (TREE_CODE_CLASS (code) == tcc_comparison
4044 || TREE_CODE_CLASS (code) == tcc_unary
4045 || TREE_CODE_CLASS (code) == tcc_binary)
4046 arg0_type = TREE_TYPE (arg0);
4047 if (TREE_CODE_CLASS (code) == tcc_binary
4048 || TREE_CODE_CLASS (code) == tcc_comparison
4049 || (TREE_CODE_CLASS (code) == tcc_expression
4050 && TREE_OPERAND_LENGTH (exp) > 1))
4051 arg1 = TREE_OPERAND (exp, 1);
4056 case TRUTH_NOT_EXPR:
/* Logical negation flips in/out and recurses into the operand.  */
4057 in_p = ! in_p, exp = arg0;
4060 case EQ_EXPR: case NE_EXPR:
4061 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4062 /* We can only do something if the range is testing for zero
4063 and if the second operand is an integer constant. Note that
4064 saying something is "in" the range we make is done by
4065 complementing IN_P since it will set in the initial case of
4066 being not equal to zero; "out" is leaving it alone. */
4067 if (low == 0 || high == 0
4068 || ! integer_zerop (low) || ! integer_zerop (high)
4069 || TREE_CODE (arg1) != INTEGER_CST)
4074 case NE_EXPR: /* - [c, c] */
4077 case EQ_EXPR: /* + [c, c] */
4078 in_p = ! in_p, low = high = arg1;
4080 case GT_EXPR: /* - [-, c] */
4081 low = 0, high = arg1;
4083 case GE_EXPR: /* + [c, -] */
4084 in_p = ! in_p, low = arg1, high = 0;
4086 case LT_EXPR: /* - [c, -] */
4087 low = arg1, high = 0;
4089 case LE_EXPR: /* + [-, c] */
4090 in_p = ! in_p, low = 0, high = arg1;
4096 /* If this is an unsigned comparison, we also know that EXP is
4097 greater than or equal to zero. We base the range tests we make
4098 on that fact, so we record it here so we can parse existing
4099 range tests. We test arg0_type since often the return type
4100 of, e.g. EQ_EXPR, is boolean. */
4101 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4103 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4105 build_int_cst (arg0_type, 0),
4109 in_p = n_in_p, low = n_low, high = n_high;
4111 /* If the high bound is missing, but we have a nonzero low
4112 bound, reverse the range so it goes from zero to the low bound
4114 if (high == 0 && low && ! integer_zerop (low))
4117 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4118 integer_one_node, 0);
4119 low = build_int_cst (arg0_type, 0);
4127 /* (-x) IN [a,b] -> x in [-b, -a] */
4128 n_low = range_binop (MINUS_EXPR, exp_type,
4129 build_int_cst (exp_type, 0),
4131 n_high = range_binop (MINUS_EXPR, exp_type,
4132 build_int_cst (exp_type, 0),
4134 low = n_low, high = n_high;
/* ~x is rewritten as -x - 1 so the NEGATE logic above applies.  */
4140 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4141 build_int_cst (exp_type, 1));
4144 case PLUS_EXPR: case MINUS_EXPR:
4145 if (TREE_CODE (arg1) != INTEGER_CST)
4148 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4149 move a constant to the other side. */
4150 if (!TYPE_UNSIGNED (arg0_type)
4151 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4154 /* If EXP is signed, any overflow in the computation is undefined,
4155 so we don't worry about it so long as our computations on
4156 the bounds don't overflow. For unsigned, overflow is defined
4157 and this is exactly the right thing. */
4158 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4159 arg0_type, low, 0, arg1, 0);
4160 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4161 arg0_type, high, 1, arg1, 0);
4162 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4163 || (n_high != 0 && TREE_OVERFLOW (n_high)))
/* Record that the result relies on signed overflow being undefined.  */
4166 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4167 *strict_overflow_p = true;
4169 /* Check for an unsigned range which has wrapped around the maximum
4170 value thus making n_high < n_low, and normalize it. */
4171 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4173 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4174 integer_one_node, 0);
4175 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4176 integer_one_node, 0);
4178 /* If the range is of the form +/- [ x+1, x ], we won't
4179 be able to normalize it. But then, it represents the
4180 whole range or the empty set, so make it
4182 if (tree_int_cst_equal (n_low, low)
4183 && tree_int_cst_equal (n_high, high))
4189 low = n_low, high = n_high;
4194 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
/* A widening or same-size conversion: translate the bounds into the
   operand's type if they fit there.  */
4195 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4198 if (! INTEGRAL_TYPE_P (arg0_type)
4199 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4200 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4203 n_low = low, n_high = high;
4206 n_low = fold_convert (arg0_type, n_low);
4209 n_high = fold_convert (arg0_type, n_high);
4212 /* If we're converting arg0 from an unsigned type, to exp,
4213 a signed type, we will be doing the comparison as unsigned.
4214 The tests above have already verified that LOW and HIGH
4217 So we have to ensure that we will handle large unsigned
4218 values the same way that the current signed bounds treat
4221 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4224 tree equiv_type = lang_hooks.types.type_for_mode
4225 (TYPE_MODE (arg0_type), 1);
4227 /* A range without an upper bound is, naturally, unbounded.
4228 Since convert would have cropped a very large value, use
4229 the max value for the destination type. */
4231 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4232 : TYPE_MAX_VALUE (arg0_type);
4234 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4235 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4236 fold_convert (arg0_type,
4238 build_int_cst (arg0_type, 1));
4240 /* If the low bound is specified, "and" the range with the
4241 range for which the original unsigned value will be
4245 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4246 1, n_low, n_high, 1,
4247 fold_convert (arg0_type,
4252 in_p = (n_in_p == in_p);
4256 /* Otherwise, "or" the range with the range of the input
4257 that will be interpreted as negative. */
4258 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4259 0, n_low, n_high, 1,
4260 fold_convert (arg0_type,
4265 in_p = (in_p != n_in_p);
4270 low = n_low, high = n_high;
4280 /* If EXP is a constant, we can evaluate whether this is true or false. */
4281 if (TREE_CODE (exp) == INTEGER_CST)
4283 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4285 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4291 *pin_p = in_p, *plow = low, *phigh = high;
4295 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4296 type, TYPE, return an expression to test if EXP is in (or out of, depending
4297 on IN_P) the range. Return 0 if the test couldn't be created. */
4300 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
/* NOTE(review): several original lines (braces, some returns, and the
   declarations of value/prec/hi) are elided in this excerpt; the leading
   numbers are original-file line numbers.  */
4302 tree etype = TREE_TYPE (exp);
4305 #ifdef HAVE_canonicalize_funcptr_for_compare
4306 /* Disable this optimization for function pointer expressions
4307 on targets that require function pointer canonicalization. */
4308 if (HAVE_canonicalize_funcptr_for_compare
4309 && TREE_CODE (etype) == POINTER_TYPE
4310 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* An "out of range" test is built as the inverse of the "in range" test.  */
4316 value = build_range_check (type, exp, 1, low, high);
4318 return invert_truthvalue (value);
/* No bounds at all means the range covers everything: constant true.  */
4323 if (low == 0 && high == 0)
4324 return build_int_cst (type, 1);
/* One-sided ranges reduce to a single comparison.  */
4327 return fold_build2 (LE_EXPR, type, exp,
4328 fold_convert (etype, high));
4331 return fold_build2 (GE_EXPR, type, exp,
4332 fold_convert (etype, low));
/* A singleton range [c, c] is just an equality test.  */
4334 if (operand_equal_p (low, high, 0))
4335 return fold_build2 (EQ_EXPR, type, exp,
4336 fold_convert (etype, low));
/* [0, high] on a signed type: redo the check in the unsigned variant so
   the low bound comes for free.  */
4338 if (integer_zerop (low))
4340 if (! TYPE_UNSIGNED (etype))
4342 etype = lang_hooks.types.unsigned_type (etype);
4343 high = fold_convert (etype, high);
4344 exp = fold_convert (etype, exp);
4346 return build_range_check (type, exp, 1, 0, high);
4349 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4350 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4352 unsigned HOST_WIDE_INT lo;
/* Recognize HIGH == signed maximum of the precision, split across the
   two host words of the constant.  */
4356 prec = TYPE_PRECISION (etype);
4357 if (prec <= HOST_BITS_PER_WIDE_INT)
4360 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4364 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4365 lo = (unsigned HOST_WIDE_INT) -1;
4368 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4370 if (TYPE_UNSIGNED (etype))
4372 etype = lang_hooks.types.signed_type (etype);
4373 exp = fold_convert (etype, exp);
4375 return fold_build2 (GT_EXPR, type, exp,
4376 build_int_cst (etype, 0));
4380 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4381 This requires wrap-around arithmetics for the type of the expression. */
4382 switch (TREE_CODE (etype))
4385 /* There is no requirement that LOW be within the range of ETYPE
4386 if the latter is a subtype. It must, however, be within the base
4387 type of ETYPE. So be sure we do the subtraction in that type. */
4388 if (TREE_TYPE (etype))
4389 etype = TREE_TYPE (etype);
/* Enumeral/boolean types: switch to the integer type of the same
   precision and signedness.  */
4394 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4395 TYPE_UNSIGNED (etype));
4402 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4403 if (TREE_CODE (etype) == INTEGER_TYPE
4404 && !TYPE_OVERFLOW_WRAPS (etype))
4406 tree utype, minv, maxv;
4408 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4409 for the type in question, as we rely on this here. */
4410 utype = lang_hooks.types.unsigned_type (etype);
4411 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4412 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4413 integer_one_node, 1);
4414 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4416 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4423 high = fold_convert (etype, high);
4424 low = fold_convert (etype, low);
4425 exp = fold_convert (etype, exp);
4427 value = const_binop (MINUS_EXPR, high, low, 0);
/* With HIGH - LOW known constant and no overflow, shift the range to
   start at zero and recurse on the simpler check.  */
4429 if (value != 0 && !TREE_OVERFLOW (value))
4430 return build_range_check (type,
4431 fold_build2 (MINUS_EXPR, etype, exp, low),
4432 1, build_int_cst (etype, 0), value);
4437 /* Return the predecessor of VAL in its type, handling the infinite case. */
4440 range_predecessor (tree val)
4442 tree type = TREE_TYPE (val);
/* The predecessor of the type's minimum would wrap: that case takes the
   "infinite" (elided) early-return path in the original source.  */
4444 if (INTEGRAL_TYPE_P (type)
4445 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4448 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4451 /* Return the successor of VAL in its type, handling the infinite case. */
4454 range_successor (tree val)
4456 tree type = TREE_TYPE (val);
/* The successor of the type's maximum would wrap: that case takes the
   "infinite" (elided) early-return path in the original source.  */
4458 if (INTEGRAL_TYPE_P (type)
4459 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4462 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4465 /* Given two ranges, see if we can merge them into one. Return 1 if we
4466 can, 0 if we can't. Set the output range into the specified parameters. */
4469 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4470 tree high0, int in1_p, tree low1, tree high1)
/* NOTE(review): this excerpt elides a number of original lines (local
   declarations, braces, several else/return arms and goto labels); the
   leading numbers are original-file line numbers.  */
4478 int lowequal = ((low0 == 0 && low1 == 0)
4479 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4480 low0, 0, low1, 0)));
4481 int highequal = ((high0 == 0 && high1 == 0)
4482 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4483 high0, 1, high1, 1)));
4485 /* Make range 0 be the range that starts first, or ends last if they
4486 start at the same value. Swap them if it isn't. */
4487 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4490 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4491 high1, 1, high0, 1))))
4493 temp = in0_p, in0_p = in1_p, in1_p = temp;
4494 tem = low0, low0 = low1, low1 = tem;
4495 tem = high0, high0 = high1, high1 = tem;
4498 /* Now flag two cases, whether the ranges are disjoint or whether the
4499 second range is totally subsumed in the first. Note that the tests
4500 below are simplified by the ones above. */
4501 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4502 high0, 1, low1, 0));
4503 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4504 high1, 1, high0, 1));
4506 /* We now have four cases, depending on whether we are including or
4507 excluding the two ranges. */
4510 /* If they don't overlap, the result is false. If the second range
4511 is a subset it is the result. Otherwise, the range is from the start
4512 of the second to the end of the first. */
4514 in_p = 0, low = high = 0;
4516 in_p = 1, low = low1, high = high1;
4518 in_p = 1, low = low1, high = high0;
4521 else if (in0_p && ! in1_p)
4523 /* If they don't overlap, the result is the first range. If they are
4524 equal, the result is false. If the second range is a subset of the
4525 first, and the ranges begin at the same place, we go from just after
4526 the end of the second range to the end of the first. If the second
4527 range is not a subset of the first, or if it is a subset and both
4528 ranges end at the same place, the range starts at the start of the
4529 first range and ends just before the second range.
4530 Otherwise, we can't describe this as a single range. */
4532 in_p = 1, low = low0, high = high0;
4533 else if (lowequal && highequal)
4534 in_p = 0, low = high = 0;
4535 else if (subset && lowequal)
4537 low = range_successor (high1);
4541 else if (! subset || highequal)
4544 high = range_predecessor (low1);
4551 else if (! in0_p && in1_p)
4553 /* If they don't overlap, the result is the second range. If the second
4554 is a subset of the first, the result is false. Otherwise,
4555 the range starts just after the first range and ends at the
4556 end of the second. */
4558 in_p = 1, low = low1, high = high1;
4559 else if (subset || highequal)
4560 in_p = 0, low = high = 0;
4563 low = range_successor (high0);
4571 /* The case where we are excluding both ranges. Here the complex case
4572 is if they don't overlap. In that case, the only time we have a
4573 range is if they are adjacent. If the second is a subset of the
4574 first, the result is the first. Otherwise, the range to exclude
4575 starts at the beginning of the first range and ends at the end of the
/* Adjacency test: successor of HIGH0 equals LOW1.  */
4579 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4580 range_successor (high0),
4582 in_p = 0, low = low0, high = high1;
4585 /* Canonicalize - [min, x] into - [-, x]. */
4586 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4587 switch (TREE_CODE (TREE_TYPE (low0)))
/* Only types whose precision fills the machine mode can be treated
   this way; partial-precision types fall through.  */
4590 if (TYPE_PRECISION (TREE_TYPE (low0))
4591 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4595 if (tree_int_cst_equal (low0,
4596 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4600 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4601 && integer_zerop (low0))
4608 /* Canonicalize - [x, max] into - [x, -]. */
4609 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4610 switch (TREE_CODE (TREE_TYPE (high1)))
4613 if (TYPE_PRECISION (TREE_TYPE (high1))
4614 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4618 if (tree_int_cst_equal (high1,
4619 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4623 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4624 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4626 integer_one_node, 1)))
4633 /* The ranges might be also adjacent between the maximum and
4634 minimum values of the given type. For
4635 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4636 return + [x + 1, y - 1]. */
4637 if (low0 == 0 && high1 == 0)
4639 low = range_successor (high0);
4640 high = range_predecessor (low1);
4641 if (low == 0 || high == 0)
4651 in_p = 0, low = low0, high = high0;
4653 in_p = 0, low = low0, high = high1;
4656 *pin_p = in_p, *plow = low, *phigh = high;
4661 /* Subroutine of fold, looking inside expressions of the form
4662 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4663 of the COND_EXPR. This function is being used also to optimize
4664 A op B ? C : A, by reversing the comparison first.
4666 Return a folded expression whose code is not a COND_EXPR
4667 anymore, or NULL_TREE if no folding opportunity is found. */
4670 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
/* NOTE(review): this excerpt elides many original lines of this function
   (switch statements over comp_code, their case labels, braces, and some
   returns); the leading numbers are original-file line numbers.  */
4672 enum tree_code comp_code = TREE_CODE (arg0);
4673 tree arg00 = TREE_OPERAND (arg0, 0);
4674 tree arg01 = TREE_OPERAND (arg0, 1);
4675 tree arg1_type = TREE_TYPE (arg1);
4681 /* If we have A op 0 ? A : -A, consider applying the following
4684 A == 0? A : -A same as -A
4685 A != 0? A : -A same as A
4686 A >= 0? A : -A same as abs (A)
4687 A > 0? A : -A same as abs (A)
4688 A <= 0? A : -A same as -abs (A)
4689 A < 0? A : -A same as -abs (A)
4691 None of these transformations work for modes with signed
4692 zeros. If A is +/-0, the first two transformations will
4693 change the sign of the result (from +0 to -0, or vice
4694 versa). The last four will fix the sign of the result,
4695 even though the original expressions could be positive or
4696 negative, depending on the sign of A.
4698 Note that all these transformations are correct if A is
4699 NaN, since the two alternatives (A and -A) are also NaNs. */
4700 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4701 ? real_zerop (arg01)
4702 : integer_zerop (arg01))
4703 && ((TREE_CODE (arg2) == NEGATE_EXPR
4704 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4705 /* In the case that A is of the form X-Y, '-A' (arg2) may
4706 have already been folded to Y-X, check for that. */
4707 || (TREE_CODE (arg1) == MINUS_EXPR
4708 && TREE_CODE (arg2) == MINUS_EXPR
4709 && operand_equal_p (TREE_OPERAND (arg1, 0),
4710 TREE_OPERAND (arg2, 1), 0)
4711 && operand_equal_p (TREE_OPERAND (arg1, 1),
4712 TREE_OPERAND (arg2, 0), 0))))
/* EQ case: A == 0 ? A : -A folds to -A.  */
4717 tem = fold_convert (arg1_type, arg1);
4718 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
/* NE case: A != 0 ? A : -A folds to A.  */
4721 return pedantic_non_lvalue (fold_convert (type, arg1));
/* GE/GT case: fold to abs (A); trapping math blocks this (elided
   UNLE/UNLT handling nearby).  */
4724 if (flag_trapping_math)
4729 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4730 arg1 = fold_convert (lang_hooks.types.signed_type
4731 (TREE_TYPE (arg1)), arg1);
4732 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4733 return pedantic_non_lvalue (fold_convert (type, tem));
/* LE/LT case: fold to -abs (A), again guarded by trapping math.  */
4736 if (flag_trapping_math)
4740 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4741 arg1 = fold_convert (lang_hooks.types.signed_type
4742 (TREE_TYPE (arg1)), arg1);
4743 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4744 return negate_expr (fold_convert (type, tem));
4746 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4750 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4751 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4752 both transformations are correct when A is NaN: A != 0
4753 is then true, and A == 0 is false. */
4755 if (integer_zerop (arg01) && integer_zerop (arg2))
4757 if (comp_code == NE_EXPR)
4758 return pedantic_non_lvalue (fold_convert (type, arg1));
4759 else if (comp_code == EQ_EXPR)
4760 return build_int_cst (type, 0);
4763 /* Try some transformations of A op B ? A : B.
4765 A == B? A : B same as B
4766 A != B? A : B same as A
4767 A >= B? A : B same as max (A, B)
4768 A > B? A : B same as max (B, A)
4769 A <= B? A : B same as min (A, B)
4770 A < B? A : B same as min (B, A)
4772 As above, these transformations don't work in the presence
4773 of signed zeros. For example, if A and B are zeros of
4774 opposite sign, the first two transformations will change
4775 the sign of the result. In the last four, the original
4776 expressions give different results for (A=+0, B=-0) and
4777 (A=-0, B=+0), but the transformed expressions do not.
4779 The first two transformations are correct if either A or B
4780 is a NaN. In the first transformation, the condition will
4781 be false, and B will indeed be chosen. In the case of the
4782 second transformation, the condition A != B will be true,
4783 and A will be chosen.
4785 The conversions to max() and min() are not correct if B is
4786 a number and A is not. The conditions in the original
4787 expressions will be false, so all four give B. The min()
4788 and max() versions would give a NaN instead. */
4789 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4790 /* Avoid these transformations if the COND_EXPR may be used
4791 as an lvalue in the C++ front-end. PR c++/19199. */
4793 || (strcmp (lang_hooks.name, "GNU C++") != 0
4794 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4795 || ! maybe_lvalue_p (arg1)
4796 || ! maybe_lvalue_p (arg2)))
4798 tree comp_op0 = arg00;
4799 tree comp_op1 = arg01;
4800 tree comp_type = TREE_TYPE (comp_op0);
4802 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4803 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
/* EQ: result is B; NE: result is A.  */
4813 return pedantic_non_lvalue (fold_convert (type, arg2));
4815 return pedantic_non_lvalue (fold_convert (type, arg1));
4820 /* In C++ a ?: expression can be an lvalue, so put the
4821 operand which will be used if they are equal first
4822 so that we can convert this back to the
4823 corresponding COND_EXPR. */
4824 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4826 comp_op0 = fold_convert (comp_type, comp_op0);
4827 comp_op1 = fold_convert (comp_type, comp_op1);
4828 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4829 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4830 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4831 return pedantic_non_lvalue (fold_convert (type, tem));
/* GE/GT (and unordered variants): build MAX_EXPR symmetrically.  */
4838 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4840 comp_op0 = fold_convert (comp_type, comp_op0);
4841 comp_op1 = fold_convert (comp_type, comp_op1);
4842 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4843 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4844 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4845 return pedantic_non_lvalue (fold_convert (type, tem));
4849 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4850 return pedantic_non_lvalue (fold_convert (type, arg2));
4853 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4854 return pedantic_non_lvalue (fold_convert (type, arg1));
4857 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4862 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4863 we might still be able to simplify this. For example,
4864 if C1 is one less or one more than C2, this might have started
4865 out as a MIN or MAX and been transformed by this function.
4866 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4868 if (INTEGRAL_TYPE_P (type)
4869 && TREE_CODE (arg01) == INTEGER_CST
4870 && TREE_CODE (arg2) == INTEGER_CST)
4874 /* We can replace A with C1 in this case. */
4875 arg1 = fold_convert (type, arg01);
4876 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4879 /* If C1 is C2 + 1, this is min(A, C2). */
4880 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4882 && operand_equal_p (arg01,
4883 const_binop (PLUS_EXPR, arg2,
4884 build_int_cst (type, 1), 0),
4886 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4891 /* If C1 is C2 - 1, this is min(A, C2). */
4892 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4894 && operand_equal_p (arg01,
4895 const_binop (MINUS_EXPR, arg2,
4896 build_int_cst (type, 1), 0),
4898 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4903 /* If C1 is C2 - 1, this is max(A, C2). */
4904 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4906 && operand_equal_p (arg01,
4907 const_binop (MINUS_EXPR, arg2,
4908 build_int_cst (type, 1), 0),
4910 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4915 /* If C1 is C2 + 1, this is max(A, C2). */
4916 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4918 && operand_equal_p (arg01,
4919 const_binop (PLUS_EXPR, arg2,
4920 build_int_cst (type, 1), 0),
4922 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4936 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4937 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4940 /* EXP is some logical combination of boolean tests.  See if we can
4941    merge it into some range test.  Return the new tree if so.  */
4944 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4946   int or_op = (code == TRUTH_ORIF_EXPR
4947 	       || code == TRUTH_OR_EXPR);
4948   int in0_p, in1_p, in_p;
4949   tree low0, low1, low, high0, high1, high;
4950   bool strict_overflow_p = false;
  /* Decompose each operand into a range test: IN?_P says whether the
     tested value must lie inside [LOW?, HIGH?] or outside it; LHS/RHS
     are the underlying expressions being range-tested (or 0).  */
4951   tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4952   tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4954   const char * const warnmsg = G_("assuming signed overflow does not occur "
4955 				  "when simplifying range test");
4957   /* If this is an OR operation, invert both sides; we will invert
4958      again at the end.  */
  /* (De Morgan: A || B == !(!A && !B), so we can reuse the AND-merge.)  */
4960     in0_p = ! in0_p, in1_p = ! in1_p;
4962   /* If both expressions are the same, if we can merge the ranges, and we
4963      can build the range test, return it or it inverted.  If one of the
4964      ranges is always true or always false, consider it to be the same
4965      expression as the other.  */
4966   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4967       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4969       && 0 != (tem = (build_range_check (type,
4971 					 : rhs != 0 ? rhs : integer_zero_node,
4974       if (strict_overflow_p)
  /* The merge relied on signed overflow being undefined; emit the
     -Wstrict-overflow diagnostic if enabled.  */
4975 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4976       return or_op ? invert_truthvalue (tem) : tem;
4979   /* On machines where the branch cost is expensive, if this is a
4980      short-circuited branch and the underlying object on both sides
4981      is the same, make a non-short-circuit operation.  */
4982   else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4983 	   && lhs != 0 && rhs != 0
4984 	   && (code == TRUTH_ANDIF_EXPR
4985 	       || code == TRUTH_ORIF_EXPR)
4986 	   && operand_equal_p (lhs, rhs, 0))
4988       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
4989 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4990 	 which cases we can't do this.  */
4991       if (simple_operand_p (lhs))
4992 	return build2 (code == TRUTH_ANDIF_EXPR
4993 		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4996       else if (lang_hooks.decls.global_bindings_p () == 0
4997 	       && ! CONTAINS_PLACEHOLDER_P (lhs))
  /* Wrap the common operand in a SAVE_EXPR so it is evaluated only
     once even though both rebuilt range checks reference it.  */
4999 	  tree common = save_expr (lhs);
5001 	  if (0 != (lhs = build_range_check (type, common,
5002 					     or_op ? ! in0_p : in0_p,
5004 	      && (0 != (rhs = build_range_check (type, common,
5005 						 or_op ? ! in1_p : in1_p,
5008 	      if (strict_overflow_p)
5009 		fold_overflow_warning (warnmsg,
5010 				       WARN_STRICT_OVERFLOW_COMPARISON);
5011 	      return build2 (code == TRUTH_ANDIF_EXPR
5012 			     ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5021 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5022    bit value.  Arrange things so the extra bits will be set to zero if and
5023    only if C is signed-extended to its full width.  If MASK is nonzero,
5024    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
5027 unextend (tree c, int p, int unsignedp, tree mask)
5029   tree type = TREE_TYPE (c);
5030   int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  /* Nothing to do if C already fills its whole mode, or if it is
     unsigned (zero-extension already leaves the extra bits zero).  */
5033   if (p == modesize || unsignedp)
5036   /* We work by getting just the sign bit into the low-order bit, then
5037      into the high-order bit, then sign-extend.  We then XOR that value
  /* Isolate bit P-1 (the sign bit of the P-bit value) as 0 or 1.  */
5039   temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5040   temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5042   /* We must use a signed type in order to get an arithmetic right shift.
5043      However, we must also avoid introducing accidental overflows, so that
5044      a subsequent call to integer_zerop will work.  Hence we must
5045      do the type conversion here.  At this point, the constant is either
5046      zero or one, and the conversion to a signed type can never overflow.
5047      We could get an overflow if this conversion is done anywhere else.  */
5048   if (TYPE_UNSIGNED (type))
5049     temp = fold_convert (lang_hooks.types.signed_type (type), temp);
  /* Shift the bit to the top, then arithmetic-shift it back down so it
     is replicated through all the bits above position P-1.  */
5051   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5052   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5054     temp = const_binop (BIT_AND_EXPR, temp,
5055 			fold_convert (TREE_TYPE (c), mask), 0);
5056   /* If necessary, convert the type back to match the type of C.  */
5057   if (TYPE_UNSIGNED (type))
5058     temp = fold_convert (type, temp);
5060   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5063 /* Find ways of folding logical expressions of LHS and RHS:
5064    Try to merge two comparisons to the same innermost item.
5065    Look for range tests like "ch >= '0' && ch <= '9'".
5066    Look for combinations of simple terms on machines with expensive branches
5067    and evaluate the RHS unconditionally.
5069    For example, if we have p->a == 2 && p->b == 4 and we can make an
5070    object large enough to span both A and B, we can do this with a comparison
5071    against the object ANDed with the a mask.
5073    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5074    operations to do this with one comparison.
5076    We check for both normal comparisons and the BIT_AND_EXPRs made this by
5077    function and the one above.
5079    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5080    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5082    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5085    We return the simplified tree or 0 if no optimization is possible.  */
5088 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5090   /* If this is the "or" of two comparisons, we can do something if
5091      the comparisons are NE_EXPR.  If this is the "and", we can do something
5092      if the comparisons are EQ_EXPR.  I.e.,
5093 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
5095      WANTED_CODE is this operation code.  For single bit fields, we can
5096      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5097      comparison for one-bit fields.  */
5099   enum tree_code wanted_code;
5100   enum tree_code lcode, rcode;
  /* Naming convention used throughout: "ll_" = left operand of the LHS
     comparison, "lr_" = right operand of the LHS, "rl_"/"rr_" = the two
     operands of the RHS comparison.  "x??_bitpos" values are bit
     positions re-expressed relative to the wider combined field.  */
5101   tree ll_arg, lr_arg, rl_arg, rr_arg;
5102   tree ll_inner, lr_inner, rl_inner, rr_inner;
5103   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5104   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5105   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5106   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5107   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5108   enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5109   enum machine_mode lnmode, rnmode;
5110   tree ll_mask, lr_mask, rl_mask, rr_mask;
5111   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5112   tree l_const, r_const;
5113   tree lntype, rntype, result;
5114   int first_bit, end_bit;
5116   tree orig_lhs = lhs, orig_rhs = rhs;
5117   enum tree_code orig_code = code;
5119   /* Start by getting the comparison codes.  Fail if anything is volatile.
5120      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5121      it were surrounded with a NE_EXPR.  */
5123   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5126   lcode = TREE_CODE (lhs);
5127   rcode = TREE_CODE (rhs);
5129   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
  /* Canonicalize (x & 1) into (x & 1) != 0 so the rest of the function
     only has to deal with comparisons.  */
5131       lhs = build2 (NE_EXPR, truth_type, lhs,
5132 		    build_int_cst (TREE_TYPE (lhs), 0));
5136   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5138       rhs = build2 (NE_EXPR, truth_type, rhs,
5139 		    build_int_cst (TREE_TYPE (rhs), 0));
5143   if (TREE_CODE_CLASS (lcode) != tcc_comparison
5144       || TREE_CODE_CLASS (rcode) != tcc_comparison)
5147   ll_arg = TREE_OPERAND (lhs, 0);
5148   lr_arg = TREE_OPERAND (lhs, 1);
5149   rl_arg = TREE_OPERAND (rhs, 0);
5150   rr_arg = TREE_OPERAND (rhs, 1);
5152   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5153   if (simple_operand_p (ll_arg)
5154       && simple_operand_p (lr_arg))
5157       if (operand_equal_p (ll_arg, rl_arg, 0)
5158           && operand_equal_p (lr_arg, rr_arg, 0))
5160 	  result = combine_comparisons (code, lcode, rcode,
5161 					truth_type, ll_arg, lr_arg);
  /* Also handle the operands appearing swapped in the RHS comparison,
     by swapping its comparison code to compensate.  */
5165       else if (operand_equal_p (ll_arg, rr_arg, 0)
5166                && operand_equal_p (lr_arg, rl_arg, 0))
5168 	  result = combine_comparisons (code, lcode,
5169 					swap_tree_comparison (rcode),
5170 					truth_type, ll_arg, lr_arg);
5176   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5177 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5179   /* If the RHS can be evaluated unconditionally and its operands are
5180      simple, it wins to evaluate the RHS unconditionally on machines
5181      with expensive branches.  In this case, this isn't a comparison
5182      that can be merged.  Avoid doing this if the RHS is a floating-point
5183      comparison since those can trap.  */
5185   if (BRANCH_COST >= 2
5186       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5187       && simple_operand_p (rl_arg)
5188       && simple_operand_p (rr_arg))
5190       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5191       if (code == TRUTH_OR_EXPR
5192 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
5193 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
5194 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5195 	return build2 (NE_EXPR, truth_type,
5196 		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5198 		       build_int_cst (TREE_TYPE (ll_arg), 0));
5200       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5201       if (code == TRUTH_AND_EXPR
5202 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
5203 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
5204 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5205 	return build2 (EQ_EXPR, truth_type,
5206 		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5208 		       build_int_cst (TREE_TYPE (ll_arg), 0));
5210       if (LOGICAL_OP_NON_SHORT_CIRCUIT)
  /* Only rebuild if something actually changed, to avoid infinite
     re-folding of the same expression.  */
5212 	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5213 	    return build2 (code, truth_type, lhs, rhs);
5218   /* See if the comparisons can be merged.  Then get all the parameters for
5221   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5222       || (rcode != EQ_EXPR && rcode != NE_EXPR))
  /* Decode each comparison operand as a (possibly masked) bit-field
     reference: inner object, bit size/position, mode, signedness.  */
5226   ll_inner = decode_field_reference (ll_arg,
5227 				     &ll_bitsize, &ll_bitpos, &ll_mode,
5228 				     &ll_unsignedp, &volatilep, &ll_mask,
5230   lr_inner = decode_field_reference (lr_arg,
5231 				     &lr_bitsize, &lr_bitpos, &lr_mode,
5232 				     &lr_unsignedp, &volatilep, &lr_mask,
5234   rl_inner = decode_field_reference (rl_arg,
5235 				     &rl_bitsize, &rl_bitpos, &rl_mode,
5236 				     &rl_unsignedp, &volatilep, &rl_mask,
5238   rr_inner = decode_field_reference (rr_arg,
5239 				     &rr_bitsize, &rr_bitpos, &rr_mode,
5240 				     &rr_unsignedp, &volatilep, &rr_mask,
5243   /* It must be true that the inner operation on the lhs of each
5244      comparison must be the same if we are to be able to do anything.
5245      Then see if we have constants.  If not, the same must be true for
5247   if (volatilep || ll_inner == 0 || rl_inner == 0
5248       || ! operand_equal_p (ll_inner, rl_inner, 0))
5251   if (TREE_CODE (lr_arg) == INTEGER_CST
5252       && TREE_CODE (rr_arg) == INTEGER_CST)
5253     l_const = lr_arg, r_const = rr_arg;
5254   else if (lr_inner == 0 || rr_inner == 0
5255 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
5258     l_const = r_const = 0;
5260   /* If either comparison code is not correct for our logical operation,
5261      fail.  However, we can convert a one-bit comparison against zero into
5262      the opposite comparison against that bit being set in the field.  */
5264   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5265   if (lcode != wanted_code)
5267       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5269 	  /* Make the left operand unsigned, since we are only interested
5270 	     in the value of one bit.  Otherwise we are doing the wrong
5279   /* This is analogous to the code for l_const above.  */
5280   if (rcode != wanted_code)
5282       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5291   /* See if we can find a mode that contains both fields being compared on
5292      the left.  If we can't, fail.  Otherwise, update all constants and masks
5293      to be relative to a field of that size.  */
5294   first_bit = MIN (ll_bitpos, rl_bitpos);
5295   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5296   lnmode = get_best_mode (end_bit - first_bit, first_bit,
5297 			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5299   if (lnmode == VOIDmode)
5302   lnbitsize = GET_MODE_BITSIZE (lnmode);
  /* Align the combined field's start position down to a multiple of its
     size (lnbitsize is a power of two, so this mask trick works).  */
5303   lnbitpos = first_bit & ~ (lnbitsize - 1);
5304   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5305   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5307   if (BYTES_BIG_ENDIAN)
  /* On big-endian targets bit 0 is at the other end of the word, so
     mirror the positions within the combined field.  */
5309       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5310       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5313   ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5314 			 size_int (xll_bitpos), 0);
5315   rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5316 			 size_int (xrl_bitpos), 0);
5320       l_const = fold_convert (lntype, l_const);
5321       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5322       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
  /* If the constant has bits set outside the field's mask the
     comparison can never vary: warn and fold to a constant.  */
5323       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5324 					fold_build1 (BIT_NOT_EXPR,
5328 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5330 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5335       r_const = fold_convert (lntype, r_const);
5336       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5337       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5338       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5339 					fold_build1 (BIT_NOT_EXPR,
5343 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5345 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5349   /* If the right sides are not constant, do the same for it.  Also,
5350      disallow this optimization if a size or signedness mismatch occurs
5351      between the left and right sides.  */
5354       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5355 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5356 	  /* Make sure the two fields on the right
5357 	     correspond to the left without being swapped.  */
5358 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5361       first_bit = MIN (lr_bitpos, rr_bitpos);
5362       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5363       rnmode = get_best_mode (end_bit - first_bit, first_bit,
5364 			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5366       if (rnmode == VOIDmode)
5369       rnbitsize = GET_MODE_BITSIZE (rnmode);
5370       rnbitpos = first_bit & ~ (rnbitsize - 1);
5371       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5372       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5374       if (BYTES_BIG_ENDIAN)
5376 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5377 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5380       lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5381 			     size_int (xlr_bitpos), 0);
5382       rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5383 			     size_int (xrr_bitpos), 0);
5385       /* Make a mask that corresponds to both fields being compared.
5386 	 Do this for both items being compared.  If the operands are the
5387 	 same size and the bits being compared are in the same position
5388 	 then we can do this by masking both and comparing the masked
5390       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5391       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5392       if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5394 	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5395 				    ll_unsignedp || rl_unsignedp);
5396 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
5397 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5399 	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5400 				    lr_unsignedp || rr_unsignedp);
5401 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
5402 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5404 	  return build2 (wanted_code, truth_type, lhs, rhs);
5407       /* There is still another way we can do something:  If both pairs of
5408 	 fields being compared are adjacent, we may be able to make a wider
5409 	 field containing them both.
5411 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
5412 	 the mask must be shifted to account for the shift done by
5413 	 make_bit_field_ref.  */
5414       if ((ll_bitsize + ll_bitpos == rl_bitpos
5415 	   && lr_bitsize + lr_bitpos == rr_bitpos)
5416 	  || (ll_bitpos == rl_bitpos + rl_bitsize
5417 	      && lr_bitpos == rr_bitpos + rr_bitsize))
5421 	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5422 				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5423 	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5424 				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5426 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5427 				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5428 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5429 				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5431 	  /* Convert to the smaller type before masking out unwanted bits.  */
5433 	  if (lntype != rntype)
5435 	      if (lnbitsize > rnbitsize)
5437 		  lhs = fold_convert (rntype, lhs);
5438 		  ll_mask = fold_convert (rntype, ll_mask);
5441 	      else if (lnbitsize < rnbitsize)
5443 		  rhs = fold_convert (lntype, rhs);
5444 		  lr_mask = fold_convert (lntype, lr_mask);
5449 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5450 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5452 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5453 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5455 	  return build2 (wanted_code, truth_type, lhs, rhs);
5461   /* Handle the case of comparisons with constants.  If there is something in
5462      common between the masks, those bits of the constants must be the same.
5463      If not, the condition is always false.  Test for this to avoid generating
5464      incorrect code below.  */
5465   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5466   if (! integer_zerop (result)
5467       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5468 			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5470       if (wanted_code == NE_EXPR)
5472 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
5473 	  return constant_boolean_node (true, truth_type);
5477 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5478 	  return constant_boolean_node (false, truth_type);
5482   /* Construct the expression we will return.  First get the component
5483      reference we will make.  Unless the mask is all ones the width of
5484      that field, perform the mask operation.  Then compare with the
5486   result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5487 			       ll_unsignedp || rl_unsignedp);
5489   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5490   if (! all_ones_mask_p (ll_mask, lnbitsize))
5491     result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5493   return build2 (wanted_code, truth_type, result,
5494 		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5497 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5501 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5504   enum tree_code op_code;
5505   tree comp_const = op1;
5507   int consts_equal, consts_lt;
5510   STRIP_SIGN_NOPS (arg0);
5512   op_code = TREE_CODE (arg0);
5513   minmax_const = TREE_OPERAND (arg0, 1);
  /* Precompute how the min/max constant relates to the comparison
     constant; the case analysis below branches on these.  */
5514   consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5515   consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5516   inner = TREE_OPERAND (arg0, 0);
5518   /* If something does not permit us to optimize, return the original tree.  */
5519   if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5520       || TREE_CODE (comp_const) != INTEGER_CST
5521       || TREE_OVERFLOW (comp_const)
5522       || TREE_CODE (minmax_const) != INTEGER_CST
5523       || TREE_OVERFLOW (minmax_const))
5526   /* Now handle all the various comparison codes.  We only handle EQ_EXPR
5527      and GT_EXPR, doing the rest with recursive calls using logical
5531     case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
  /* NE/LT/LE are the inverses of EQ/GE/GT: recurse on the inverted
     comparison and invert the result.  */
5533 	tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5536 	  return invert_truthvalue (tem);
  /* GE is handled as (EQ || GT).  */
5542 	fold_build2 (TRUTH_ORIF_EXPR, type,
5543 		     optimize_minmax_comparison
5544 		     (EQ_EXPR, type, arg0, comp_const),
5545 		     optimize_minmax_comparison
5546 		     (GT_EXPR, type, arg0, comp_const));
5549       if (op_code == MAX_EXPR && consts_equal)
5550 	/* MAX (X, 0) == 0  ->  X <= 0  */
5551 	return fold_build2 (LE_EXPR, type, inner, comp_const);
5553       else if (op_code == MAX_EXPR && consts_lt)
5554 	/* MAX (X, 0) == 5  ->  X == 5   */
5555 	return fold_build2 (EQ_EXPR, type, inner, comp_const);
5557       else if (op_code == MAX_EXPR)
5558 	/* MAX (X, 0) == -1  ->  false  */
5559 	return omit_one_operand (type, integer_zero_node, inner);
5561       else if (consts_equal)
5562 	/* MIN (X, 0) == 0  ->  X >= 0  */
5563 	return fold_build2 (GE_EXPR, type, inner, comp_const);
5566 	/* MIN (X, 0) == 5  ->  false  */
5567 	return omit_one_operand (type, integer_zero_node, inner);
5570 	/* MIN (X, 0) == -1  ->  X == -1  */
5571 	return fold_build2 (EQ_EXPR, type, inner, comp_const);
5574       if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5575 	/* MAX (X, 0) > 0  ->  X > 0
5576 	   MAX (X, 0) > 5  ->  X > 5  */
5577 	return fold_build2 (GT_EXPR, type, inner, comp_const);
5579       else if (op_code == MAX_EXPR)
5580 	/* MAX (X, 0) > -1  ->  true  */
5581 	return omit_one_operand (type, integer_one_node, inner);
5583       else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5584 	/* MIN (X, 0) > 0  ->  false
5585 	   MIN (X, 0) > 5  ->  false  */
5586 	return omit_one_operand (type, integer_zero_node, inner);
5589 	/* MIN (X, 0) > -1  ->  X > -1  */
5590 	return fold_build2 (GT_EXPR, type, inner, comp_const);
5597 /* T is an integer expression that is being multiplied, divided, or taken a
5598    modulus (CODE says which and what kind of divide or modulus) by a
5599    constant C.  See if we can eliminate that operation by folding it with
5600    other operations already in T.  WIDE_TYPE, if non-null, is a type that
5601    should be used for the computation if wider than our type.
5603    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5604    (X * 2) + (Y * 4).  We must, however, be assured that either the original
5605    expression would not overflow or that overflow is undefined for the type
5606    in the language in question.
5608    We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5609    the machine has a multiply-accumulate insn or that this is part of an
5610    addressing calculation.
5612    If we return a non-null expression, it is an equivalent form of the
5613    original computation, but need not be in the original type.
5615    We set *STRICT_OVERFLOW_P to true if the return values depends on
5616    signed overflow being undefined.  Otherwise we do not change
5617    *STRICT_OVERFLOW_P.  */
5620 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5621 		bool *strict_overflow_p)
5623   /* To avoid exponential search depth, refuse to allow recursion past
5624      three levels.  Beyond that (1) it's highly unlikely that we'll find
5625      something interesting and (2) we've probably processed it before
5626      when we built the inner expression.  */
  /* Thin wrapper: the depth bookkeeping surrounds the real work done
     by extract_muldiv_1.  */
5635   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5642 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5643 		  bool *strict_overflow_p)
5645   tree type = TREE_TYPE (t);
5646   enum tree_code tcode = TREE_CODE (t);
  /* Compute in WIDE_TYPE when it is strictly wider than T's type;
     otherwise stay in T's own type.  */
5647   tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5648 				   > GET_MODE_SIZE (TYPE_MODE (type)))
5649 		? wide_type : type);
5651   int same_p = tcode == code;
5652   tree op0 = NULL_TREE, op1 = NULL_TREE;
5653   bool sub_strict_overflow_p;
5655   /* Don't deal with constants of zero here; they confuse the code below.  */
5656   if (integer_zerop (c))
5659   if (TREE_CODE_CLASS (tcode) == tcc_unary)
5660     op0 = TREE_OPERAND (t, 0);
5662   if (TREE_CODE_CLASS (tcode) == tcc_binary)
5663     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5665   /* Note that we need not handle conditional operations here since fold
5666      already handles those cases.  So just do arithmetic here.  */
5670       /* For a constant, we can always simplify if we are a multiply
5671 	 or (for divide and modulus) if it is a multiple of our constant.  */
5672       if (code == MULT_EXPR
5673 	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5674 	return const_binop (code, fold_convert (ctype, t),
5675 			    fold_convert (ctype, c), 0);
5678     case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
5679       /* If op0 is an expression ...  */
5680       if ((COMPARISON_CLASS_P (op0)
5681 	   || UNARY_CLASS_P (op0)
5682 	   || BINARY_CLASS_P (op0)
5683 	   || VL_EXP_CLASS_P (op0)
5684 	   || EXPRESSION_CLASS_P (op0))
5685 	  /* ... and is unsigned, and its type is smaller than ctype,
5686 	     then we cannot pass through as widening.  */
5687 	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5688 	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5689 		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5690 	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
5691 		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5692 	  /* ... or this is a truncation (t is narrower than op0),
5693 	     then we cannot pass through this narrowing.  */
5694 	  || (GET_MODE_SIZE (TYPE_MODE (type))
5695 	      < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5696 	  /* ... or signedness changes for division or modulus,
5697 	     then we cannot pass through this conversion.  */
5698 	  || (code != MULT_EXPR
5699 	      && (TYPE_UNSIGNED (ctype)
5700 		  != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5703       /* Pass the constant down and see if we can make a simplification.  If
5704 	 we can, replace this expression with the inner simplification for
5705 	 possible later conversion to our or some other type.  */
5706       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5707 	  && TREE_CODE (t2) == INTEGER_CST
5708 	  && !TREE_OVERFLOW (t2)
5709 	  && (0 != (t1 = extract_muldiv (op0, t2, code,
5711 					 ? ctype : NULL_TREE,
5712 					 strict_overflow_p))))
5717       /* If widening the type changes it from signed to unsigned, then we
5718          must avoid building ABS_EXPR itself as unsigned.  */
5719       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5721           tree cstype = (*lang_hooks.types.signed_type) (ctype);
5722           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5725               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5726               return fold_convert (ctype, t1);
5732       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5734 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5737     case MIN_EXPR:  case MAX_EXPR:
5738       /* If widening the type changes the signedness, then we can't perform
5739 	 this optimization as that changes the result.  */
5740       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5743       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
5744       sub_strict_overflow_p = false;
5745       if ((t1 = extract_muldiv (op0, c, code, wide_type,
5746 				&sub_strict_overflow_p)) != 0
5747 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
5748 				   &sub_strict_overflow_p)) != 0)
  /* Dividing/multiplying by a negative constant flips the ordering,
     so MIN becomes MAX and vice versa.  */
5750 	  if (tree_int_cst_sgn (c) < 0)
5751 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5752 	  if (sub_strict_overflow_p)
5753 	    *strict_overflow_p = true;
5754 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5755 			      fold_convert (ctype, t2));
5759     case LSHIFT_EXPR:  case RSHIFT_EXPR:
5760       /* If the second operand is constant, this is a multiplication
5761 	 or floor division, by a power of two, so we can treat it that
5762 	 way unless the multiplier or divisor overflows.  Signed
5763 	 left-shift overflow is implementation-defined rather than
5764 	 undefined in C90, so do not convert signed left shift into
5766       if (TREE_CODE (op1) == INTEGER_CST
5767 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5768 	  /* const_binop may not detect overflow correctly,
5769 	     so check for it explicitly here.  */
5770 	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5771 	  && TREE_INT_CST_HIGH (op1) == 0
5772 	  && 0 != (t1 = fold_convert (ctype,
5773 				      const_binop (LSHIFT_EXPR,
5776 	  && !TREE_OVERFLOW (t1))
  /* Rewrite x << n as x * 2^n (or x >> n as x / 2^n) and recurse on
     that equivalent form.  */
5777 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5778 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
5779 				       ctype, fold_convert (ctype, op0), t1),
5780 			       c, code, wide_type, strict_overflow_p);
5783     case PLUS_EXPR:  case MINUS_EXPR:
5784       /* See if we can eliminate the operation on both sides.  If we can, we
5785 	 can return a new PLUS or MINUS.  If we can't, the only remaining
5786 	 cases where we can do anything are if the second operand is a
5788       sub_strict_overflow_p = false;
5789       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5790       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5791       if (t1 != 0 && t2 != 0
5792 	  && (code == MULT_EXPR
5793 	      /* If not multiplication, we can only do this if both operands
5794 		 are divisible by c.  */
5795 	      || (multiple_of_p (ctype, op0, c)
5796 		  && multiple_of_p (ctype, op1, c))))
5798 	  if (sub_strict_overflow_p)
5799 	    *strict_overflow_p = true;
5800 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5801 			      fold_convert (ctype, t2));
5804       /* If this was a subtraction, negate OP1 and set it to be an addition.
5805 	 This simplifies the logic below.  */
5806       if (tcode == MINUS_EXPR)
5807 	tcode = PLUS_EXPR, op1 = negate_expr (op1);
5809       if (TREE_CODE (op1) != INTEGER_CST)
5812       /* If either OP1 or C are negative, this optimization is not safe for
5813 	 some of the division and remainder types while for others we need
5814 	 to change the code.  */
5815       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5817 	  if (code == CEIL_DIV_EXPR)
5818 	    code = FLOOR_DIV_EXPR;
5819 	  else if (code == FLOOR_DIV_EXPR)
5820 	    code = CEIL_DIV_EXPR;
5821 	  else if (code != MULT_EXPR
5822 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5826       /* If it's a multiply or a division/modulus operation of a multiple
5827          of our constant, do the operation and verify it doesn't overflow.  */
5828       if (code == MULT_EXPR
5829 	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5831 	  op1 = const_binop (code, fold_convert (ctype, op1),
5832 			     fold_convert (ctype, c), 0);
5833 	  /* We allow the constant to overflow with wrapping semantics.  */
5835 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5841       /* If we have an unsigned type is not a sizetype, we cannot widen
5842 	 the operation since it will change the result if the original
5843 	 computation overflowed.  */
5844       if (TYPE_UNSIGNED (ctype)
5845 	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5849       /* If we were able to eliminate our operation from the first side,
5850 	 apply our operation to the second side and reform the PLUS.  */
5851       if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5852 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5854       /* The last case is if we are a multiply.  In that case, we can
5855 	 apply the distributive law to commute the multiply and addition
5856 	 if the multiplication of the constants doesn't overflow.  */
5857       if (code == MULT_EXPR)
5858 	return fold_build2 (tcode, ctype,
5859 			    fold_build2 (code, ctype,
5860 					 fold_convert (ctype, op0),
5861 					 fold_convert (ctype, c)),
5867       /* We have a special case here if we are doing something like
5868 	 (C * 8) % 4 since we know that's zero.  */
5869       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5870 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5871 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5872 	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5873 	return omit_one_operand (type, integer_zero_node, op0);
5875       /* ... fall through ...  */
5877     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
5878     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
5879       /* If we can extract our operation from the LHS, do so and return a
5880 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
5881 	 do something only if the second operand is a constant.  */
5883 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
5884 				   strict_overflow_p)) != 0)
5885 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5886 			    fold_convert (ctype, op1));
5887       else if (tcode == MULT_EXPR && code == MULT_EXPR
5888 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
5889 					strict_overflow_p)) != 0)
5890 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5891 			    fold_convert (ctype, t1));
5892       else if (TREE_CODE (op1) != INTEGER_CST)
5895       /* If these are the same operation types, we can associate them
5896 	 assuming no overflow.  */
5898 	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5899 				     fold_convert (ctype, c), 0))
5900 	  && !TREE_OVERFLOW (t1))
5901 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5903       /* If these operations "cancel" each other, we have the main
5904 	 optimizations of this pass, which occur when either constant is a
5905 	 multiple of the other, in which case we replace this with either an
5906 	 operation or CODE or TCODE.
5908 	 If we have an unsigned type that is not a sizetype, we cannot do
5909 	 this since it will change the result if the original computation
5911       if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5912 	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5913 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5914 	      || (tcode == MULT_EXPR
5915 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5916 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
  /* OP1 is a multiple of C: fold to a single TCODE by OP1/C.  */
5918 	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5920 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
5921 		*strict_overflow_p = true;
5922 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5923 				  fold_convert (ctype,
5924 						const_binop (TRUNC_DIV_EXPR,
  /* Otherwise C is a multiple of OP1: fold to a single CODE by C/OP1.  */
5927 	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5929 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
5930 		*strict_overflow_p = true;
5931 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
5932 				  fold_convert (ctype,
5933 						const_binop (TRUNC_DIV_EXPR,
5946 /* Return a node which has the indicated constant VALUE (either 0 or
5947 1), and is of the indicated TYPE. */
/* NOTE(review): this listing is elided -- the return-type line, braces and
   some statements of this definition were dropped by the extraction.  */
5950 constant_boolean_node (int value, tree type)
/* Reuse the shared nodes for the common integer/boolean types; only
   build a fresh INTEGER_CST for any other type.  */
5952 if (type == integer_type_node)
5953 return value ? integer_one_node : integer_zero_node;
5954 else if (type == boolean_type_node)
5955 return value ? boolean_true_node : boolean_false_node;
5957 return build_int_cst (type, value);
5961 /* Return true if expr looks like an ARRAY_REF and set base and
5962 offset to the appropriate trees. If there is no offset,
5963 offset is set to NULL_TREE. Base will be canonicalized to
5964 something you can get the element type from using
5965 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5966 in bytes to the base. */
/* NOTE(review): the listing is elided -- braces, else-arms and return
   statements of this function were dropped by the extraction.  */
5969 extract_array_ref (tree expr, tree *base, tree *offset)
5971 /* One canonical form is a PLUS_EXPR with the first
5972 argument being an ADDR_EXPR with a possible NOP_EXPR
5974 if (TREE_CODE (expr) == PLUS_EXPR)
5976 tree op0 = TREE_OPERAND (expr, 0);
5977 tree inner_base, dummy1;
5978 /* Strip NOP_EXPRs here because the C frontends and/or
5979 folders present us (int *)&x.a + 4B possibly. */
/* Recurse on the LHS; on success combine the inner offset (if any)
   with the outer PLUS_EXPR's constant term.  */
5981 if (extract_array_ref (op0, &inner_base, &dummy1))
5984 if (dummy1 == NULL_TREE)
5985 *offset = TREE_OPERAND (expr, 1);
5987 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5988 dummy1, TREE_OPERAND (expr, 1));
5992 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5993 which we transform into an ADDR_EXPR with appropriate
5994 offset. For other arguments to the ADDR_EXPR we assume
5995 zero offset and as such do not care about the ADDR_EXPR
5996 type and strip possible nops from it. */
5997 else if (TREE_CODE (expr) == ADDR_EXPR)
5999 tree op0 = TREE_OPERAND (expr, 0);
6000 if (TREE_CODE (op0) == ARRAY_REF)
/* Byte offset of &a[idx] is idx * element_size.  */
6002 tree idx = TREE_OPERAND (op0, 1);
6003 *base = TREE_OPERAND (op0, 0);
6004 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
6005 array_ref_element_size (op0));
6009 /* Handle array-to-pointer decay as &a. */
6010 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
6011 *base = TREE_OPERAND (expr, 0);
6014 *offset = NULL_TREE;
6018 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
6019 else if (SSA_VAR_P (expr)
6020 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
6023 *offset = NULL_TREE;
6031 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6032 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6033 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6034 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6035 COND is the first argument to CODE; otherwise (as in the example
6036 given here), it is the second argument. TYPE is the type of the
6037 original expression. Return NULL_TREE if no simplification is
/* NOTE(review): the listing is elided -- the return type, braces and
   several statements were dropped by the extraction.  */
6041 fold_binary_op_with_conditional_arg (enum tree_code code,
6042 tree type, tree op0, tree op1,
6043 tree cond, tree arg, int cond_first_p)
6045 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6046 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6047 tree test, true_value, false_value;
6048 tree lhs = NULL_TREE;
6049 tree rhs = NULL_TREE;
6051 /* This transformation is only worthwhile if we don't have to wrap
6052 arg in a SAVE_EXPR, and the operation can be simplified on at least
6053 one of the branches once it's pushed inside the COND_EXPR. */
6054 if (!TREE_CONSTANT (arg))
/* Either COND is a full COND_EXPR (extract its three operands) ...  */
6057 if (TREE_CODE (cond) == COND_EXPR)
6059 test = TREE_OPERAND (cond, 0);
6060 true_value = TREE_OPERAND (cond, 1);
6061 false_value = TREE_OPERAND (cond, 2);
6062 /* If this operand throws an expression, then it does not make
6063 sense to try to perform a logical or arithmetic operation
6065 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6067 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* ... or COND is a boolean-valued comparison, treated as
   (cond ? true : false) with constant arms.  */
6072 tree testtype = TREE_TYPE (cond);
6074 true_value = constant_boolean_node (true, testtype);
6075 false_value = constant_boolean_node (false, testtype);
6078 arg = fold_convert (arg_type, arg);
/* Push the operation into each arm, respecting operand order.  */
6081 true_value = fold_convert (cond_type, true_value);
6083 lhs = fold_build2 (code, type, true_value, arg);
6085 lhs = fold_build2 (code, type, arg, true_value);
6089 false_value = fold_convert (cond_type, false_value);
6091 rhs = fold_build2 (code, type, false_value, arg);
6093 rhs = fold_build2 (code, type, arg, false_value);
6096 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6097 return fold_convert (type, test);
6101 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6103 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6104 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6105 ADDEND is the same as X.
6107 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6108 and finite. The problematic cases are when X is zero, and its mode
6109 has signed zeros. In the case of rounding towards -infinity,
6110 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6111 modes, X + 0 is not the same as X because -0 + 0 is 0. */
/* NOTE(review): the listing is elided -- the return type, braces and some
   early "return false/true" lines were dropped by the extraction.  */
6114 fold_real_zero_addition_p (tree type, tree addend, int negate)
6116 if (!real_zerop (addend))
6119 /* Don't allow the fold with -fsignaling-nans. */
6120 if (HONOR_SNANS (TYPE_MODE (type)))
6123 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6124 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6127 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6128 if (TREE_CODE (addend) == REAL_CST
6129 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6132 /* The mode has signed zeros, and we have to honor their sign.
6133 In this situation, there is only one case we can return true for.
6134 X - 0 is the same as X unless rounding towards -infinity is
6136 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6139 /* Subroutine of fold() that checks comparisons of built-in math
6140 functions against real constants.
6142 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6143 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6144 is the type of the result and ARG0 and ARG1 are the operands of the
6145 comparison. ARG1 must be a TREE_REAL_CST.
6147 The function returns the constant folded tree if a simplification
6148 can be made, and NULL_TREE otherwise. */
/* NOTE(review): the listing is elided -- the return type, braces, several
   declarations and the trailing return were dropped by the extraction.
   Only the sqrt() cases are visible here.  */
6151 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6152 tree type, tree arg0, tree arg1)
6156 if (BUILTIN_SQRT_P (fcode))
6158 tree arg = CALL_EXPR_ARG (arg0, 0);
6159 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6161 c = TREE_REAL_CST (arg1);
/* sqrt(x) is never negative (for non-NaN x), so a negative RHS
   decides most comparisons outright.  */
6162 if (REAL_VALUE_NEGATIVE (c))
6164 /* sqrt(x) < y is always false, if y is negative. */
6165 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6166 return omit_one_operand (type, integer_zero_node, arg);
6168 /* sqrt(x) > y is always true, if y is negative and we
6169 don't care about NaNs, i.e. negative values of x. */
6170 if (code == NE_EXPR || !HONOR_NANS (mode))
6171 return omit_one_operand (type, integer_one_node, arg);
6173 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6174 return fold_build2 (GE_EXPR, type, arg,
6175 build_real (TREE_TYPE (arg), dconst0));
6177 else if (code == GT_EXPR || code == GE_EXPR)
/* Square the bound; overflow to +Inf is handled below.  */
6181 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6182 real_convert (&c2, mode, &c2);
6184 if (REAL_VALUE_ISINF (c2))
6186 /* sqrt(x) > y is x == +Inf, when y is very large. */
6187 if (HONOR_INFINITIES (mode))
6188 return fold_build2 (EQ_EXPR, type, arg,
6189 build_real (TREE_TYPE (arg), c2));
6191 /* sqrt(x) > y is always false, when y is very large
6192 and we don't care about infinities. */
6193 return omit_one_operand (type, integer_zero_node, arg);
6196 /* sqrt(x) > c is the same as x > c*c. */
6197 return fold_build2 (code, type, arg,
6198 build_real (TREE_TYPE (arg), c2));
6200 else if (code == LT_EXPR || code == LE_EXPR)
6204 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6205 real_convert (&c2, mode, &c2);
6207 if (REAL_VALUE_ISINF (c2))
6209 /* sqrt(x) < y is always true, when y is a very large
6210 value and we don't care about NaNs or Infinities. */
6211 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6212 return omit_one_operand (type, integer_one_node, arg);
6214 /* sqrt(x) < y is x != +Inf when y is very large and we
6215 don't care about NaNs. */
6216 if (! HONOR_NANS (mode))
6217 return fold_build2 (NE_EXPR, type, arg,
6218 build_real (TREE_TYPE (arg), c2));
6220 /* sqrt(x) < y is x >= 0 when y is very large and we
6221 don't care about Infinities. */
6222 if (! HONOR_INFINITIES (mode))
6223 return fold_build2 (GE_EXPR, type, arg,
6224 build_real (TREE_TYPE (arg), dconst0));
6226 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
/* ARG is used twice below, so save_expr it -- unless we are at
   global scope or it contains a PLACEHOLDER_EXPR, where SAVE_EXPR
   is not usable.  */
6227 if (lang_hooks.decls.global_bindings_p () != 0
6228 || CONTAINS_PLACEHOLDER_P (arg))
6231 arg = save_expr (arg);
6232 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6233 fold_build2 (GE_EXPR, type, arg,
6234 build_real (TREE_TYPE (arg),
6236 fold_build2 (NE_EXPR, type, arg,
6237 build_real (TREE_TYPE (arg),
6241 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6242 if (! HONOR_NANS (mode))
6243 return fold_build2 (code, type, arg,
6244 build_real (TREE_TYPE (arg), c2));
6246 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6247 if (lang_hooks.decls.global_bindings_p () == 0
6248 && ! CONTAINS_PLACEHOLDER_P (arg))
6250 arg = save_expr (arg);
6251 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6252 fold_build2 (GE_EXPR, type, arg,
6253 build_real (TREE_TYPE (arg),
6255 fold_build2 (code, type, arg,
6256 build_real (TREE_TYPE (arg),
6265 /* Subroutine of fold() that optimizes comparisons against Infinities,
6266 either +Inf or -Inf.
6268 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6269 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6270 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6272 The function returns the constant folded tree if a simplification
6273 can be made, and NULL_TREE otherwise. */
/* NOTE(review): the listing is elided -- the return type, braces, the
   switch (code) framing of the case bodies below, and the final return
   were dropped by the extraction.  */
6276 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6278 enum machine_mode mode;
6279 REAL_VALUE_TYPE max;
6283 mode = TYPE_MODE (TREE_TYPE (arg0));
6285 /* For negative infinity swap the sense of the comparison. */
6286 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6288 code = swap_tree_comparison (code);
6293 /* x > +Inf is always false, if we ignore sNANs. */
6294 if (HONOR_SNANS (mode))
6296 return omit_one_operand (type, integer_zero_node, arg0);
6299 /* x <= +Inf is always true, if we don't care about NaNs. */
6300 if (! HONOR_NANS (mode))
6301 return omit_one_operand (type, integer_one_node, arg0);
6303 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6304 if (lang_hooks.decls.global_bindings_p () == 0
6305 && ! CONTAINS_PLACEHOLDER_P (arg0))
6307 arg0 = save_expr (arg0);
6308 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6314 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6315 real_maxval (&max, neg, mode);
6316 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6317 arg0, build_real (TREE_TYPE (arg0), max));
6320 /* x < +Inf is always equal to x <= DBL_MAX. */
6321 real_maxval (&max, neg, mode);
6322 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6323 arg0, build_real (TREE_TYPE (arg0), max));
6326 /* x != +Inf is always equal to !(x > DBL_MAX). */
6327 real_maxval (&max, neg, mode);
6328 if (! HONOR_NANS (mode))
6329 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6330 arg0, build_real (TREE_TYPE (arg0), max));
6332 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6333 arg0, build_real (TREE_TYPE (arg0), max));
6334 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6343 /* Subroutine of fold() that optimizes comparisons of a division by
6344 a nonzero integer constant against an integer constant, i.e.
6347 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6348 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6349 are the operands of the comparison. ARG1 must be an INTEGER_CST
(the code below reads it with TREE_INT_CST_LOW/HIGH).
6351 The function returns the constant folded tree if a simplification
6352 can be made, and NULL_TREE otherwise. */
/* NOTE(review): the listing is elided -- the return type, braces, case
   labels of the switches, and the final switch (code) framing of the
   comparison cases were dropped by the extraction.  The strategy: compute
   the range [lo, hi] of ARG00 values for which ARG00 / ARG01 == ARG1,
   then express the comparison as a range check on ARG00.  */
6355 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6357 tree prod, tmp, hi, lo;
6358 tree arg00 = TREE_OPERAND (arg0, 0);
6359 tree arg01 = TREE_OPERAND (arg0, 1);
6360 unsigned HOST_WIDE_INT lpart;
6361 HOST_WIDE_INT hpart;
6362 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6366 /* We have to do this the hard way to detect unsigned overflow.
6367 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6368 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6369 TREE_INT_CST_HIGH (arg01),
6370 TREE_INT_CST_LOW (arg1),
6371 TREE_INT_CST_HIGH (arg1),
6372 &lpart, &hpart, unsigned_p);
6373 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6375 neg_overflow = false;
/* Unsigned division: range is [prod, prod + (arg01 - 1)].  */
6379 tmp = int_const_binop (MINUS_EXPR, arg01,
6380 build_int_cst (TREE_TYPE (arg01), 1), 0);
6383 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6384 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6385 TREE_INT_CST_HIGH (prod),
6386 TREE_INT_CST_LOW (tmp),
6387 TREE_INT_CST_HIGH (tmp),
6388 &lpart, &hpart, unsigned_p);
6389 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6390 -1, overflow | TREE_OVERFLOW (prod));
/* Signed division, positive divisor: range depends on sign of ARG1.  */
6392 else if (tree_int_cst_sgn (arg01) >= 0)
6394 tmp = int_const_binop (MINUS_EXPR, arg01,
6395 build_int_cst (TREE_TYPE (arg01), 1), 0);
6396 switch (tree_int_cst_sgn (arg1))
6399 neg_overflow = true;
6400 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6405 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6410 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6420 /* A negative divisor reverses the relational operators. */
6421 code = swap_tree_comparison (code);
6423 tmp = int_const_binop (PLUS_EXPR, arg01,
6424 build_int_cst (TREE_TYPE (arg01), 1), 0);
6425 switch (tree_int_cst_sgn (arg1))
6428 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6433 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6438 neg_overflow = true;
6439 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* EQ_EXPR: fold to a range check, handling saturated bounds.  */
6451 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6452 return omit_one_operand (type, integer_zero_node, arg00);
6453 if (TREE_OVERFLOW (hi))
6454 return fold_build2 (GE_EXPR, type, arg00, lo);
6455 if (TREE_OVERFLOW (lo))
6456 return fold_build2 (LE_EXPR, type, arg00, hi);
6457 return build_range_check (type, arg00, 1, lo, hi);
/* NE_EXPR: the negated range check.  */
6460 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6461 return omit_one_operand (type, integer_one_node, arg00);
6462 if (TREE_OVERFLOW (hi))
6463 return fold_build2 (LT_EXPR, type, arg00, lo);
6464 if (TREE_OVERFLOW (lo))
6465 return fold_build2 (GT_EXPR, type, arg00, hi);
6466 return build_range_check (type, arg00, 0, lo, hi);
/* LT_EXPR: compare against the low bound.  */
6469 if (TREE_OVERFLOW (lo))
6471 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6472 return omit_one_operand (type, tmp, arg00);
6474 return fold_build2 (LT_EXPR, type, arg00, lo);
/* LE_EXPR: compare against the high bound.  */
6477 if (TREE_OVERFLOW (hi))
6479 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6480 return omit_one_operand (type, tmp, arg00);
6482 return fold_build2 (LE_EXPR, type, arg00, hi);
/* GT_EXPR.  */
6485 if (TREE_OVERFLOW (hi))
6487 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6488 return omit_one_operand (type, tmp, arg00);
6490 return fold_build2 (GT_EXPR, type, arg00, hi);
/* GE_EXPR.  */
6493 if (TREE_OVERFLOW (lo))
6495 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6496 return omit_one_operand (type, tmp, arg00);
6498 return fold_build2 (GE_EXPR, type, arg00, lo);
6508 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6509 equality/inequality test, then return a simplified form of the test
6510 using a sign test. Otherwise return NULL. TYPE is the desired
/* NOTE(review): the listing is elided -- the return type, the RESULT_TYPE
   parameter's comment tail, braces and the trailing "return NULL_TREE"
   were dropped by the extraction.  */
6514 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6517 /* If this is testing a single bit, we can optimize the test. */
6518 if ((code == NE_EXPR || code == EQ_EXPR)
6519 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6520 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6522 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6523 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6524 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6526 if (arg00 != NULL_TREE
6527 /* This is only a win if casting to a signed type is cheap,
6528 i.e. when arg00's type is not a partial mode. */
6529 && TYPE_PRECISION (TREE_TYPE (arg00))
6530 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6532 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6533 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6534 result_type, fold_convert (stype, arg00),
6535 build_int_cst (stype, 0));
6542 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6543 equality/inequality test, then return a simplified form of
6544 the test using shifts and logical operations. Otherwise return
6545 NULL. TYPE is the desired result type. */
/* NOTE(review): the listing is elided -- the return type, braces, several
   declarations (tem, ops_unsigned, one) and the #else/#endif of the
   LOAD_EXTEND_OP conditional were dropped by the extraction.  */
6548 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6551 /* If this is testing a single bit, we can optimize the test. */
6552 if ((code == NE_EXPR || code == EQ_EXPR)
6553 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6554 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6556 tree inner = TREE_OPERAND (arg0, 0);
6557 tree type = TREE_TYPE (arg0);
6558 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6559 enum machine_mode operand_mode = TYPE_MODE (type);
6561 tree signed_type, unsigned_type, intermediate_type;
6564 /* First, see if we can fold the single bit test into a sign-bit
6566 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6571 /* Otherwise we have (A & C) != 0 where C is a single bit,
6572 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6573 Similarly for (A & C) == 0. */
6575 /* If INNER is a right shift of a constant and it plus BITNUM does
6576 not overflow, adjust BITNUM and INNER. */
6577 if (TREE_CODE (inner) == RSHIFT_EXPR
6578 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6579 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6580 && bitnum < TYPE_PRECISION (type)
6581 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6582 bitnum - TYPE_PRECISION (type)))
6584 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6585 inner = TREE_OPERAND (inner, 0);
6588 /* If we are going to be able to omit the AND below, we must do our
6589 operations as unsigned. If we must use the AND, we have a choice.
6590 Normally unsigned is faster, but for some machines signed is. */
6591 #ifdef LOAD_EXTEND_OP
6592 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6593 && !flag_syntax_only) ? 0 : 1;
6598 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6599 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6600 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6601 inner = fold_convert (intermediate_type, inner);
/* Shift the tested bit down to bit 0.  */
6604 inner = build2 (RSHIFT_EXPR, intermediate_type,
6605 inner, size_int (bitnum));
6607 one = build_int_cst (intermediate_type, 1);
/* For == 0, invert the bit before masking.  */
6609 if (code == EQ_EXPR)
6610 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6612 /* Put the AND last so it can combine with more things. */
6613 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6615 /* Make sure to return the proper type. */
6616 inner = fold_convert (result_type, inner);
6623 /* Check whether we are allowed to reorder operands arg0 and arg1,
6624 such that the evaluation of arg1 occurs before arg0. */
/* NOTE(review): the listing is elided -- the return type, braces and the
   "return true" consequents of the first two ifs were dropped by the
   extraction (lines 6630 and 6632 of the original are missing).  */
6627 reorder_operands_p (tree arg0, tree arg1)
6629 if (! flag_evaluation_order)
6631 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
/* Under -ffloat-store style strict evaluation order, reordering is
   only allowed when neither operand has side effects.  */
6633 return ! TREE_SIDE_EFFECTS (arg0)
6634 && ! TREE_SIDE_EFFECTS (arg1);
6637 /* Test whether it is preferable to swap two operands, ARG0 and
6638 ARG1, for example because ARG0 is an integer constant and ARG1
6639 isn't. If REORDER is true, only recommend swapping if we can
6640 evaluate the operands in reverse order. */
/* NOTE(review): the listing is elided -- the return type, braces and the
   "return 0/1" consequents of each if were dropped by the extraction.
   The pattern throughout: constants (and SSA_NAMEs/variables) are pushed
   toward the second operand position to canonicalize expressions.  */
6643 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6645 STRIP_SIGN_NOPS (arg0);
6646 STRIP_SIGN_NOPS (arg1);
6648 if (TREE_CODE (arg1) == INTEGER_CST)
6650 if (TREE_CODE (arg0) == INTEGER_CST)
6653 if (TREE_CODE (arg1) == REAL_CST)
6655 if (TREE_CODE (arg0) == REAL_CST)
6658 if (TREE_CODE (arg1) == COMPLEX_CST)
6660 if (TREE_CODE (arg0) == COMPLEX_CST)
6663 if (TREE_CONSTANT (arg1))
6665 if (TREE_CONSTANT (arg0))
6671 if (reorder && flag_evaluation_order
6672 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6675 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6676 for commutative and comparison operators. Ensuring a canonical
6677 form allows the optimizers to find additional redundancies without
6678 having to explicitly check for both orderings. */
6679 if (TREE_CODE (arg0) == SSA_NAME
6680 && TREE_CODE (arg1) == SSA_NAME
6681 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6684 /* Put SSA_NAMEs last. */
6685 if (TREE_CODE (arg1) == SSA_NAME)
6687 if (TREE_CODE (arg0) == SSA_NAME)
6690 /* Put variables last. */
6699 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6700 ARG0 is extended to a wider type. */
/* NOTE(review): the listing is elided -- the return type, braces, several
   declarations (arg1_unw, min, max, above, below) and the switch (code)
   framing of the trailing case bodies were dropped by the extraction.  */
6703 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6705 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6707 tree shorter_type, outer_type;
6711 if (arg0_unw == arg0)
6713 shorter_type = TREE_TYPE (arg0_unw);
6715 #ifdef HAVE_canonicalize_funcptr_for_compare
6716 /* Disable this optimization if we're casting a function pointer
6717 type on targets that require function pointer canonicalization. */
6718 if (HAVE_canonicalize_funcptr_for_compare
6719 && TREE_CODE (shorter_type) == POINTER_TYPE
6720 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6724 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6727 arg1_unw = get_unwidened (arg1, shorter_type);
6729 /* If possible, express the comparison in the shorter mode. */
6730 if ((code == EQ_EXPR || code == NE_EXPR
6731 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6732 && (TREE_TYPE (arg1_unw) == shorter_type
6733 || (TREE_CODE (arg1_unw) == INTEGER_CST
6734 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6735 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6736 && int_fits_type_p (arg1_unw, shorter_type))))
6737 return fold_build2 (code, type, arg0_unw,
6738 fold_convert (shorter_type, arg1_unw));
6740 if (TREE_CODE (arg1_unw) != INTEGER_CST
6741 || TREE_CODE (shorter_type) != INTEGER_TYPE
6742 || !int_fits_type_p (arg1_unw, shorter_type))
6745 /* If we are comparing with the integer that does not fit into the range
6746 of the shorter type, the result is known. */
6747 outer_type = TREE_TYPE (arg1_unw);
6748 min = lower_bound_in_type (outer_type, shorter_type);
6749 max = upper_bound_in_type (outer_type, shorter_type);
/* ABOVE: constant is above the shorter type's range;
   BELOW: constant is below it.  */
6751 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6753 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
/* The remaining lines are the bodies of the per-comparison-code cases;
   each comparison against an out-of-range constant folds to 0 or 1.  */
6760 return omit_one_operand (type, integer_zero_node, arg0);
6765 return omit_one_operand (type, integer_one_node, arg0);
6771 return omit_one_operand (type, integer_one_node, arg0);
6773 return omit_one_operand (type, integer_zero_node, arg0);
6778 return omit_one_operand (type, integer_zero_node, arg0);
6780 return omit_one_operand (type, integer_one_node, arg0);
6789 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6790 ARG0 just the signedness is changed. */
/* NOTE(review): the listing is elided -- the return type, braces, the
   arg0_inner declaration and several early "return NULL_TREE" lines were
   dropped by the extraction.  */
6793 fold_sign_changed_comparison (enum tree_code code, tree type,
6794 tree arg0, tree arg1)
6797 tree inner_type, outer_type;
6799 if (TREE_CODE (arg0) != NOP_EXPR
6800 && TREE_CODE (arg0) != CONVERT_EXPR)
6803 outer_type = TREE_TYPE (arg0);
6804 arg0_inner = TREE_OPERAND (arg0, 0);
6805 inner_type = TREE_TYPE (arg0_inner);
6807 #ifdef HAVE_canonicalize_funcptr_for_compare
6808 /* Disable this optimization if we're casting a function pointer
6809 type on targets that require function pointer canonicalization. */
6810 if (HAVE_canonicalize_funcptr_for_compare
6811 && TREE_CODE (inner_type) == POINTER_TYPE
6812 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
/* Only a pure signedness change keeps the bits identical; the
   precisions must match exactly.  */
6816 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6819 if (TREE_CODE (arg1) != INTEGER_CST
6820 && !((TREE_CODE (arg1) == NOP_EXPR
6821 || TREE_CODE (arg1) == CONVERT_EXPR)
6822 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6825 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
/* Re-express the constant in the inner type, preserving any
   previously recorded overflow.  */
6830 if (TREE_CODE (arg1) == INTEGER_CST)
6831 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6832 TREE_INT_CST_HIGH (arg1), 0,
6833 TREE_OVERFLOW (arg1));
6835 arg1 = fold_convert (inner_type, arg1);
6837 return fold_build2 (code, type, arg0_inner, arg1);
6840 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6841 step of the array. Reconstructs s and delta in the case of s * delta
6842 being an integer constant (and thus already folded).
6843 ADDR is the address. MULT is the multiplicative expression.
6844 If the function succeeds, the new address expression is returned. Otherwise
6845 NULL_TREE is returned. */
/* NOTE(review): the listing is elided -- the return type, braces, the
   declarations of ret/pos/itype/mdim and several "return NULL_TREE",
   break and loop-copy lines were dropped by the extraction.  */
6848 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6850 tree s, delta, step;
6851 tree ref = TREE_OPERAND (addr, 0), pref;
6856 /* Canonicalize op1 into a possibly non-constant delta
6857 and an INTEGER_CST s. */
6858 if (TREE_CODE (op1) == MULT_EXPR)
6860 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6865 if (TREE_CODE (arg0) == INTEGER_CST)
6870 else if (TREE_CODE (arg1) == INTEGER_CST)
6878 else if (TREE_CODE (op1) == INTEGER_CST)
6885 /* Simulate we are delta * 1. */
6887 s = integer_one_node;
/* Walk down the reference chain looking for an ARRAY_REF whose
   element size matches (or divides) the multiplication step.  */
6890 for (;; ref = TREE_OPERAND (ref, 0))
6892 if (TREE_CODE (ref) == ARRAY_REF)
6894 /* Remember if this was a multi-dimensional array. */
6895 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6898 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6902 step = array_ref_element_size (ref);
6903 if (TREE_CODE (step) != INTEGER_CST)
6908 if (! tree_int_cst_equal (step, s))
6913 /* Try if delta is a multiple of step. */
6914 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6920 /* Only fold here if we can verify we do not overflow one
6921 dimension of a multi-dimensional array. */
6926 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6927 || !INTEGRAL_TYPE_P (itype)
6928 || !TYPE_MAX_VALUE (itype)
6929 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6932 tmp = fold_binary (code, itype,
6933 fold_convert (itype,
6934 TREE_OPERAND (ref, 1)),
6935 fold_convert (itype, delta));
6937 || TREE_CODE (tmp) != INTEGER_CST
6938 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6947 if (!handled_component_p (ref))
6951 /* We found the suitable array reference. So copy everything up to it,
6952 and replace the index. */
6954 pref = TREE_OPERAND (addr, 0);
6955 ret = copy_node (pref);
6960 pref = TREE_OPERAND (pref, 0);
6961 TREE_OPERAND (pos, 0) = copy_node (pref);
6962 pos = TREE_OPERAND (pos, 0);
6965 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6966 fold_convert (itype,
6967 TREE_OPERAND (pos, 1)),
6968 fold_convert (itype, delta));
6970 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6974 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6975 means A >= Y && A != MAX, but in this case we know that
6976 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
/* NOTE(review): the listing is elided -- the return type, braces and the
   "return NULL_TREE" early exits were dropped by the extraction.  */
6979 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6981 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from BOUND, which must be A < X or X > A.  */
6983 if (TREE_CODE (bound) == LT_EXPR)
6984 a = TREE_OPERAND (bound, 0);
6985 else if (TREE_CODE (bound) == GT_EXPR)
6986 a = TREE_OPERAND (bound, 1);
6990 typea = TREE_TYPE (a);
6991 if (!INTEGRAL_TYPE_P (typea)
6992 && !POINTER_TYPE_P (typea))
/* Extract A1 (the candidate A + 1) and Y from INEQ.  */
6995 if (TREE_CODE (ineq) == LT_EXPR)
6997 a1 = TREE_OPERAND (ineq, 1);
6998 y = TREE_OPERAND (ineq, 0);
7000 else if (TREE_CODE (ineq) == GT_EXPR)
7002 a1 = TREE_OPERAND (ineq, 0);
7003 y = TREE_OPERAND (ineq, 1);
7008 if (TREE_TYPE (a1) != typea)
/* The fold is valid only when A1 == A + 1.  */
7011 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
7012 if (!integer_onep (diff))
7015 return fold_build2 (GE_EXPR, type, a, y);
7018 /* Fold a sum or difference of at least one multiplication.
7019 Returns the folded tree or NULL if no simplification could be made. */
/* NOTE(review): the listing is elided -- the return type, braces, the
   else-arms that set arg00/arg10 in the non-MULT_EXPR cases, and the
   final "return NULL_TREE" were dropped by the extraction.  */
7022 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7024 tree arg00, arg01, arg10, arg11;
7025 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7027 /* (A * C) +- (B * C) -> (A+-B) * C.
7028 (A * C) +- A -> A * (C+-1).
7029 We are most concerned about the case where C is a constant,
7030 but other combinations show up during loop reduction. Since
7031 it is not difficult, try all four possibilities. */
7033 if (TREE_CODE (arg0) == MULT_EXPR)
7035 arg00 = TREE_OPERAND (arg0, 0);
7036 arg01 = TREE_OPERAND (arg0, 1);
/* Non-multiply operand is treated as operand * 1.  */
7041 arg01 = build_one_cst (type);
7043 if (TREE_CODE (arg1) == MULT_EXPR)
7045 arg10 = TREE_OPERAND (arg1, 0);
7046 arg11 = TREE_OPERAND (arg1, 1);
7051 arg11 = build_one_cst (type);
/* Look for a common factor among the four candidate operands.  */
7055 if (operand_equal_p (arg01, arg11, 0))
7056 same = arg01, alt0 = arg00, alt1 = arg10;
7057 else if (operand_equal_p (arg00, arg10, 0))
7058 same = arg00, alt0 = arg01, alt1 = arg11;
7059 else if (operand_equal_p (arg00, arg11, 0))
7060 same = arg00, alt0 = arg01, alt1 = arg10;
7061 else if (operand_equal_p (arg01, arg10, 0))
7062 same = arg01, alt0 = arg00, alt1 = arg11;
7064 /* No identical multiplicands; see if we can find a common
7065 power-of-two factor in non-power-of-two multiplies. This
7066 can help in multi-dimensional array access. */
7067 else if (host_integerp (arg01, 0)
7068 && host_integerp (arg11, 0))
7070 HOST_WIDE_INT int01, int11, tmp;
7073 int01 = TREE_INT_CST_LOW (arg01);
7074 int11 = TREE_INT_CST_LOW (arg11);
7076 /* Move min of absolute values to int11. */
7077 if ((int01 >= 0 ? int01 : -int01)
7078 < (int11 >= 0 ? int11 : -int11))
7080 tmp = int01, int01 = int11, int11 = tmp;
7081 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7088 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7090 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7091 build_int_cst (TREE_TYPE (arg00),
7096 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
/* Rebuild as (alt0 +- alt1) * same.  */
7101 return fold_build2 (MULT_EXPR, type,
7102 fold_build2 (code, type,
7103 fold_convert (type, alt0),
7104 fold_convert (type, alt1)),
7105 fold_convert (type, same));
7110 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7111 specified by EXPR into the buffer PTR of length LEN bytes.
7112 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): the listing is elided -- the return type, braces, the
   "return 0" on overflow, the else on the HIGH-word branch and the final
   "return total_bytes" were dropped by the extraction.  */
7116 native_encode_int (tree expr, unsigned char *ptr, int len)
7118 tree type = TREE_TYPE (expr);
7119 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7120 int byte, offset, word, words;
7121 unsigned char value;
7123 if (total_bytes > len)
7125 words = total_bytes / UNITS_PER_WORD;
7127 for (byte = 0; byte < total_bytes; byte++)
/* Pick the byte out of the LOW or HIGH half of the double-int.  */
7129 int bitpos = byte * BITS_PER_UNIT;
7130 if (bitpos < HOST_BITS_PER_WIDE_INT)
7131 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7133 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7134 >> (bitpos - HOST_BITS_PER_WIDE_INT));
/* Map the logical byte index to the target's byte/word order.  */
7136 if (total_bytes > UNITS_PER_WORD)
7138 word = byte / UNITS_PER_WORD;
7139 if (WORDS_BIG_ENDIAN)
7140 word = (words - 1) - word;
7141 offset = word * UNITS_PER_WORD;
7142 if (BYTES_BIG_ENDIAN)
7143 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7145 offset += byte % UNITS_PER_WORD;
7148 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7149 ptr[offset] = value;
7155 /* Subroutine of native_encode_expr. Encode the REAL_CST
7156 specified by EXPR into the buffer PTR of length LEN bytes.
7157 Return the number of bytes placed in the buffer, or zero
7161 native_encode_real (tree expr, unsigned char *ptr, int len)
7163 tree type = TREE_TYPE (expr);
7164 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7165 int byte, offset, word, words, bitpos;
7166 unsigned char value;
7168 /* There are always 32 bits in each long, no matter the size of
7169 the hosts long. We handle floating point representations with
7173 if (total_bytes > len)
7175 words = 32 / UNITS_PER_WORD;
7177 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7179 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7180 bitpos += BITS_PER_UNIT)
7182 byte = (bitpos / BITS_PER_UNIT) & 3;
7183 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7185 if (UNITS_PER_WORD < 4)
7187 word = byte / UNITS_PER_WORD;
7188 if (WORDS_BIG_ENDIAN)
7189 word = (words - 1) - word;
7190 offset = word * UNITS_PER_WORD;
7191 if (BYTES_BIG_ENDIAN)
7192 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7194 offset += byte % UNITS_PER_WORD;
7197 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7198 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7203 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7204 specified by EXPR into the buffer PTR of length LEN bytes.
7205 Return the number of bytes placed in the buffer, or zero
7209 native_encode_complex (tree expr, unsigned char *ptr, int len)
7214 part = TREE_REALPART (expr);
7215 rsize = native_encode_expr (part, ptr, len);
7218 part = TREE_IMAGPART (expr);
7219 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7222 return rsize + isize;
7226 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7227 specified by EXPR into the buffer PTR of length LEN bytes.
7228 Return the number of bytes placed in the buffer, or zero
7232 native_encode_vector (tree expr, unsigned char *ptr, int len)
7234 int i, size, offset, count;
7235 tree itype, elem, elements;
7238 elements = TREE_VECTOR_CST_ELTS (expr);
7239 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7240 itype = TREE_TYPE (TREE_TYPE (expr));
7241 size = GET_MODE_SIZE (TYPE_MODE (itype));
7242 for (i = 0; i < count; i++)
7246 elem = TREE_VALUE (elements);
7247 elements = TREE_CHAIN (elements);
7254 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7259 if (offset + size > len)
7261 memset (ptr+offset, 0, size);
7269 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7270 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7271 buffer PTR of length LEN bytes. Return the number of bytes
7272 placed in the buffer, or zero upon failure. */
7275 native_encode_expr (tree expr, unsigned char *ptr, int len)
7277 switch (TREE_CODE (expr))
7280 return native_encode_int (expr, ptr, len);
7283 return native_encode_real (expr, ptr, len);
7286 return native_encode_complex (expr, ptr, len);
7289 return native_encode_vector (expr, ptr, len);
7297 /* Subroutine of native_interpret_expr. Interpret the contents of
7298 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7299 If the buffer cannot be interpreted, return NULL_TREE. */
7302 native_interpret_int (tree type, unsigned char *ptr, int len)
7304 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7305 int byte, offset, word, words;
7306 unsigned char value;
7307 unsigned int HOST_WIDE_INT lo = 0;
7308 HOST_WIDE_INT hi = 0;
7310 if (total_bytes > len)
7312 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7314 words = total_bytes / UNITS_PER_WORD;
7316 for (byte = 0; byte < total_bytes; byte++)
7318 int bitpos = byte * BITS_PER_UNIT;
7319 if (total_bytes > UNITS_PER_WORD)
7321 word = byte / UNITS_PER_WORD;
7322 if (WORDS_BIG_ENDIAN)
7323 word = (words - 1) - word;
7324 offset = word * UNITS_PER_WORD;
7325 if (BYTES_BIG_ENDIAN)
7326 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7328 offset += byte % UNITS_PER_WORD;
7331 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7332 value = ptr[offset];
7334 if (bitpos < HOST_BITS_PER_WIDE_INT)
7335 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7337 hi |= (unsigned HOST_WIDE_INT) value
7338 << (bitpos - HOST_BITS_PER_WIDE_INT);
7341 return build_int_cst_wide_type (type, lo, hi);
7345 /* Subroutine of native_interpret_expr. Interpret the contents of
7346 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7347 If the buffer cannot be interpreted, return NULL_TREE. */
7350 native_interpret_real (tree type, unsigned char *ptr, int len)
7352 enum machine_mode mode = TYPE_MODE (type);
7353 int total_bytes = GET_MODE_SIZE (mode);
7354 int byte, offset, word, words, bitpos;
7355 unsigned char value;
7356 /* There are always 32 bits in each long, no matter the size of
7357 the hosts long. We handle floating point representations with
7362 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7363 if (total_bytes > len || total_bytes > 24)
7365 words = 32 / UNITS_PER_WORD;
7367 memset (tmp, 0, sizeof (tmp));
7368 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7369 bitpos += BITS_PER_UNIT)
7371 byte = (bitpos / BITS_PER_UNIT) & 3;
7372 if (UNITS_PER_WORD < 4)
7374 word = byte / UNITS_PER_WORD;
7375 if (WORDS_BIG_ENDIAN)
7376 word = (words - 1) - word;
7377 offset = word * UNITS_PER_WORD;
7378 if (BYTES_BIG_ENDIAN)
7379 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7381 offset += byte % UNITS_PER_WORD;
7384 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7385 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7387 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7390 real_from_target (&r, tmp, mode);
7391 return build_real (type, r);
7395 /* Subroutine of native_interpret_expr. Interpret the contents of
7396 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7397 If the buffer cannot be interpreted, return NULL_TREE. */
7400 native_interpret_complex (tree type, unsigned char *ptr, int len)
7402 tree etype, rpart, ipart;
7405 etype = TREE_TYPE (type);
7406 size = GET_MODE_SIZE (TYPE_MODE (etype));
7409 rpart = native_interpret_expr (etype, ptr, size);
7412 ipart = native_interpret_expr (etype, ptr+size, size);
7415 return build_complex (type, rpart, ipart);
7419 /* Subroutine of native_interpret_expr. Interpret the contents of
7420 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7421 If the buffer cannot be interpreted, return NULL_TREE. */
7424 native_interpret_vector (tree type, unsigned char *ptr, int len)
7426 tree etype, elem, elements;
7429 etype = TREE_TYPE (type);
7430 size = GET_MODE_SIZE (TYPE_MODE (etype));
7431 count = TYPE_VECTOR_SUBPARTS (type);
7432 if (size * count > len)
7435 elements = NULL_TREE;
7436 for (i = count - 1; i >= 0; i--)
7438 elem = native_interpret_expr (etype, ptr+(i*size), size);
7441 elements = tree_cons (NULL_TREE, elem, elements);
7443 return build_vector (type, elements);
7447 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7448 the buffer PTR of length LEN as a constant of type TYPE. For
7449 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7450 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7451 return NULL_TREE. */
7454 native_interpret_expr (tree type, unsigned char *ptr, int len)
7456 switch (TREE_CODE (type))
7461 return native_interpret_int (type, ptr, len);
7464 return native_interpret_real (type, ptr, len);
7467 return native_interpret_complex (type, ptr, len);
7470 return native_interpret_vector (type, ptr, len);
7478 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7479 TYPE at compile-time. If we're unable to perform the conversion
7480 return NULL_TREE. */
7483 fold_view_convert_expr (tree type, tree expr)
7485 /* We support up to 512-bit values (for V8DFmode). */
7486 unsigned char buffer[64];
7489 /* Check that the host and target are sane. */
7490 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7493 len = native_encode_expr (expr, buffer, sizeof (buffer));
7497 return native_interpret_expr (type, buffer, len);
7501 /* Fold a unary expression of code CODE and type TYPE with operand
7502 OP0. Return the folded expression if folding is successful.
7503 Otherwise, return NULL_TREE. */
7506 fold_unary (enum tree_code code, tree type, tree op0)
7510 enum tree_code_class kind = TREE_CODE_CLASS (code);
7512 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7513 && TREE_CODE_LENGTH (code) == 1);
7518 if (code == NOP_EXPR || code == CONVERT_EXPR
7519 || code == FLOAT_EXPR || code == ABS_EXPR)
7521 /* Don't use STRIP_NOPS, because signedness of argument type
7523 STRIP_SIGN_NOPS (arg0);
7527 /* Strip any conversions that don't change the mode. This
7528 is safe for every expression, except for a comparison
7529 expression because its signedness is derived from its
7532 Note that this is done as an internal manipulation within
7533 the constant folder, in order to find the simplest
7534 representation of the arguments so that their form can be
7535 studied. In any cases, the appropriate type conversions
7536 should be put back in the tree that will get out of the
7542 if (TREE_CODE_CLASS (code) == tcc_unary)
7544 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7545 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7546 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7547 else if (TREE_CODE (arg0) == COND_EXPR)
7549 tree arg01 = TREE_OPERAND (arg0, 1);
7550 tree arg02 = TREE_OPERAND (arg0, 2);
7551 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7552 arg01 = fold_build1 (code, type, arg01);
7553 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7554 arg02 = fold_build1 (code, type, arg02);
7555 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7558 /* If this was a conversion, and all we did was to move into
7559 inside the COND_EXPR, bring it back out. But leave it if
7560 it is a conversion from integer to integer and the
7561 result precision is no wider than a word since such a
7562 conversion is cheap and may be optimized away by combine,
7563 while it couldn't if it were outside the COND_EXPR. Then return
7564 so we don't get into an infinite recursion loop taking the
7565 conversion out and then back in. */
7567 if ((code == NOP_EXPR || code == CONVERT_EXPR
7568 || code == NON_LVALUE_EXPR)
7569 && TREE_CODE (tem) == COND_EXPR
7570 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7571 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7572 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7573 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7574 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7575 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7576 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7578 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7579 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7580 || flag_syntax_only))
7581 tem = build1 (code, type,
7583 TREE_TYPE (TREE_OPERAND
7584 (TREE_OPERAND (tem, 1), 0)),
7585 TREE_OPERAND (tem, 0),
7586 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7587 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7590 else if (COMPARISON_CLASS_P (arg0))
7592 if (TREE_CODE (type) == BOOLEAN_TYPE)
7594 arg0 = copy_node (arg0);
7595 TREE_TYPE (arg0) = type;
7598 else if (TREE_CODE (type) != INTEGER_TYPE)
7599 return fold_build3 (COND_EXPR, type, arg0,
7600 fold_build1 (code, type,
7602 fold_build1 (code, type,
7603 integer_zero_node));
7612 case FIX_TRUNC_EXPR:
7613 if (TREE_TYPE (op0) == type)
7616 /* If we have (type) (a CMP b) and type is an integral type, return
7617 new expression involving the new type. */
7618 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7619 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7620 TREE_OPERAND (op0, 1));
7622 /* Handle cases of two conversions in a row. */
7623 if (TREE_CODE (op0) == NOP_EXPR
7624 || TREE_CODE (op0) == CONVERT_EXPR)
7626 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7627 tree inter_type = TREE_TYPE (op0);
7628 int inside_int = INTEGRAL_TYPE_P (inside_type);
7629 int inside_ptr = POINTER_TYPE_P (inside_type);
7630 int inside_float = FLOAT_TYPE_P (inside_type);
7631 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7632 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7633 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7634 int inter_int = INTEGRAL_TYPE_P (inter_type);
7635 int inter_ptr = POINTER_TYPE_P (inter_type);
7636 int inter_float = FLOAT_TYPE_P (inter_type);
7637 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7638 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7639 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7640 int final_int = INTEGRAL_TYPE_P (type);
7641 int final_ptr = POINTER_TYPE_P (type);
7642 int final_float = FLOAT_TYPE_P (type);
7643 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7644 unsigned int final_prec = TYPE_PRECISION (type);
7645 int final_unsignedp = TYPE_UNSIGNED (type);
7647 /* In addition to the cases of two conversions in a row
7648 handled below, if we are converting something to its own
7649 type via an object of identical or wider precision, neither
7650 conversion is needed. */
7651 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7652 && (((inter_int || inter_ptr) && final_int)
7653 || (inter_float && final_float))
7654 && inter_prec >= final_prec)
7655 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7657 /* Likewise, if the intermediate and final types are either both
7658 float or both integer, we don't need the middle conversion if
7659 it is wider than the final type and doesn't change the signedness
7660 (for integers). Avoid this if the final type is a pointer
7661 since then we sometimes need the inner conversion. Likewise if
7662 the outer has a precision not equal to the size of its mode. */
7663 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7664 || (inter_float && inside_float)
7665 || (inter_vec && inside_vec))
7666 && inter_prec >= inside_prec
7667 && (inter_float || inter_vec
7668 || inter_unsignedp == inside_unsignedp)
7669 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7670 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7672 && (! final_vec || inter_prec == inside_prec))
7673 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7675 /* If we have a sign-extension of a zero-extended value, we can
7676 replace that by a single zero-extension. */
7677 if (inside_int && inter_int && final_int
7678 && inside_prec < inter_prec && inter_prec < final_prec
7679 && inside_unsignedp && !inter_unsignedp)
7680 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7682 /* Two conversions in a row are not needed unless:
7683 - some conversion is floating-point (overstrict for now), or
7684 - some conversion is a vector (overstrict for now), or
7685 - the intermediate type is narrower than both initial and
7687 - the intermediate type and innermost type differ in signedness,
7688 and the outermost type is wider than the intermediate, or
7689 - the initial type is a pointer type and the precisions of the
7690 intermediate and final types differ, or
7691 - the final type is a pointer type and the precisions of the
7692 initial and intermediate types differ.
7693 - the final type is a pointer type and the initial type not
7694 - the initial type is a pointer to an array and the final type
7696 if (! inside_float && ! inter_float && ! final_float
7697 && ! inside_vec && ! inter_vec && ! final_vec
7698 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7699 && ! (inside_int && inter_int
7700 && inter_unsignedp != inside_unsignedp
7701 && inter_prec < final_prec)
7702 && ((inter_unsignedp && inter_prec > inside_prec)
7703 == (final_unsignedp && final_prec > inter_prec))
7704 && ! (inside_ptr && inter_prec != final_prec)
7705 && ! (final_ptr && inside_prec != inter_prec)
7706 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7707 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7708 && final_ptr == inside_ptr
7710 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7711 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7712 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7715 /* Handle (T *)&A.B.C for A being of type T and B and C
7716 living at offset zero. This occurs frequently in
7717 C++ upcasting and then accessing the base. */
7718 if (TREE_CODE (op0) == ADDR_EXPR
7719 && POINTER_TYPE_P (type)
7720 && handled_component_p (TREE_OPERAND (op0, 0)))
7722 HOST_WIDE_INT bitsize, bitpos;
7724 enum machine_mode mode;
7725 int unsignedp, volatilep;
7726 tree base = TREE_OPERAND (op0, 0);
7727 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7728 &mode, &unsignedp, &volatilep, false);
7729 /* If the reference was to a (constant) zero offset, we can use
7730 the address of the base if it has the same base type
7731 as the result type. */
7732 if (! offset && bitpos == 0
7733 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7734 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7735 return fold_convert (type, build_fold_addr_expr (base));
7738 if ((TREE_CODE (op0) == MODIFY_EXPR
7739 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7740 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7741 /* Detect assigning a bitfield. */
7742 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7744 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7746 /* Don't leave an assignment inside a conversion
7747 unless assigning a bitfield. */
7748 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7749 /* First do the assignment, then return converted constant. */
7750 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7751 TREE_NO_WARNING (tem) = 1;
7752 TREE_USED (tem) = 1;
7756 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7757 constants (if x has signed type, the sign bit cannot be set
7758 in c). This folds extension into the BIT_AND_EXPR. */
7759 if (INTEGRAL_TYPE_P (type)
7760 && TREE_CODE (type) != BOOLEAN_TYPE
7761 && TREE_CODE (op0) == BIT_AND_EXPR
7762 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7765 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7768 if (TYPE_UNSIGNED (TREE_TYPE (and))
7769 || (TYPE_PRECISION (type)
7770 <= TYPE_PRECISION (TREE_TYPE (and))))
7772 else if (TYPE_PRECISION (TREE_TYPE (and1))
7773 <= HOST_BITS_PER_WIDE_INT
7774 && host_integerp (and1, 1))
7776 unsigned HOST_WIDE_INT cst;
7778 cst = tree_low_cst (and1, 1);
7779 cst &= (HOST_WIDE_INT) -1
7780 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7781 change = (cst == 0);
7782 #ifdef LOAD_EXTEND_OP
7784 && !flag_syntax_only
7785 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7788 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7789 and0 = fold_convert (uns, and0);
7790 and1 = fold_convert (uns, and1);
7796 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7797 TREE_INT_CST_HIGH (and1), 0,
7798 TREE_OVERFLOW (and1));
7799 return fold_build2 (BIT_AND_EXPR, type,
7800 fold_convert (type, and0), tem);
7804 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7805 T2 being pointers to types of the same size. */
7806 if (POINTER_TYPE_P (type)
7807 && BINARY_CLASS_P (arg0)
7808 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7809 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7811 tree arg00 = TREE_OPERAND (arg0, 0);
7813 tree t1 = TREE_TYPE (arg00);
7814 tree tt0 = TREE_TYPE (t0);
7815 tree tt1 = TREE_TYPE (t1);
7816 tree s0 = TYPE_SIZE (tt0);
7817 tree s1 = TYPE_SIZE (tt1);
7819 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7820 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7821 TREE_OPERAND (arg0, 1));
7824 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7825 of the same precision, and X is a integer type not narrower than
7826 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7827 if (INTEGRAL_TYPE_P (type)
7828 && TREE_CODE (op0) == BIT_NOT_EXPR
7829 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7830 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7831 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7832 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7834 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7835 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7836 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7837 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7840 tem = fold_convert_const (code, type, arg0);
7841 return tem ? tem : NULL_TREE;
7843 case VIEW_CONVERT_EXPR:
7844 if (TREE_TYPE (op0) == type)
7846 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7847 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7848 return fold_view_convert_expr (type, op0);
7851 tem = fold_negate_expr (arg0);
7853 return fold_convert (type, tem);
7857 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7858 return fold_abs_const (arg0, type);
7859 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7860 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7861 /* Convert fabs((double)float) into (double)fabsf(float). */
7862 else if (TREE_CODE (arg0) == NOP_EXPR
7863 && TREE_CODE (type) == REAL_TYPE)
7865 tree targ0 = strip_float_extensions (arg0);
7867 return fold_convert (type, fold_build1 (ABS_EXPR,
7871 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7872 else if (TREE_CODE (arg0) == ABS_EXPR)
7874 else if (tree_expr_nonnegative_p (arg0))
7877 /* Strip sign ops from argument. */
7878 if (TREE_CODE (type) == REAL_TYPE)
7880 tem = fold_strip_sign_ops (arg0);
7882 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7887 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7888 return fold_convert (type, arg0);
7889 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7891 tree itype = TREE_TYPE (type);
7892 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7893 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7894 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7896 if (TREE_CODE (arg0) == COMPLEX_CST)
7898 tree itype = TREE_TYPE (type);
7899 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7900 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7901 return build_complex (type, rpart, negate_expr (ipart));
7903 if (TREE_CODE (arg0) == CONJ_EXPR)
7904 return fold_convert (type, TREE_OPERAND (arg0, 0));
7908 if (TREE_CODE (arg0) == INTEGER_CST)
7909 return fold_not_const (arg0, type);
7910 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7911 return TREE_OPERAND (arg0, 0);
7912 /* Convert ~ (-A) to A - 1. */
7913 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7914 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7915 build_int_cst (type, 1));
7916 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7917 else if (INTEGRAL_TYPE_P (type)
7918 && ((TREE_CODE (arg0) == MINUS_EXPR
7919 && integer_onep (TREE_OPERAND (arg0, 1)))
7920 || (TREE_CODE (arg0) == PLUS_EXPR
7921 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7922 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7923 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7924 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7925 && (tem = fold_unary (BIT_NOT_EXPR, type,
7927 TREE_OPERAND (arg0, 0)))))
7928 return fold_build2 (BIT_XOR_EXPR, type, tem,
7929 fold_convert (type, TREE_OPERAND (arg0, 1)));
7930 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7931 && (tem = fold_unary (BIT_NOT_EXPR, type,
7933 TREE_OPERAND (arg0, 1)))))
7934 return fold_build2 (BIT_XOR_EXPR, type,
7935 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7939 case TRUTH_NOT_EXPR:
7940 /* The argument to invert_truthvalue must have Boolean type. */
7941 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7942 arg0 = fold_convert (boolean_type_node, arg0);
7944 /* Note that the operand of this must be an int
7945 and its values must be 0 or 1.
7946 ("true" is a fixed value perhaps depending on the language,
7947 but we don't handle values other than 1 correctly yet.) */
7948 tem = fold_truth_not_expr (arg0);
7951 return fold_convert (type, tem);
7954 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7955 return fold_convert (type, arg0);
7956 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7957 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7958 TREE_OPERAND (arg0, 1));
7959 if (TREE_CODE (arg0) == COMPLEX_CST)
7960 return fold_convert (type, TREE_REALPART (arg0));
7961 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7963 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7964 tem = fold_build2 (TREE_CODE (arg0), itype,
7965 fold_build1 (REALPART_EXPR, itype,
7966 TREE_OPERAND (arg0, 0)),
7967 fold_build1 (REALPART_EXPR, itype,
7968 TREE_OPERAND (arg0, 1)));
7969 return fold_convert (type, tem);
7971 if (TREE_CODE (arg0) == CONJ_EXPR)
7973 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7974 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7975 return fold_convert (type, tem);
7977 if (TREE_CODE (arg0) == CALL_EXPR)
7979 tree fn = get_callee_fndecl (arg0);
7980 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7981 switch (DECL_FUNCTION_CODE (fn))
7983 CASE_FLT_FN (BUILT_IN_CEXPI):
7984 fn = mathfn_built_in (type, BUILT_IN_COS);
7986 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
7996 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7997 return fold_convert (type, integer_zero_node);
7998 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7999 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8000 TREE_OPERAND (arg0, 0));
8001 if (TREE_CODE (arg0) == COMPLEX_CST)
8002 return fold_convert (type, TREE_IMAGPART (arg0));
8003 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8005 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8006 tem = fold_build2 (TREE_CODE (arg0), itype,
8007 fold_build1 (IMAGPART_EXPR, itype,
8008 TREE_OPERAND (arg0, 0)),
8009 fold_build1 (IMAGPART_EXPR, itype,
8010 TREE_OPERAND (arg0, 1)));
8011 return fold_convert (type, tem);
8013 if (TREE_CODE (arg0) == CONJ_EXPR)
8015 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8016 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8017 return fold_convert (type, negate_expr (tem));
8019 if (TREE_CODE (arg0) == CALL_EXPR)
8021 tree fn = get_callee_fndecl (arg0);
8022 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8023 switch (DECL_FUNCTION_CODE (fn))
8025 CASE_FLT_FN (BUILT_IN_CEXPI):
8026 fn = mathfn_built_in (type, BUILT_IN_SIN);
8028 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8039 } /* switch (code) */
8042 /* Fold a binary expression of code CODE and type TYPE with operands
8043 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8044 Return the folded expression if folding is successful. Otherwise,
8045 return NULL_TREE. */
8048 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8050 enum tree_code compl_code;
8052 if (code == MIN_EXPR)
8053 compl_code = MAX_EXPR;
8054 else if (code == MAX_EXPR)
8055 compl_code = MIN_EXPR;
8059 /* MIN (MAX (a, b), b) == b. */
8060 if (TREE_CODE (op0) == compl_code
8061 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8062 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8064 /* MIN (MAX (b, a), b) == b. */
8065 if (TREE_CODE (op0) == compl_code
8066 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8067 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8068 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8070 /* MIN (a, MAX (a, b)) == a. */
8071 if (TREE_CODE (op1) == compl_code
8072 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8073 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8074 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8076 /* MIN (a, MAX (b, a)) == a. */
8077 if (TREE_CODE (op1) == compl_code
8078 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8079 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8080 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8085 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8086 by changing CODE to reduce the magnitude of constants involved in
8087 ARG0 of the comparison.
8088 Returns a canonicalized comparison tree if a simplification was
8089 possible, otherwise returns NULL_TREE.
8090 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8091 valid if signed overflow is undefined. */
8094 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8095 tree arg0, tree arg1,
8096 bool *strict_overflow_p)
8098 enum tree_code code0 = TREE_CODE (arg0);
8099 tree t, cst0 = NULL_TREE;
8103 /* Match A +- CST code arg1 and CST code arg1. */
8104 if (!(((code0 == MINUS_EXPR
8105 || code0 == PLUS_EXPR)
8106 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8107 || code0 == INTEGER_CST))
8110 /* Identify the constant in arg0 and its sign. */
8111 if (code0 == INTEGER_CST)
8114 cst0 = TREE_OPERAND (arg0, 1);
8115 sgn0 = tree_int_cst_sgn (cst0);
8117 /* Overflowed constants and zero will cause problems. */
8118 if (integer_zerop (cst0)
8119 || TREE_OVERFLOW (cst0))
8122 /* See if we can reduce the magnitude of the constant in
8123 arg0 by changing the comparison code. */
8124 if (code0 == INTEGER_CST)
8126 /* CST <= arg1 -> CST-1 < arg1. */
8127 if (code == LE_EXPR && sgn0 == 1)
8129 /* -CST < arg1 -> -CST-1 <= arg1. */
8130 else if (code == LT_EXPR && sgn0 == -1)
8132 /* CST > arg1 -> CST-1 >= arg1. */
8133 else if (code == GT_EXPR && sgn0 == 1)
8135 /* -CST >= arg1 -> -CST-1 > arg1. */
8136 else if (code == GE_EXPR && sgn0 == -1)
8140 /* arg1 code' CST' might be more canonical. */
8145 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8147 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8149 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8150 else if (code == GT_EXPR
8151 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8153 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8154 else if (code == LE_EXPR
8155 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8157 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8158 else if (code == GE_EXPR
8159 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8163 *strict_overflow_p = true;
8166 /* Now build the constant reduced in magnitude. */
8167 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8168 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8169 if (code0 != INTEGER_CST)
8170 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8172 /* If swapping might yield to a more canonical form, do so. */
8174 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8176 return fold_build2 (code, type, t, arg1);
8179 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8180 overflow further. Try to decrease the magnitude of constants involved
8181 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8182 and put sole constants at the second argument position.
8183 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8186 maybe_canonicalize_comparison (enum tree_code code, tree type,
8187 tree arg0, tree arg1)
8190 bool strict_overflow_p;
/* Warning text shared by both canonicalization attempts below; it is
   only emitted when the simplification actually relied on signed
   overflow being undefined (strict_overflow_p set by the worker).  */
8191 const char * const warnmsg = G_("assuming signed overflow does not occur "
8192 "when reducing constant in comparison")
8194 /* In principle pointers also have undefined overflow behavior,
8195 but that causes problems elsewhere. */
8196 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8197 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8200 /* Try canonicalization by simplifying arg0. */
8201 strict_overflow_p = false;
8202 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8203 &strict_overflow_p);
/* NOTE(review): on success the result is presumably returned right
   after the warning; the return statement is not visible in this
   excerpt -- verify against the full fold-const.c.  */
8206 if (strict_overflow_p)
8207 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8211 /* Try canonicalization by simplifying arg1 using the swapped
/* Second attempt: swap the comparison code and the operand order so
   the same worker can simplify what was originally ARG1.  */
8213 code = swap_tree_comparison (code);
8214 strict_overflow_p = false;
8215 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8216 &strict_overflow_p);
8217 if (t && strict_overflow_p)
8218 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8222 /* Subroutine of fold_binary. This routine performs all of the
8223 transformations that are common to the equality/inequality
8224 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8225 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8226 fold_binary should call fold_binary. Fold a comparison with
8227 tree code CODE and type TYPE with operands OP0 and OP1. Return
8228 the folded comparison or NULL_TREE. */
8231 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8233 tree arg0, arg1, tem;
/* ARG0/ARG1 are working copies of OP0/OP1 with sign-preserving no-op
   conversions stripped; OP0/OP1 keep the original trees for the cases
   below that rebuild with the caller's operands.  */
8238 STRIP_SIGN_NOPS (arg0);
8239 STRIP_SIGN_NOPS (arg1);
/* First see whether the comparison folds to a constant outright.  */
8241 tem = fold_relational_const (code, type, arg0, arg1);
8242 if (tem != NULL_TREE)
8245 /* If one arg is a real or integer constant, put it last. */
8246 if (tree_swap_operands_p (arg0, arg1, true))
8247 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8249 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8250 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8251 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8252 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8253 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8254 && (TREE_CODE (arg1) == INTEGER_CST
8255 && !TREE_OVERFLOW (arg1)))
8257 tree const1 = TREE_OPERAND (arg0, 1);
8259 tree variable = TREE_OPERAND (arg0, 0);
/* lhs_add is true when ARG0 is a MINUS_EXPR, i.e. the constant must
   be added back on the right-hand side.  */
8262 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8264 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8265 TREE_TYPE (arg1), const2, const1);
8267 /* If the constant operation overflowed this can be
8268 simplified as a comparison against INT_MAX/INT_MIN. */
8269 if (TREE_CODE (lhs) == INTEGER_CST
8270 && TREE_OVERFLOW (lhs))
8272 int const1_sgn = tree_int_cst_sgn (const1);
8273 enum tree_code code2 = code;
8275 /* Get the sign of the constant on the lhs if the
8276 operation were VARIABLE + CONST1. */
8277 if (TREE_CODE (arg0) == MINUS_EXPR)
8278 const1_sgn = -const1_sgn;
8280 /* The sign of the constant determines if we overflowed
8281 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8282 Canonicalize to the INT_MIN overflow by swapping the comparison
8284 if (const1_sgn == -1)
8285 code2 = swap_tree_comparison (code);
8287 /* We now can look at the canonicalized case
8288 VARIABLE + 1 CODE2 INT_MIN
8289 and decide on the result. */
8290 if (code2 == LT_EXPR
8292 || code2 == EQ_EXPR)
8293 return omit_one_operand (type, boolean_false_node, variable);
8294 else if (code2 == NE_EXPR
8296 || code2 == GT_EXPR)
8297 return omit_one_operand (type, boolean_true_node, variable);
8300 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8301 && (TREE_CODE (lhs) != INTEGER_CST
8302 || !TREE_OVERFLOW (lhs)))
8304 fold_overflow_warning (("assuming signed overflow does not occur "
8305 "when changing X +- C1 cmp C2 to "
8307 WARN_STRICT_OVERFLOW_COMPARISON);
8308 return fold_build2 (code, type, variable, lhs);
8312 /* For comparisons of pointers we can decompose it to a compile time
8313 comparison of the base objects and the offsets into the object.
8314 This requires at least one operand being an ADDR_EXPR to do more
8315 than the operand_equal_p test below. */
8316 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8317 && (TREE_CODE (arg0) == ADDR_EXPR
8318 || TREE_CODE (arg1) == ADDR_EXPR))
8320 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8321 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8322 enum machine_mode mode;
8323 int volatilep, unsignedp;
/* indirect_base0 records whether base0 was an INDIRECT_REF whose
   operand was taken as the base, so base1 must be treated the same
   way for the bases to be comparable.  */
8324 bool indirect_base0 = false;
8326 /* Get base and offset for the access. Strip ADDR_EXPR for
8327 get_inner_reference, but put it back by stripping INDIRECT_REF
8328 off the base object if possible. */
8330 if (TREE_CODE (arg0) == ADDR_EXPR)
8332 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8333 &bitsize, &bitpos0, &offset0, &mode,
8334 &unsignedp, &volatilep, false);
8335 if (TREE_CODE (base0) == INDIRECT_REF)
8336 base0 = TREE_OPERAND (base0, 0);
8338 indirect_base0 = true;
8342 if (TREE_CODE (arg1) == ADDR_EXPR)
8344 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8345 &bitsize, &bitpos1, &offset1, &mode,
8346 &unsignedp, &volatilep, false);
8347 /* We have to make sure to have an indirect/non-indirect base1
8348 just the same as we did for base0. */
8349 if (TREE_CODE (base1) == INDIRECT_REF
8351 base1 = TREE_OPERAND (base1, 0);
8352 else if (!indirect_base0)
8355 else if (indirect_base0)
8358 /* If we have equivalent bases we might be able to simplify. */
8360 && operand_equal_p (base0, base1, 0))
8362 /* We can fold this expression to a constant if the non-constant
8363 offset parts are equal. */
8364 if (offset0 == offset1
8365 || (offset0 && offset1
8366 && operand_equal_p (offset0, offset1, 0)))
/* With equal bases and equal variable offsets the whole comparison
   reduces to comparing the constant bit positions, one case per
   comparison code.  */
8371 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8373 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8375 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8377 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8379 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8381 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8385 /* We can simplify the comparison to a comparison of the variable
8386 offset parts if the constant offset parts are equal.
8387 Be careful to use signed size type here because otherwise we
8388 mess with array offsets in the wrong way. This is possible
8389 because pointer arithmetic is restricted to retain within an
8390 object and overflow on pointer differences is undefined as of
8391 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8392 else if (bitpos0 == bitpos1)
8394 tree signed_size_type_node;
8395 signed_size_type_node = signed_type_for (size_type_node);
8397 /* By converting to signed size type we cover middle-end pointer
8398 arithmetic which operates on unsigned pointer types of size
8399 type size and ARRAY_REF offsets which are properly sign or
8400 zero extended from their type in case it is narrower than
8402 if (offset0 == NULL_TREE)
8403 offset0 = build_int_cst (signed_size_type_node, 0);
8405 offset0 = fold_convert (signed_size_type_node, offset0);
8406 if (offset1 == NULL_TREE)
8407 offset1 = build_int_cst (signed_size_type_node, 0);
8409 offset1 = fold_convert (signed_size_type_node, offset1);
8411 return fold_build2 (code, type, offset0, offset1);
8416 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8417 same object, then we can fold this to a comparison of the two offsets in
8418 signed size type. This is possible because pointer arithmetic is
8419 restricted to retain within an object and overflow on pointer differences
8420 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8422 We check flag_wrapv directly because pointers types are unsigned,
8423 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8424 normally what we want to avoid certain odd overflow cases, but
8426 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8428 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8430 tree base0, offset0, base1, offset1;
8432 if (extract_array_ref (arg0, &base0, &offset0)
8433 && extract_array_ref (arg1, &base1, &offset1)
8434 && operand_equal_p (base0, base1, 0))
8436 tree signed_size_type_node;
8437 signed_size_type_node = signed_type_for (size_type_node);
8439 /* By converting to signed size type we cover middle-end pointer
8440 arithmetic which operates on unsigned pointer types of size
8441 type size and ARRAY_REF offsets which are properly sign or
8442 zero extended from their type in case it is narrower than
8444 if (offset0 == NULL_TREE)
8445 offset0 = build_int_cst (signed_size_type_node, 0);
8447 offset0 = fold_convert (signed_size_type_node, offset0);
8448 if (offset1 == NULL_TREE)
8449 offset1 = build_int_cst (signed_size_type_node, 0);
8451 offset1 = fold_convert (signed_size_type_node, offset1);
8453 return fold_build2 (code, type, offset0, offset1);
8457 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8458 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8459 the resulting offset is smaller in absolute value than the
8461 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8462 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8463 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8464 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8465 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8466 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8467 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8469 tree const1 = TREE_OPERAND (arg0, 1);
8470 tree const2 = TREE_OPERAND (arg1, 1);
8471 tree variable1 = TREE_OPERAND (arg0, 0);
8472 tree variable2 = TREE_OPERAND (arg1, 0);
8474 const char * const warnmsg = G_("assuming signed overflow does not "
8475 "occur when combining constants around "
8478 /* Put the constant on the side where it doesn't overflow and is
8479 of lower absolute value than before. */
8480 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8481 ? MINUS_EXPR : PLUS_EXPR,
/* The tree_int_cst_compare check verifies the combined constant did
   not grow in magnitude relative to the side it is moved to.  */
8483 if (!TREE_OVERFLOW (cst)
8484 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8486 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8487 return fold_build2 (code, type,
8489 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
/* Symmetric attempt: move the combined constant to the ARG0 side
   instead.  */
8493 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8494 ? MINUS_EXPR : PLUS_EXPR,
8496 if (!TREE_OVERFLOW (cst)
8497 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8499 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8500 return fold_build2 (code, type,
8501 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8507 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8508 signed arithmetic case. That form is created by the compiler
8509 often enough for folding it to be of value. One example is in
8510 computing loop trip counts after Operator Strength Reduction. */
8511 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8512 && TREE_CODE (arg0) == MULT_EXPR
8513 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8514 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8515 && integer_zerop (arg1))
8517 tree const1 = TREE_OPERAND (arg0, 1);
8518 tree const2 = arg1; /* zero */
8519 tree variable1 = TREE_OPERAND (arg0, 0);
8520 enum tree_code cmp_code = code;
/* A zero multiplier would make the transformation wrong; it should
   have been folded away before reaching here.  */
8522 gcc_assert (!integer_zerop (const1));
8524 fold_overflow_warning (("assuming signed overflow does not occur when "
8525 "eliminating multiplication in comparison "
8527 WARN_STRICT_OVERFLOW_COMPARISON);
8529 /* If const1 is negative we swap the sense of the comparison. */
8530 if (tree_int_cst_sgn (const1) < 0)
8531 cmp_code = swap_tree_comparison (cmp_code);
8533 return fold_build2 (cmp_code, type, variable1, const2);
8536 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8540 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8542 tree targ0 = strip_float_extensions (arg0);
8543 tree targ1 = strip_float_extensions (arg1);
/* NEWTYPE is the wider of the two underlying float types; the
   comparison can be done there if it is still narrower than the
   extended type actually used.  */
8544 tree newtype = TREE_TYPE (targ0);
8546 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8547 newtype = TREE_TYPE (targ1);
8549 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8550 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8551 return fold_build2 (code, type, fold_convert (newtype, targ0),
8552 fold_convert (newtype, targ1));
8554 /* (-a) CMP (-b) -> b CMP a */
8555 if (TREE_CODE (arg0) == NEGATE_EXPR
8556 && TREE_CODE (arg1) == NEGATE_EXPR)
8557 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8558 TREE_OPERAND (arg0, 0));
8560 if (TREE_CODE (arg1) == REAL_CST)
8562 REAL_VALUE_TYPE cst;
8563 cst = TREE_REAL_CST (arg1);
8565 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8566 if (TREE_CODE (arg0) == NEGATE_EXPR)
8567 return fold_build2 (swap_tree_comparison (code), type,
8568 TREE_OPERAND (arg0, 0),
8569 build_real (TREE_TYPE (arg1),
8570 REAL_VALUE_NEGATE (cst)));
8572 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8573 /* a CMP (-0) -> a CMP 0 */
8574 if (REAL_VALUE_MINUS_ZERO (cst))
8575 return fold_build2 (code, type, arg0,
8576 build_real (TREE_TYPE (arg1), dconst0));
8578 /* x != NaN is always true, other ops are always false. */
8579 if (REAL_VALUE_ISNAN (cst)
8580 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8582 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8583 return omit_one_operand (type, tem, arg0);
8586 /* Fold comparisons against infinity. */
8587 if (REAL_VALUE_ISINF (cst))
8589 tem = fold_inf_compare (code, type, arg0, arg1);
8590 if (tem != NULL_TREE)
8595 /* If this is a comparison of a real constant with a PLUS_EXPR
8596 or a MINUS_EXPR of a real constant, we can convert it into a
8597 comparison with a revised real constant as long as no overflow
8598 occurs when unsafe_math_optimizations are enabled. */
8599 if (flag_unsafe_math_optimizations
8600 && TREE_CODE (arg1) == REAL_CST
8601 && (TREE_CODE (arg0) == PLUS_EXPR
8602 || TREE_CODE (arg0) == MINUS_EXPR)
8603 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8604 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8605 ? MINUS_EXPR : PLUS_EXPR,
8606 arg1, TREE_OPERAND (arg0, 1), 0))
8607 && !TREE_OVERFLOW (tem))
8608 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8610 /* Likewise, we can simplify a comparison of a real constant with
8611 a MINUS_EXPR whose first operand is also a real constant, i.e.
8612 (c1 - x) < c2 becomes x > c1-c2. */
8613 if (flag_unsafe_math_optimizations
8614 && TREE_CODE (arg1) == REAL_CST
8615 && TREE_CODE (arg0) == MINUS_EXPR
8616 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8617 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8619 && !TREE_OVERFLOW (tem))
8620 return fold_build2 (swap_tree_comparison (code), type,
8621 TREE_OPERAND (arg0, 1), tem);
8623 /* Fold comparisons against built-in math functions. */
8624 if (TREE_CODE (arg1) == REAL_CST
8625 && flag_unsafe_math_optimizations
8626 && ! flag_errno_math)
8628 enum built_in_function fcode = builtin_mathfn_code (arg0);
8630 if (fcode != END_BUILTINS)
8632 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8633 if (tem != NULL_TREE)
8639 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8640 if (TREE_CONSTANT (arg1)
8641 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8642 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8643 /* This optimization is invalid for ordered comparisons
8644 if CONST+INCR overflows or if foo+incr might overflow.
8645 This optimization is invalid for floating point due to rounding.
8646 For pointer types we assume overflow doesn't happen. */
8647 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8648 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8649 && (code == EQ_EXPR || code == NE_EXPR))))
8651 tree varop, newconst;
8653 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8655 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8656 arg1, TREE_OPERAND (arg0, 1));
8657 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8658 TREE_OPERAND (arg0, 0),
8659 TREE_OPERAND (arg0, 1));
/* POSTDECREMENT case: mirror of the above with MINUS/PREDECREMENT.  */
8663 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8664 arg1, TREE_OPERAND (arg0, 1));
8665 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8666 TREE_OPERAND (arg0, 0),
8667 TREE_OPERAND (arg0, 1));
8671 /* If VAROP is a reference to a bitfield, we must mask
8672 the constant by the width of the field. */
8673 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8674 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8675 && host_integerp (DECL_SIZE (TREE_OPERAND
8676 (TREE_OPERAND (varop, 0), 1)), 1))
8678 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8679 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8680 tree folded_compare, shift;
8682 /* First check whether the comparison would come out
8683 always the same. If we don't do that we would
8684 change the meaning with the masking. */
8685 folded_compare = fold_build2 (code, type,
8686 TREE_OPERAND (varop, 0), arg1);
8687 if (TREE_CODE (folded_compare) == INTEGER_CST)
8688 return omit_one_operand (type, folded_compare, varop);
/* Mask NEWCONST to the bitfield width by shifting it up and back
   down by (precision - field size) bits.  */
8690 shift = build_int_cst (NULL_TREE,
8691 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8692 shift = fold_convert (TREE_TYPE (varop), shift);
8693 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8695 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8699 return fold_build2 (code, type, varop, newconst);
8702 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8703 && (TREE_CODE (arg0) == NOP_EXPR
8704 || TREE_CODE (arg0) == CONVERT_EXPR))
8706 /* If we are widening one operand of an integer comparison,
8707 see if the other operand is similarly being widened. Perhaps we
8708 can do the comparison in the narrower type. */
8709 tem = fold_widened_comparison (code, type, arg0, arg1);
8713 /* Or if we are changing signedness. */
8714 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8719 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8720 constant, we can simplify it. */
8721 if (TREE_CODE (arg1) == INTEGER_CST
8722 && (TREE_CODE (arg0) == MIN_EXPR
8723 || TREE_CODE (arg0) == MAX_EXPR)
8724 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8726 tem = optimize_minmax_comparison (code, type, op0, op1);
8731 /* Simplify comparison of something with itself. (For IEEE
8732 floating-point, we can only do some of these simplifications.) */
8733 if (operand_equal_p (arg0, arg1, 0))
/* EQ/GE/LE against itself is true for non-floats, or floats that do
   not honor NaNs; GE/LE otherwise degrades to EQ.  */
8738 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8739 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8740 return constant_boolean_node (1, type);
8745 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8746 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8747 return constant_boolean_node (1, type);
8748 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8751 /* For NE, we can only do this simplification if integer
8752 or we don't honor IEEE floating point NaNs. */
8753 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8754 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8756 /* ... fall through ... */
8759 return constant_boolean_node (0, type);
8765 /* If we are comparing an expression that just has comparisons
8766 of two integer values, arithmetic expressions of those comparisons,
8767 and constants, we can simplify it. There are only three cases
8768 to check: the two values can either be equal, the first can be
8769 greater, or the second can be greater. Fold the expression for
8770 those three values. Since each value must be 0 or 1, we have
8771 eight possibilities, each of which corresponds to the constant 0
8772 or 1 or one of the six possible comparisons.
8774 This handles common cases like (a > b) == 0 but also handles
8775 expressions like ((x > y) - (y > x)) > 0, which supposedly
8776 occur in macroized code. */
8778 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8780 tree cval1 = 0, cval2 = 0;
8783 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8784 /* Don't handle degenerate cases here; they should already
8785 have been handled anyway. */
8786 && cval1 != 0 && cval2 != 0
8787 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8788 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8789 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8790 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8791 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8792 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8793 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8795 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8796 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8798 /* We can't just pass T to eval_subst in case cval1 or cval2
8799 was the same as ARG1. */
/* Evaluate the comparison under the three orderings of cval1 and
   cval2 (cval1 > cval2, cval1 == cval2, cval1 < cval2) by
   substituting extreme values.  */
8802 = fold_build2 (code, type,
8803 eval_subst (arg0, cval1, maxval,
8807 = fold_build2 (code, type,
8808 eval_subst (arg0, cval1, maxval,
8812 = fold_build2 (code, type,
8813 eval_subst (arg0, cval1, minval,
8817 /* All three of these results should be 0 or 1. Confirm they are.
8818 Then use those values to select the proper code to use. */
8820 if (TREE_CODE (high_result) == INTEGER_CST
8821 && TREE_CODE (equal_result) == INTEGER_CST
8822 && TREE_CODE (low_result) == INTEGER_CST)
8824 /* Make a 3-bit mask with the high-order bit being the
8825 value for `>', the next for '=', and the low for '<'. */
8826 switch ((integer_onep (high_result) * 4)
8827 + (integer_onep (equal_result) * 2)
8828 + integer_onep (low_result))
8832 return omit_one_operand (type, integer_zero_node, arg0);
8853 return omit_one_operand (type, integer_one_node, arg0);
/* save_p means a subexpression must not be evaluated twice, so wrap
   the rebuilt comparison in a SAVE_EXPR.  */
8857 return save_expr (build2 (code, type, cval1, cval2));
8858 return fold_build2 (code, type, cval1, cval2);
8863 /* Fold a comparison of the address of COMPONENT_REFs with the same
8864 type and component to a comparison of the address of the base
8865 object. In short, &x->a OP &y->a to x OP y and
8866 &x->a OP &y.a to x OP &y */
8867 if (TREE_CODE (arg0) == ADDR_EXPR
8868 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8869 && TREE_CODE (arg1) == ADDR_EXPR
8870 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8872 tree cref0 = TREE_OPERAND (arg0, 0);
8873 tree cref1 = TREE_OPERAND (arg1, 0);
/* Same FIELD_DECL on both sides means both refer to the same
   component; compare the containing objects' addresses instead.  */
8874 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8876 tree op0 = TREE_OPERAND (cref0, 0);
8877 tree op1 = TREE_OPERAND (cref1, 0);
8878 return fold_build2 (code, type,
8879 build_fold_addr_expr (op0),
8880 build_fold_addr_expr (op1));
8884 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8885 into a single range test. */
8886 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8887 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8888 && TREE_CODE (arg1) == INTEGER_CST
8889 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8890 && !integer_zerop (TREE_OPERAND (arg0, 1))
8891 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8892 && !TREE_OVERFLOW (arg1))
8894 tem = fold_div_compare (code, type, arg0, arg1);
8895 if (tem != NULL_TREE)
8899 /* Fold ~X op ~Y as Y op X. */
8900 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8901 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8903 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8904 return fold_build2 (code, type,
8905 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
8906 TREE_OPERAND (arg0, 0));
8909 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8910 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8911 && TREE_CODE (arg1) == INTEGER_CST)
8913 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8914 return fold_build2 (swap_tree_comparison (code), type,
8915 TREE_OPERAND (arg0, 0),
8916 fold_build1 (BIT_NOT_EXPR, cmp_type,
8917 fold_convert (cmp_type, arg1)));
8924 /* Subroutine of fold_binary. Optimize complex multiplications of the
8925 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8926 argument EXPR represents the expression "z" of type TYPE. */
8929 fold_mult_zconjz (tree type, tree expr)
/* ITYPE is the component (element) type of the complex TYPE; all the
   real-part arithmetic below is performed in it.  */
8931 tree itype = TREE_TYPE (type);
8932 tree rpart, ipart, tem;
/* Extract the real and imaginary parts of EXPR directly when it is a
   COMPLEX_EXPR or COMPLEX_CST; otherwise wrap EXPR in a SAVE_EXPR so
   it is evaluated once and take REALPART/IMAGPART of the result.  */
8934 if (TREE_CODE (expr) == COMPLEX_EXPR)
8936 rpart = TREE_OPERAND (expr, 0);
8937 ipart = TREE_OPERAND (expr, 1);
8939 else if (TREE_CODE (expr) == COMPLEX_CST)
8941 rpart = TREE_REALPART (expr);
8942 ipart = TREE_IMAGPART (expr);
8946 expr = save_expr (expr);
8947 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8948 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
/* Each part is used twice (squared), so guard against double
   evaluation of side effects.  */
8951 rpart = save_expr (rpart);
8952 ipart = save_expr (ipart);
/* z * conj(z) = (r*r + i*i) + 0i: build the magnitude-squared real
   part and a zero imaginary part.  */
8953 tem = fold_build2 (PLUS_EXPR, itype,
8954 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8955 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8956 return fold_build2 (COMPLEX_EXPR, type, tem,
8957 fold_convert (itype, integer_zero_node));
8961 /* Fold a binary expression of code CODE and type TYPE with operands
8962 OP0 and OP1. Return the folded expression if folding is
8963 successful. Otherwise, return NULL_TREE. */
8966 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8968 enum tree_code_class kind = TREE_CODE_CLASS (code);
8969 tree arg0, arg1, tem;
8970 tree t1 = NULL_TREE;
8971 bool strict_overflow_p;
8973 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8974 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8975 && TREE_CODE_LENGTH (code) == 2
8977 && op1 != NULL_TREE);
8982 /* Strip any conversions that don't change the mode. This is
8983 safe for every expression, except for a comparison expression
8984 because its signedness is derived from its operands. So, in
8985 the latter case, only strip conversions that don't change the
8988 Note that this is done as an internal manipulation within the
8989 constant folder, in order to find the simplest representation
8990 of the arguments so that their form can be studied. In any
8991 cases, the appropriate type conversions should be put back in
8992 the tree that will get out of the constant folder. */
8994 if (kind == tcc_comparison)
8996 STRIP_SIGN_NOPS (arg0);
8997 STRIP_SIGN_NOPS (arg1);
9005 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9006 constant but we can't do arithmetic on them. */
9007 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9008 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9009 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9010 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9012 if (kind == tcc_binary)
9013 tem = const_binop (code, arg0, arg1, 0);
9014 else if (kind == tcc_comparison)
9015 tem = fold_relational_const (code, type, arg0, arg1);
9019 if (tem != NULL_TREE)
9021 if (TREE_TYPE (tem) != type)
9022 tem = fold_convert (type, tem);
9027 /* If this is a commutative operation, and ARG0 is a constant, move it
9028 to ARG1 to reduce the number of tests below. */
9029 if (commutative_tree_code (code)
9030 && tree_swap_operands_p (arg0, arg1, true))
9031 return fold_build2 (code, type, op1, op0);
9033 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9035 First check for cases where an arithmetic operation is applied to a
9036 compound, conditional, or comparison operation. Push the arithmetic
9037 operation inside the compound or conditional to see if any folding
9038 can then be done. Convert comparison to conditional for this purpose.
9039 The also optimizes non-constant cases that used to be done in
9042 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9043 one of the operands is a comparison and the other is a comparison, a
9044 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9045 code below would make the expression more complex. Change it to a
9046 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9047 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9049 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9050 || code == EQ_EXPR || code == NE_EXPR)
9051 && ((truth_value_p (TREE_CODE (arg0))
9052 && (truth_value_p (TREE_CODE (arg1))
9053 || (TREE_CODE (arg1) == BIT_AND_EXPR
9054 && integer_onep (TREE_OPERAND (arg1, 1)))))
9055 || (truth_value_p (TREE_CODE (arg1))
9056 && (truth_value_p (TREE_CODE (arg0))
9057 || (TREE_CODE (arg0) == BIT_AND_EXPR
9058 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9060 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9061 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9064 fold_convert (boolean_type_node, arg0),
9065 fold_convert (boolean_type_node, arg1));
9067 if (code == EQ_EXPR)
9068 tem = invert_truthvalue (tem);
9070 return fold_convert (type, tem);
9073 if (TREE_CODE_CLASS (code) == tcc_binary
9074 || TREE_CODE_CLASS (code) == tcc_comparison)
9076 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9077 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9078 fold_build2 (code, type,
9079 TREE_OPERAND (arg0, 1), op1));
9080 if (TREE_CODE (arg1) == COMPOUND_EXPR
9081 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9082 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9083 fold_build2 (code, type,
9084 op0, TREE_OPERAND (arg1, 1)));
9086 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9088 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9090 /*cond_first_p=*/1);
9091 if (tem != NULL_TREE)
9095 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9097 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9099 /*cond_first_p=*/0);
9100 if (tem != NULL_TREE)
9108 /* A + (-B) -> A - B */
9109 if (TREE_CODE (arg1) == NEGATE_EXPR)
9110 return fold_build2 (MINUS_EXPR, type,
9111 fold_convert (type, arg0),
9112 fold_convert (type, TREE_OPERAND (arg1, 0)));
9113 /* (-A) + B -> B - A */
9114 if (TREE_CODE (arg0) == NEGATE_EXPR
9115 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9116 return fold_build2 (MINUS_EXPR, type,
9117 fold_convert (type, arg1),
9118 fold_convert (type, TREE_OPERAND (arg0, 0)));
9119 /* Convert ~A + 1 to -A. */
9120 if (INTEGRAL_TYPE_P (type)
9121 && TREE_CODE (arg0) == BIT_NOT_EXPR
9122 && integer_onep (arg1))
9123 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9125 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9127 if ((TREE_CODE (arg0) == MULT_EXPR
9128 || TREE_CODE (arg1) == MULT_EXPR)
9129 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9131 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9136 if (! FLOAT_TYPE_P (type))
9138 if (integer_zerop (arg1))
9139 return non_lvalue (fold_convert (type, arg0));
9142 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9143 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9144 && !TYPE_OVERFLOW_TRAPS (type))
9146 t1 = build_int_cst_type (type, -1);
9147 return omit_one_operand (type, t1, arg1);
9151 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9152 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9153 && !TYPE_OVERFLOW_TRAPS (type))
9155 t1 = build_int_cst_type (type, -1);
9156 return omit_one_operand (type, t1, arg0);
9159 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9160 with a constant, and the two constants have no bits in common,
9161 we should treat this as a BIT_IOR_EXPR since this may produce more
9163 if (TREE_CODE (arg0) == BIT_AND_EXPR
9164 && TREE_CODE (arg1) == BIT_AND_EXPR
9165 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9166 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9167 && integer_zerop (const_binop (BIT_AND_EXPR,
9168 TREE_OPERAND (arg0, 1),
9169 TREE_OPERAND (arg1, 1), 0)))
9171 code = BIT_IOR_EXPR;
9175 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9176 (plus (plus (mult) (mult)) (foo)) so that we can
9177 take advantage of the factoring cases below. */
9178 if (((TREE_CODE (arg0) == PLUS_EXPR
9179 || TREE_CODE (arg0) == MINUS_EXPR)
9180 && TREE_CODE (arg1) == MULT_EXPR)
9181 || ((TREE_CODE (arg1) == PLUS_EXPR
9182 || TREE_CODE (arg1) == MINUS_EXPR)
9183 && TREE_CODE (arg0) == MULT_EXPR))
9185 tree parg0, parg1, parg, marg;
9186 enum tree_code pcode;
9188 if (TREE_CODE (arg1) == MULT_EXPR)
9189 parg = arg0, marg = arg1;
9191 parg = arg1, marg = arg0;
9192 pcode = TREE_CODE (parg);
9193 parg0 = TREE_OPERAND (parg, 0);
9194 parg1 = TREE_OPERAND (parg, 1);
9198 if (TREE_CODE (parg0) == MULT_EXPR
9199 && TREE_CODE (parg1) != MULT_EXPR)
9200 return fold_build2 (pcode, type,
9201 fold_build2 (PLUS_EXPR, type,
9202 fold_convert (type, parg0),
9203 fold_convert (type, marg)),
9204 fold_convert (type, parg1));
9205 if (TREE_CODE (parg0) != MULT_EXPR
9206 && TREE_CODE (parg1) == MULT_EXPR)
9207 return fold_build2 (PLUS_EXPR, type,
9208 fold_convert (type, parg0),
9209 fold_build2 (pcode, type,
9210 fold_convert (type, marg),
9215 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
9216 of the array. Loop optimizer sometimes produce this type of
9218 if (TREE_CODE (arg0) == ADDR_EXPR)
9220 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
9222 return fold_convert (type, tem);
9224 else if (TREE_CODE (arg1) == ADDR_EXPR)
9226 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
9228 return fold_convert (type, tem);
9233 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9234 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9235 return non_lvalue (fold_convert (type, arg0));
9237 /* Likewise if the operands are reversed. */
9238 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9239 return non_lvalue (fold_convert (type, arg1));
9241 /* Convert X + -C into X - C. */
9242 if (TREE_CODE (arg1) == REAL_CST
9243 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9245 tem = fold_negate_const (arg1, type);
9246 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9247 return fold_build2 (MINUS_EXPR, type,
9248 fold_convert (type, arg0),
9249 fold_convert (type, tem));
9252 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9253 to __complex__ ( x, y ). This is not the same for SNaNs or
9254 if signed zeros are involved. */
9255 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9256 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9257 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9259 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9260 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9261 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9262 bool arg0rz = false, arg0iz = false;
9263 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9264 || (arg0i && (arg0iz = real_zerop (arg0i))))
9266 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9267 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9268 if (arg0rz && arg1i && real_zerop (arg1i))
9270 tree rp = arg1r ? arg1r
9271 : build1 (REALPART_EXPR, rtype, arg1);
9272 tree ip = arg0i ? arg0i
9273 : build1 (IMAGPART_EXPR, rtype, arg0);
9274 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9276 else if (arg0iz && arg1r && real_zerop (arg1r))
9278 tree rp = arg0r ? arg0r
9279 : build1 (REALPART_EXPR, rtype, arg0);
9280 tree ip = arg1i ? arg1i
9281 : build1 (IMAGPART_EXPR, rtype, arg1);
9282 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9287 if (flag_unsafe_math_optimizations
9288 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9289 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9290 && (tem = distribute_real_division (code, type, arg0, arg1)))
9293 /* Convert x+x into x*2.0. */
9294 if (operand_equal_p (arg0, arg1, 0)
9295 && SCALAR_FLOAT_TYPE_P (type))
9296 return fold_build2 (MULT_EXPR, type, arg0,
9297 build_real (type, dconst2));
9299 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9300 if (flag_unsafe_math_optimizations
9301 && TREE_CODE (arg1) == PLUS_EXPR
9302 && TREE_CODE (arg0) != MULT_EXPR)
9304 tree tree10 = TREE_OPERAND (arg1, 0);
9305 tree tree11 = TREE_OPERAND (arg1, 1);
9306 if (TREE_CODE (tree11) == MULT_EXPR
9307 && TREE_CODE (tree10) == MULT_EXPR)
9310 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9311 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9314 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
9315 if (flag_unsafe_math_optimizations
9316 && TREE_CODE (arg0) == PLUS_EXPR
9317 && TREE_CODE (arg1) != MULT_EXPR)
9319 tree tree00 = TREE_OPERAND (arg0, 0);
9320 tree tree01 = TREE_OPERAND (arg0, 1);
9321 if (TREE_CODE (tree01) == MULT_EXPR
9322 && TREE_CODE (tree00) == MULT_EXPR)
9325 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9326 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9332 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9333 is a rotate of A by C1 bits. */
9334 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9335 is a rotate of A by B bits. */
9337 enum tree_code code0, code1;
9338 code0 = TREE_CODE (arg0);
9339 code1 = TREE_CODE (arg1);
9340 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9341 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9342 && operand_equal_p (TREE_OPERAND (arg0, 0),
9343 TREE_OPERAND (arg1, 0), 0)
9344 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9346 tree tree01, tree11;
9347 enum tree_code code01, code11;
9349 tree01 = TREE_OPERAND (arg0, 1);
9350 tree11 = TREE_OPERAND (arg1, 1);
9351 STRIP_NOPS (tree01);
9352 STRIP_NOPS (tree11);
9353 code01 = TREE_CODE (tree01);
9354 code11 = TREE_CODE (tree11);
9355 if (code01 == INTEGER_CST
9356 && code11 == INTEGER_CST
9357 && TREE_INT_CST_HIGH (tree01) == 0
9358 && TREE_INT_CST_HIGH (tree11) == 0
9359 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9360 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9361 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9362 code0 == LSHIFT_EXPR ? tree01 : tree11);
9363 else if (code11 == MINUS_EXPR)
9365 tree tree110, tree111;
9366 tree110 = TREE_OPERAND (tree11, 0);
9367 tree111 = TREE_OPERAND (tree11, 1);
9368 STRIP_NOPS (tree110);
9369 STRIP_NOPS (tree111);
9370 if (TREE_CODE (tree110) == INTEGER_CST
9371 && 0 == compare_tree_int (tree110,
9373 (TREE_TYPE (TREE_OPERAND
9375 && operand_equal_p (tree01, tree111, 0))
9376 return build2 ((code0 == LSHIFT_EXPR
9379 type, TREE_OPERAND (arg0, 0), tree01);
9381 else if (code01 == MINUS_EXPR)
9383 tree tree010, tree011;
9384 tree010 = TREE_OPERAND (tree01, 0);
9385 tree011 = TREE_OPERAND (tree01, 1);
9386 STRIP_NOPS (tree010);
9387 STRIP_NOPS (tree011);
9388 if (TREE_CODE (tree010) == INTEGER_CST
9389 && 0 == compare_tree_int (tree010,
9391 (TREE_TYPE (TREE_OPERAND
9393 && operand_equal_p (tree11, tree011, 0))
9394 return build2 ((code0 != LSHIFT_EXPR
9397 type, TREE_OPERAND (arg0, 0), tree11);
9403 /* In most languages, can't associate operations on floats through
9404 parentheses. Rather than remember where the parentheses were, we
9405 don't associate floats at all, unless the user has specified
9406 -funsafe-math-optimizations. */
9408 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9410 tree var0, con0, lit0, minus_lit0;
9411 tree var1, con1, lit1, minus_lit1;
9414 /* Split both trees into variables, constants, and literals. Then
9415 associate each group together, the constants with literals,
9416 then the result with variables. This increases the chances of
9417 literals being recombined later and of generating relocatable
9418 expressions for the sum of a constant and literal. */
9419 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9420 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9421 code == MINUS_EXPR);
9423 /* With undefined overflow we can only associate constants
9424 with one variable. */
9425 if ((POINTER_TYPE_P (type)
9426 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9432 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9433 tmp0 = TREE_OPERAND (tmp0, 0);
9434 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9435 tmp1 = TREE_OPERAND (tmp1, 0);
9436 /* The only case we can still associate with two variables
9437 is if they are the same, modulo negation. */
9438 if (!operand_equal_p (tmp0, tmp1, 0))
9442 /* Only do something if we found more than two objects. Otherwise,
9443 nothing has changed and we risk infinite recursion. */
9445 && (2 < ((var0 != 0) + (var1 != 0)
9446 + (con0 != 0) + (con1 != 0)
9447 + (lit0 != 0) + (lit1 != 0)
9448 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9450 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9451 if (code == MINUS_EXPR)
9454 var0 = associate_trees (var0, var1, code, type);
9455 con0 = associate_trees (con0, con1, code, type);
9456 lit0 = associate_trees (lit0, lit1, code, type);
9457 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9459 /* Preserve the MINUS_EXPR if the negative part of the literal is
9460 greater than the positive part. Otherwise, the multiplicative
9461 folding code (i.e extract_muldiv) may be fooled in case
9462 unsigned constants are subtracted, like in the following
9463 example: ((X*2 + 4) - 8U)/2. */
9464 if (minus_lit0 && lit0)
9466 if (TREE_CODE (lit0) == INTEGER_CST
9467 && TREE_CODE (minus_lit0) == INTEGER_CST
9468 && tree_int_cst_lt (lit0, minus_lit0))
9470 minus_lit0 = associate_trees (minus_lit0, lit0,
9476 lit0 = associate_trees (lit0, minus_lit0,
9484 return fold_convert (type,
9485 associate_trees (var0, minus_lit0,
9489 con0 = associate_trees (con0, minus_lit0,
9491 return fold_convert (type,
9492 associate_trees (var0, con0,
9497 con0 = associate_trees (con0, lit0, code, type);
9498 return fold_convert (type, associate_trees (var0, con0,
9506 /* A - (-B) -> A + B */
9507 if (TREE_CODE (arg1) == NEGATE_EXPR)
9508 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9509 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9510 if (TREE_CODE (arg0) == NEGATE_EXPR
9511 && (FLOAT_TYPE_P (type)
9512 || INTEGRAL_TYPE_P (type))
9513 && negate_expr_p (arg1)
9514 && reorder_operands_p (arg0, arg1))
9515 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9516 TREE_OPERAND (arg0, 0));
9517 /* Convert -A - 1 to ~A. */
9518 if (INTEGRAL_TYPE_P (type)
9519 && TREE_CODE (arg0) == NEGATE_EXPR
9520 && integer_onep (arg1)
9521 && !TYPE_OVERFLOW_TRAPS (type))
9522 return fold_build1 (BIT_NOT_EXPR, type,
9523 fold_convert (type, TREE_OPERAND (arg0, 0)));
9525 /* Convert -1 - A to ~A. */
9526 if (INTEGRAL_TYPE_P (type)
9527 && integer_all_onesp (arg0))
9528 return fold_build1 (BIT_NOT_EXPR, type, op1);
9530 if (! FLOAT_TYPE_P (type))
9532 if (integer_zerop (arg0))
9533 return negate_expr (fold_convert (type, arg1));
9534 if (integer_zerop (arg1))
9535 return non_lvalue (fold_convert (type, arg0));
9537 /* Fold A - (A & B) into ~B & A. */
9538 if (!TREE_SIDE_EFFECTS (arg0)
9539 && TREE_CODE (arg1) == BIT_AND_EXPR)
9541 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9542 return fold_build2 (BIT_AND_EXPR, type,
9543 fold_build1 (BIT_NOT_EXPR, type,
9544 TREE_OPERAND (arg1, 0)),
9546 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9547 return fold_build2 (BIT_AND_EXPR, type,
9548 fold_build1 (BIT_NOT_EXPR, type,
9549 TREE_OPERAND (arg1, 1)),
9553 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9554 any power of 2 minus 1. */
9555 if (TREE_CODE (arg0) == BIT_AND_EXPR
9556 && TREE_CODE (arg1) == BIT_AND_EXPR
9557 && operand_equal_p (TREE_OPERAND (arg0, 0),
9558 TREE_OPERAND (arg1, 0), 0))
9560 tree mask0 = TREE_OPERAND (arg0, 1);
9561 tree mask1 = TREE_OPERAND (arg1, 1);
9562 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9564 if (operand_equal_p (tem, mask1, 0))
9566 tem = fold_build2 (BIT_XOR_EXPR, type,
9567 TREE_OPERAND (arg0, 0), mask1);
9568 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9573 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9574 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9575 return non_lvalue (fold_convert (type, arg0));
9577 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9578 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9579 (-ARG1 + ARG0) reduces to -ARG1. */
9580 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9581 return negate_expr (fold_convert (type, arg1));
9583 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9584 __complex__ ( x, -y ). This is not the same for SNaNs or if
9585 signed zeros are involved. */
9586 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9587 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9588 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9590 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9591 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9592 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9593 bool arg0rz = false, arg0iz = false;
9594 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9595 || (arg0i && (arg0iz = real_zerop (arg0i))))
9597 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9598 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9599 if (arg0rz && arg1i && real_zerop (arg1i))
9601 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9603 : build1 (REALPART_EXPR, rtype, arg1));
9604 tree ip = arg0i ? arg0i
9605 : build1 (IMAGPART_EXPR, rtype, arg0);
9606 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9608 else if (arg0iz && arg1r && real_zerop (arg1r))
9610 tree rp = arg0r ? arg0r
9611 : build1 (REALPART_EXPR, rtype, arg0);
9612 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9614 : build1 (IMAGPART_EXPR, rtype, arg1));
9615 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9620 /* Fold &x - &x. This can happen from &x.foo - &x.
9621 This is unsafe for certain floats even in non-IEEE formats.
9622 In IEEE, it is unsafe because it does wrong for NaNs.
9623 Also note that operand_equal_p is always false if an operand
9626 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9627 && operand_equal_p (arg0, arg1, 0))
9628 return fold_convert (type, integer_zero_node);
9630 /* A - B -> A + (-B) if B is easily negatable. */
9631 if (negate_expr_p (arg1)
9632 && ((FLOAT_TYPE_P (type)
9633 /* Avoid this transformation if B is a positive REAL_CST. */
9634 && (TREE_CODE (arg1) != REAL_CST
9635 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9636 || INTEGRAL_TYPE_P (type)))
9637 return fold_build2 (PLUS_EXPR, type,
9638 fold_convert (type, arg0),
9639 fold_convert (type, negate_expr (arg1)));
9641 /* Try folding difference of addresses. */
9645 if ((TREE_CODE (arg0) == ADDR_EXPR
9646 || TREE_CODE (arg1) == ADDR_EXPR)
9647 && ptr_difference_const (arg0, arg1, &diff))
9648 return build_int_cst_type (type, diff);
9651 /* Fold &a[i] - &a[j] to i-j. */
9652 if (TREE_CODE (arg0) == ADDR_EXPR
9653 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9654 && TREE_CODE (arg1) == ADDR_EXPR
9655 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9657 tree aref0 = TREE_OPERAND (arg0, 0);
9658 tree aref1 = TREE_OPERAND (arg1, 0);
9659 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9660 TREE_OPERAND (aref1, 0), 0))
9662 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9663 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9664 tree esz = array_ref_element_size (aref0);
9665 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9666 return fold_build2 (MULT_EXPR, type, diff,
9667 fold_convert (type, esz));
9672 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9673 of the array. Loop optimizer sometimes produce this type of
9675 if (TREE_CODE (arg0) == ADDR_EXPR)
9677 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9679 return fold_convert (type, tem);
9682 if (flag_unsafe_math_optimizations
9683 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9684 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9685 && (tem = distribute_real_division (code, type, arg0, arg1)))
9688 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9690 if ((TREE_CODE (arg0) == MULT_EXPR
9691 || TREE_CODE (arg1) == MULT_EXPR)
9692 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9694 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9702 /* (-A) * (-B) -> A * B */
9703 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9704 return fold_build2 (MULT_EXPR, type,
9705 fold_convert (type, TREE_OPERAND (arg0, 0)),
9706 fold_convert (type, negate_expr (arg1)));
9707 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9708 return fold_build2 (MULT_EXPR, type,
9709 fold_convert (type, negate_expr (arg0)),
9710 fold_convert (type, TREE_OPERAND (arg1, 0)));
9712 if (! FLOAT_TYPE_P (type))
9714 if (integer_zerop (arg1))
9715 return omit_one_operand (type, arg1, arg0);
9716 if (integer_onep (arg1))
9717 return non_lvalue (fold_convert (type, arg0));
9718 /* Transform x * -1 into -x. */
9719 if (integer_all_onesp (arg1))
9720 return fold_convert (type, negate_expr (arg0));
9721 /* Transform x * -C into -x * C if x is easily negatable. */
9722 if (TREE_CODE (arg1) == INTEGER_CST
9723 && tree_int_cst_sgn (arg1) == -1
9724 && negate_expr_p (arg0)
9725 && (tem = negate_expr (arg1)) != arg1
9726 && !TREE_OVERFLOW (tem))
9727 return fold_build2 (MULT_EXPR, type,
9728 negate_expr (arg0), tem);
9730 /* (a * (1 << b)) is (a << b) */
9731 if (TREE_CODE (arg1) == LSHIFT_EXPR
9732 && integer_onep (TREE_OPERAND (arg1, 0)))
9733 return fold_build2 (LSHIFT_EXPR, type, arg0,
9734 TREE_OPERAND (arg1, 1));
9735 if (TREE_CODE (arg0) == LSHIFT_EXPR
9736 && integer_onep (TREE_OPERAND (arg0, 0)))
9737 return fold_build2 (LSHIFT_EXPR, type, arg1,
9738 TREE_OPERAND (arg0, 1));
9740 strict_overflow_p = false;
9741 if (TREE_CODE (arg1) == INTEGER_CST
9742 && 0 != (tem = extract_muldiv (op0,
9743 fold_convert (type, arg1),
9745 &strict_overflow_p)))
9747 if (strict_overflow_p)
9748 fold_overflow_warning (("assuming signed overflow does not "
9749 "occur when simplifying "
9751 WARN_STRICT_OVERFLOW_MISC);
9752 return fold_convert (type, tem);
9755 /* Optimize z * conj(z) for integer complex numbers. */
9756 if (TREE_CODE (arg0) == CONJ_EXPR
9757 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9758 return fold_mult_zconjz (type, arg1);
9759 if (TREE_CODE (arg1) == CONJ_EXPR
9760 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9761 return fold_mult_zconjz (type, arg0);
9765 /* Maybe fold x * 0 to 0. The expressions aren't the same
9766 when x is NaN, since x * 0 is also NaN. Nor are they the
9767 same in modes with signed zeros, since multiplying a
9768 negative value by 0 gives -0, not +0. */
9769 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9770 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9771 && real_zerop (arg1))
9772 return omit_one_operand (type, arg1, arg0);
9773 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9774 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9775 && real_onep (arg1))
9776 return non_lvalue (fold_convert (type, arg0));
9778 /* Transform x * -1.0 into -x. */
9779 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9780 && real_minus_onep (arg1))
9781 return fold_convert (type, negate_expr (arg0));
9783 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9784 if (flag_unsafe_math_optimizations
9785 && TREE_CODE (arg0) == RDIV_EXPR
9786 && TREE_CODE (arg1) == REAL_CST
9787 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9789 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9792 return fold_build2 (RDIV_EXPR, type, tem,
9793 TREE_OPERAND (arg0, 1));
9796 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9797 if (operand_equal_p (arg0, arg1, 0))
9799 tree tem = fold_strip_sign_ops (arg0);
9800 if (tem != NULL_TREE)
9802 tem = fold_convert (type, tem);
9803 return fold_build2 (MULT_EXPR, type, tem, tem);
9807 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9808 This is not the same for NaNs or if signed zeros are
9810 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9811 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9812 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9813 && TREE_CODE (arg1) == COMPLEX_CST
9814 && real_zerop (TREE_REALPART (arg1)))
9816 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9817 if (real_onep (TREE_IMAGPART (arg1)))
9818 return fold_build2 (COMPLEX_EXPR, type,
9819 negate_expr (fold_build1 (IMAGPART_EXPR,
9821 fold_build1 (REALPART_EXPR, rtype, arg0));
9822 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9823 return fold_build2 (COMPLEX_EXPR, type,
9824 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9825 negate_expr (fold_build1 (REALPART_EXPR,
9829 /* Optimize z * conj(z) for floating point complex numbers.
9830 Guarded by flag_unsafe_math_optimizations as non-finite
9831 imaginary components don't produce scalar results. */
9832 if (flag_unsafe_math_optimizations
9833 && TREE_CODE (arg0) == CONJ_EXPR
9834 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9835 return fold_mult_zconjz (type, arg1);
9836 if (flag_unsafe_math_optimizations
9837 && TREE_CODE (arg1) == CONJ_EXPR
9838 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9839 return fold_mult_zconjz (type, arg0);
9841 if (flag_unsafe_math_optimizations)
9843 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9844 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9846 /* Optimizations of root(...)*root(...). */
9847 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9850 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9851 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9853 /* Optimize sqrt(x)*sqrt(x) as x. */
9854 if (BUILTIN_SQRT_P (fcode0)
9855 && operand_equal_p (arg00, arg10, 0)
9856 && ! HONOR_SNANS (TYPE_MODE (type)))
9859 /* Optimize root(x)*root(y) as root(x*y). */
9860 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9861 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9862 return build_call_expr (rootfn, 1, arg);
9865 /* Optimize expN(x)*expN(y) as expN(x+y). */
9866 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9868 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9869 tree arg = fold_build2 (PLUS_EXPR, type,
9870 CALL_EXPR_ARG (arg0, 0),
9871 CALL_EXPR_ARG (arg1, 0));
9872 return build_call_expr (expfn, 1, arg);
9875 /* Optimizations of pow(...)*pow(...). */
9876 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9877 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9878 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9880 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9881 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9882 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9883 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9885 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9886 if (operand_equal_p (arg01, arg11, 0))
9888 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9889 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9890 return build_call_expr (powfn, 2, arg, arg01);
9893 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9894 if (operand_equal_p (arg00, arg10, 0))
9896 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9897 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9898 return build_call_expr (powfn, 2, arg00, arg);
9902 /* Optimize tan(x)*cos(x) as sin(x). */
9903 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9904 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9905 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9906 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9907 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9908 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9909 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
9910 CALL_EXPR_ARG (arg1, 0), 0))
9912 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9914 if (sinfn != NULL_TREE)
9915 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
9918 /* Optimize x*pow(x,c) as pow(x,c+1). */
9919 if (fcode1 == BUILT_IN_POW
9920 || fcode1 == BUILT_IN_POWF
9921 || fcode1 == BUILT_IN_POWL)
9923 tree arg10 = CALL_EXPR_ARG (arg1, 0);
9924 tree arg11 = CALL_EXPR_ARG (arg1, 1);
9925 if (TREE_CODE (arg11) == REAL_CST
9926 && !TREE_OVERFLOW (arg11)
9927 && operand_equal_p (arg0, arg10, 0))
9929 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
9933 c = TREE_REAL_CST (arg11);
9934 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9935 arg = build_real (type, c);
9936 return build_call_expr (powfn, 2, arg0, arg);
9940 /* Optimize pow(x,c)*x as pow(x,c+1). */
9941 if (fcode0 == BUILT_IN_POW
9942 || fcode0 == BUILT_IN_POWF
9943 || fcode0 == BUILT_IN_POWL)
9945 tree arg00 = CALL_EXPR_ARG (arg0, 0);
9946 tree arg01 = CALL_EXPR_ARG (arg0, 1);
9947 if (TREE_CODE (arg01) == REAL_CST
9948 && !TREE_OVERFLOW (arg01)
9949 && operand_equal_p (arg1, arg00, 0))
9951 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
9955 c = TREE_REAL_CST (arg01);
9956 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9957 arg = build_real (type, c);
9958 return build_call_expr (powfn, 2, arg1, arg);
9962 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9964 && operand_equal_p (arg0, arg1, 0))
9966 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9970 tree arg = build_real (type, dconst2);
9971 return build_call_expr (powfn, 2, arg0, arg);
9980 if (integer_all_onesp (arg1))
9981 return omit_one_operand (type, arg1, arg0);
9982 if (integer_zerop (arg1))
9983 return non_lvalue (fold_convert (type, arg0));
9984 if (operand_equal_p (arg0, arg1, 0))
9985 return non_lvalue (fold_convert (type, arg0));
9988 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9989 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9991 t1 = build_int_cst_type (type, -1);
9992 return omit_one_operand (type, t1, arg1);
9996 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9997 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9999 t1 = build_int_cst_type (type, -1);
10000 return omit_one_operand (type, t1, arg0);
10003 /* Canonicalize (X & C1) | C2. */
10004 if (TREE_CODE (arg0) == BIT_AND_EXPR
10005 && TREE_CODE (arg1) == INTEGER_CST
10006 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10008 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
10009 int width = TYPE_PRECISION (type);
10010 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10011 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10012 hi2 = TREE_INT_CST_HIGH (arg1);
10013 lo2 = TREE_INT_CST_LOW (arg1);
10015 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10016 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10017 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10019 if (width > HOST_BITS_PER_WIDE_INT)
10021 mhi = (unsigned HOST_WIDE_INT) -1
10022 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10028 mlo = (unsigned HOST_WIDE_INT) -1
10029 >> (HOST_BITS_PER_WIDE_INT - width);
10032 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10033 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10034 return fold_build2 (BIT_IOR_EXPR, type,
10035 TREE_OPERAND (arg0, 0), arg1);
10037 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
10040 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
10041 return fold_build2 (BIT_IOR_EXPR, type,
10042 fold_build2 (BIT_AND_EXPR, type,
10043 TREE_OPERAND (arg0, 0),
10044 build_int_cst_wide (type,
10050 /* (X & Y) | Y is (X, Y). */
10051 if (TREE_CODE (arg0) == BIT_AND_EXPR
10052 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10053 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10054 /* (X & Y) | X is (Y, X). */
10055 if (TREE_CODE (arg0) == BIT_AND_EXPR
10056 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10057 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10058 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10059 /* X | (X & Y) is (Y, X). */
10060 if (TREE_CODE (arg1) == BIT_AND_EXPR
10061 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10062 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10063 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10064 /* X | (Y & X) is (Y, X). */
10065 if (TREE_CODE (arg1) == BIT_AND_EXPR
10066 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10067 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10068 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10070 t1 = distribute_bit_expr (code, type, arg0, arg1);
10071 if (t1 != NULL_TREE)
10074 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10076 This results in more efficient code for machines without a NAND
10077 instruction. Combine will canonicalize to the first form
10078 which will allow use of NAND instructions provided by the
10079 backend if they exist. */
10080 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10081 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10083 return fold_build1 (BIT_NOT_EXPR, type,
10084 build2 (BIT_AND_EXPR, type,
10085 TREE_OPERAND (arg0, 0),
10086 TREE_OPERAND (arg1, 0)));
10089 /* See if this can be simplified into a rotate first. If that
10090 is unsuccessful continue in the association code. */
10094 if (integer_zerop (arg1))
10095 return non_lvalue (fold_convert (type, arg0));
10096 if (integer_all_onesp (arg1))
10097 return fold_build1 (BIT_NOT_EXPR, type, arg0);
10098 if (operand_equal_p (arg0, arg1, 0))
10099 return omit_one_operand (type, integer_zero_node, arg0);
10101 /* ~X ^ X is -1. */
10102 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10103 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10105 t1 = build_int_cst_type (type, -1);
10106 return omit_one_operand (type, t1, arg1);
10109 /* X ^ ~X is -1. */
10110 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10111 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10113 t1 = build_int_cst_type (type, -1);
10114 return omit_one_operand (type, t1, arg0);
10117 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10118 with a constant, and the two constants have no bits in common,
10119 we should treat this as a BIT_IOR_EXPR since this may produce more
10120 simplifications. */
10121 if (TREE_CODE (arg0) == BIT_AND_EXPR
10122 && TREE_CODE (arg1) == BIT_AND_EXPR
10123 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10124 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10125 && integer_zerop (const_binop (BIT_AND_EXPR,
10126 TREE_OPERAND (arg0, 1),
10127 TREE_OPERAND (arg1, 1), 0)))
10129 code = BIT_IOR_EXPR;
10133 /* (X | Y) ^ X -> Y & ~ X*/
10134 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10135 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10137 tree t2 = TREE_OPERAND (arg0, 1);
10138 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10140 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10141 fold_convert (type, t1));
10145 /* (Y | X) ^ X -> Y & ~ X*/
10146 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10147 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10149 tree t2 = TREE_OPERAND (arg0, 0);
10150 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10152 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10153 fold_convert (type, t1));
10157 /* X ^ (X | Y) -> Y & ~ X*/
10158 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10159 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10161 tree t2 = TREE_OPERAND (arg1, 1);
10162 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10164 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10165 fold_convert (type, t1));
10169 /* X ^ (Y | X) -> Y & ~ X*/
10170 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10171 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10173 tree t2 = TREE_OPERAND (arg1, 0);
10174 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10176 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10177 fold_convert (type, t1));
10181 /* Convert ~X ^ ~Y to X ^ Y. */
10182 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10183 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10184 return fold_build2 (code, type,
10185 fold_convert (type, TREE_OPERAND (arg0, 0)),
10186 fold_convert (type, TREE_OPERAND (arg1, 0)));
10188 /* Convert ~X ^ C to X ^ ~C. */
10189 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10190 && TREE_CODE (arg1) == INTEGER_CST)
10191 return fold_build2 (code, type,
10192 fold_convert (type, TREE_OPERAND (arg0, 0)),
10193 fold_build1 (BIT_NOT_EXPR, type, arg1));
10195 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10196 if (TREE_CODE (arg0) == BIT_AND_EXPR
10197 && integer_onep (TREE_OPERAND (arg0, 1))
10198 && integer_onep (arg1))
10199 return fold_build2 (EQ_EXPR, type, arg0,
10200 build_int_cst (TREE_TYPE (arg0), 0));
10202 /* Fold (X & Y) ^ Y as ~X & Y. */
10203 if (TREE_CODE (arg0) == BIT_AND_EXPR
10204 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10206 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10207 return fold_build2 (BIT_AND_EXPR, type,
10208 fold_build1 (BIT_NOT_EXPR, type, tem),
10209 fold_convert (type, arg1));
10211 /* Fold (X & Y) ^ X as ~Y & X. */
10212 if (TREE_CODE (arg0) == BIT_AND_EXPR
10213 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10214 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10216 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10217 return fold_build2 (BIT_AND_EXPR, type,
10218 fold_build1 (BIT_NOT_EXPR, type, tem),
10219 fold_convert (type, arg1));
10221 /* Fold X ^ (X & Y) as X & ~Y. */
10222 if (TREE_CODE (arg1) == BIT_AND_EXPR
10223 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10225 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10226 return fold_build2 (BIT_AND_EXPR, type,
10227 fold_convert (type, arg0),
10228 fold_build1 (BIT_NOT_EXPR, type, tem));
10230 /* Fold X ^ (Y & X) as ~Y & X. */
10231 if (TREE_CODE (arg1) == BIT_AND_EXPR
10232 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10233 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10235 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10236 return fold_build2 (BIT_AND_EXPR, type,
10237 fold_build1 (BIT_NOT_EXPR, type, tem),
10238 fold_convert (type, arg0));
10241 /* See if this can be simplified into a rotate first. If that
10242 is unsuccessful continue in the association code. */
10246 if (integer_all_onesp (arg1))
10247 return non_lvalue (fold_convert (type, arg0));
10248 if (integer_zerop (arg1))
10249 return omit_one_operand (type, arg1, arg0);
10250 if (operand_equal_p (arg0, arg1, 0))
10251 return non_lvalue (fold_convert (type, arg0));
10253 /* ~X & X is always zero. */
10254 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10255 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10256 return omit_one_operand (type, integer_zero_node, arg1);
10258 /* X & ~X is always zero. */
10259 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10260 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10261 return omit_one_operand (type, integer_zero_node, arg0);
10263 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10264 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10265 && TREE_CODE (arg1) == INTEGER_CST
10266 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10267 return fold_build2 (BIT_IOR_EXPR, type,
10268 fold_build2 (BIT_AND_EXPR, type,
10269 TREE_OPERAND (arg0, 0), arg1),
10270 fold_build2 (BIT_AND_EXPR, type,
10271 TREE_OPERAND (arg0, 1), arg1));
10273 /* (X | Y) & Y is (X, Y). */
10274 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10275 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10276 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10277 /* (X | Y) & X is (Y, X). */
10278 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10279 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10280 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10281 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10282 /* X & (X | Y) is (Y, X). */
10283 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10284 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10285 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10286 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10287 /* X & (Y | X) is (Y, X). */
10288 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10289 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10290 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10291 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10293 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10294 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10295 && integer_onep (TREE_OPERAND (arg0, 1))
10296 && integer_onep (arg1))
10298 tem = TREE_OPERAND (arg0, 0);
10299 return fold_build2 (EQ_EXPR, type,
10300 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10301 build_int_cst (TREE_TYPE (tem), 1)),
10302 build_int_cst (TREE_TYPE (tem), 0));
10304 /* Fold ~X & 1 as (X & 1) == 0. */
10305 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10306 && integer_onep (arg1))
10308 tem = TREE_OPERAND (arg0, 0);
10309 return fold_build2 (EQ_EXPR, type,
10310 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10311 build_int_cst (TREE_TYPE (tem), 1)),
10312 build_int_cst (TREE_TYPE (tem), 0));
10315 /* Fold (X ^ Y) & Y as ~X & Y. */
10316 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10317 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10319 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10320 return fold_build2 (BIT_AND_EXPR, type,
10321 fold_build1 (BIT_NOT_EXPR, type, tem),
10322 fold_convert (type, arg1));
10324 /* Fold (X ^ Y) & X as ~Y & X. */
10325 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10326 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10327 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10329 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10330 return fold_build2 (BIT_AND_EXPR, type,
10331 fold_build1 (BIT_NOT_EXPR, type, tem),
10332 fold_convert (type, arg1));
10334 /* Fold X & (X ^ Y) as X & ~Y. */
10335 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10336 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10338 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10339 return fold_build2 (BIT_AND_EXPR, type,
10340 fold_convert (type, arg0),
10341 fold_build1 (BIT_NOT_EXPR, type, tem));
10343 /* Fold X & (Y ^ X) as ~Y & X. */
10344 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10345 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10346 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10348 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10349 return fold_build2 (BIT_AND_EXPR, type,
10350 fold_build1 (BIT_NOT_EXPR, type, tem),
10351 fold_convert (type, arg0));
10354 t1 = distribute_bit_expr (code, type, arg0, arg1);
10355 if (t1 != NULL_TREE)
10357 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10358 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10359 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10362 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10364 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10365 && (~TREE_INT_CST_LOW (arg1)
10366 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10367 return fold_convert (type, TREE_OPERAND (arg0, 0));
10370 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10372 This results in more efficient code for machines without a NOR
10373 instruction. Combine will canonicalize to the first form
10374 which will allow use of NOR instructions provided by the
10375 backend if they exist. */
10376 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10377 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10379 return fold_build1 (BIT_NOT_EXPR, type,
10380 build2 (BIT_IOR_EXPR, type,
10381 TREE_OPERAND (arg0, 0),
10382 TREE_OPERAND (arg1, 0)));
10388 /* Don't touch a floating-point divide by zero unless the mode
10389 of the constant can represent infinity. */
10390 if (TREE_CODE (arg1) == REAL_CST
10391 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10392 && real_zerop (arg1))
10395 /* Optimize A / A to 1.0 if we don't care about
10396 NaNs or Infinities. Skip the transformation
10397 for non-real operands. */
10398 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10399 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10400 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10401 && operand_equal_p (arg0, arg1, 0))
10403 tree r = build_real (TREE_TYPE (arg0), dconst1);
10405 return omit_two_operands (type, r, arg0, arg1);
10408 /* The complex version of the above A / A optimization. */
10409 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10410 && operand_equal_p (arg0, arg1, 0))
10412 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10413 if (! HONOR_NANS (TYPE_MODE (elem_type))
10414 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10416 tree r = build_real (elem_type, dconst1);
10417 /* omit_two_operands will call fold_convert for us. */
10418 return omit_two_operands (type, r, arg0, arg1);
10422 /* (-A) / (-B) -> A / B */
10423 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10424 return fold_build2 (RDIV_EXPR, type,
10425 TREE_OPERAND (arg0, 0),
10426 negate_expr (arg1));
10427 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10428 return fold_build2 (RDIV_EXPR, type,
10429 negate_expr (arg0),
10430 TREE_OPERAND (arg1, 0));
10432 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10433 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10434 && real_onep (arg1))
10435 return non_lvalue (fold_convert (type, arg0));
10437 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10438 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10439 && real_minus_onep (arg1))
10440 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10442 /* If ARG1 is a constant, we can convert this to a multiply by the
10443 reciprocal. This does not have the same rounding properties,
10444 so only do this if -funsafe-math-optimizations. We can actually
10445 always safely do it if ARG1 is a power of two, but it's hard to
10446 tell if it is or not in a portable manner. */
10447 if (TREE_CODE (arg1) == REAL_CST)
10449 if (flag_unsafe_math_optimizations
10450 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10452 return fold_build2 (MULT_EXPR, type, arg0, tem);
10453 /* Find the reciprocal if optimizing and the result is exact. */
10457 r = TREE_REAL_CST (arg1);
10458 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10460 tem = build_real (type, r);
10461 return fold_build2 (MULT_EXPR, type,
10462 fold_convert (type, arg0), tem);
10466 /* Convert A/B/C to A/(B*C). */
10467 if (flag_unsafe_math_optimizations
10468 && TREE_CODE (arg0) == RDIV_EXPR)
10469 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10470 fold_build2 (MULT_EXPR, type,
10471 TREE_OPERAND (arg0, 1), arg1));
10473 /* Convert A/(B/C) to (A/B)*C. */
10474 if (flag_unsafe_math_optimizations
10475 && TREE_CODE (arg1) == RDIV_EXPR)
10476 return fold_build2 (MULT_EXPR, type,
10477 fold_build2 (RDIV_EXPR, type, arg0,
10478 TREE_OPERAND (arg1, 0)),
10479 TREE_OPERAND (arg1, 1));
10481 /* Convert C1/(X*C2) into (C1/C2)/X. */
10482 if (flag_unsafe_math_optimizations
10483 && TREE_CODE (arg1) == MULT_EXPR
10484 && TREE_CODE (arg0) == REAL_CST
10485 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10487 tree tem = const_binop (RDIV_EXPR, arg0,
10488 TREE_OPERAND (arg1, 1), 0);
10490 return fold_build2 (RDIV_EXPR, type, tem,
10491 TREE_OPERAND (arg1, 0));
10494 if (flag_unsafe_math_optimizations)
10496 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10497 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10499 /* Optimize sin(x)/cos(x) as tan(x). */
10500 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10501 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10502 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10503 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10504 CALL_EXPR_ARG (arg1, 0), 0))
10506 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10508 if (tanfn != NULL_TREE)
10509 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10512 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10513 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10514 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10515 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10516 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10517 CALL_EXPR_ARG (arg1, 0), 0))
10519 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10521 if (tanfn != NULL_TREE)
10523 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10524 return fold_build2 (RDIV_EXPR, type,
10525 build_real (type, dconst1), tmp);
10529 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10530 NaNs or Infinities. */
10531 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10532 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10533 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10535 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10536 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10538 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10539 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10540 && operand_equal_p (arg00, arg01, 0))
10542 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10544 if (cosfn != NULL_TREE)
10545 return build_call_expr (cosfn, 1, arg00);
10549 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10550 NaNs or Infinities. */
10551 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10552 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10553 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10555 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10556 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10558 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10559 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10560 && operand_equal_p (arg00, arg01, 0))
10562 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10564 if (cosfn != NULL_TREE)
10566 tree tmp = build_call_expr (cosfn, 1, arg00);
10567 return fold_build2 (RDIV_EXPR, type,
10568 build_real (type, dconst1),
10574 /* Optimize pow(x,c)/x as pow(x,c-1). */
10575 if (fcode0 == BUILT_IN_POW
10576 || fcode0 == BUILT_IN_POWF
10577 || fcode0 == BUILT_IN_POWL)
10579 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10580 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10581 if (TREE_CODE (arg01) == REAL_CST
10582 && !TREE_OVERFLOW (arg01)
10583 && operand_equal_p (arg1, arg00, 0))
10585 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10589 c = TREE_REAL_CST (arg01);
10590 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10591 arg = build_real (type, c);
10592 return build_call_expr (powfn, 2, arg1, arg);
10596 /* Optimize x/expN(y) into x*expN(-y). */
10597 if (BUILTIN_EXPONENT_P (fcode1))
10599 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10600 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10601 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
10602 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10605 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10606 if (fcode1 == BUILT_IN_POW
10607 || fcode1 == BUILT_IN_POWF
10608 || fcode1 == BUILT_IN_POWL)
10610 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10611 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10612 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10613 tree neg11 = fold_convert (type, negate_expr (arg11));
10614 arg1 = build_call_expr (powfn, 2, arg10, neg11);
10615 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10620 case TRUNC_DIV_EXPR:
10621 case FLOOR_DIV_EXPR:
10622 /* Simplify A / (B << N) where A and B are positive and B is
10623 a power of 2, to A >> (N + log2(B)). */
10624 strict_overflow_p = false;
10625 if (TREE_CODE (arg1) == LSHIFT_EXPR
10626 && (TYPE_UNSIGNED (type)
10627 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10629 tree sval = TREE_OPERAND (arg1, 0);
10630 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10632 tree sh_cnt = TREE_OPERAND (arg1, 1);
10633 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10635 if (strict_overflow_p)
10636 fold_overflow_warning (("assuming signed overflow does not "
10637 "occur when simplifying A / (B << N)"),
10638 WARN_STRICT_OVERFLOW_MISC);
10640 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10641 sh_cnt, build_int_cst (NULL_TREE, pow2));
10642 return fold_build2 (RSHIFT_EXPR, type,
10643 fold_convert (type, arg0), sh_cnt);
10648 case ROUND_DIV_EXPR:
10649 case CEIL_DIV_EXPR:
10650 case EXACT_DIV_EXPR:
10651 if (integer_onep (arg1))
10652 return non_lvalue (fold_convert (type, arg0));
10653 if (integer_zerop (arg1))
10655 /* X / -1 is -X. */
10656 if (!TYPE_UNSIGNED (type)
10657 && TREE_CODE (arg1) == INTEGER_CST
10658 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10659 && TREE_INT_CST_HIGH (arg1) == -1)
10660 return fold_convert (type, negate_expr (arg0));
10662 /* Convert -A / -B to A / B when the type is signed and overflow is
10664 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10665 && TREE_CODE (arg0) == NEGATE_EXPR
10666 && negate_expr_p (arg1))
10668 if (INTEGRAL_TYPE_P (type))
10669 fold_overflow_warning (("assuming signed overflow does not occur "
10670 "when distributing negation across "
10672 WARN_STRICT_OVERFLOW_MISC);
10673 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10674 negate_expr (arg1));
10676 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10677 && TREE_CODE (arg1) == NEGATE_EXPR
10678 && negate_expr_p (arg0))
10680 if (INTEGRAL_TYPE_P (type))
10681 fold_overflow_warning (("assuming signed overflow does not occur "
10682 "when distributing negation across "
10684 WARN_STRICT_OVERFLOW_MISC);
10685 return fold_build2 (code, type, negate_expr (arg0),
10686 TREE_OPERAND (arg1, 0));
10689 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10690 operation, EXACT_DIV_EXPR.
10692 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10693 At one time others generated faster code, it's not clear if they do
10694 after the last round to changes to the DIV code in expmed.c. */
10695 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10696 && multiple_of_p (type, arg0, arg1))
10697 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10699 strict_overflow_p = false;
10700 if (TREE_CODE (arg1) == INTEGER_CST
10701 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10702 &strict_overflow_p)))
10704 if (strict_overflow_p)
10705 fold_overflow_warning (("assuming signed overflow does not occur "
10706 "when simplifying division"),
10707 WARN_STRICT_OVERFLOW_MISC);
10708 return fold_convert (type, tem);
10713 case CEIL_MOD_EXPR:
10714 case FLOOR_MOD_EXPR:
10715 case ROUND_MOD_EXPR:
10716 case TRUNC_MOD_EXPR:
10717 /* X % 1 is always zero, but be sure to preserve any side
10719 if (integer_onep (arg1))
10720 return omit_one_operand (type, integer_zero_node, arg0);
10722 /* X % 0, return X % 0 unchanged so that we can get the
10723 proper warnings and errors. */
10724 if (integer_zerop (arg1))
10727 /* 0 % X is always zero, but be sure to preserve any side
10728 effects in X. Place this after checking for X == 0. */
10729 if (integer_zerop (arg0))
10730 return omit_one_operand (type, integer_zero_node, arg1);
10732 /* X % -1 is zero. */
10733 if (!TYPE_UNSIGNED (type)
10734 && TREE_CODE (arg1) == INTEGER_CST
10735 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10736 && TREE_INT_CST_HIGH (arg1) == -1)
10737 return omit_one_operand (type, integer_zero_node, arg0);
10739 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10740 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10741 strict_overflow_p = false;
10742 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10743 && (TYPE_UNSIGNED (type)
10744 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10747 /* Also optimize A % (C << N) where C is a power of 2,
10748 to A & ((C << N) - 1). */
10749 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10750 c = TREE_OPERAND (arg1, 0);
10752 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10754 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10755 build_int_cst (TREE_TYPE (arg1), 1));
10756 if (strict_overflow_p)
10757 fold_overflow_warning (("assuming signed overflow does not "
10758 "occur when simplifying "
10759 "X % (power of two)"),
10760 WARN_STRICT_OVERFLOW_MISC);
10761 return fold_build2 (BIT_AND_EXPR, type,
10762 fold_convert (type, arg0),
10763 fold_convert (type, mask));
10767 /* X % -C is the same as X % C. */
10768 if (code == TRUNC_MOD_EXPR
10769 && !TYPE_UNSIGNED (type)
10770 && TREE_CODE (arg1) == INTEGER_CST
10771 && !TREE_OVERFLOW (arg1)
10772 && TREE_INT_CST_HIGH (arg1) < 0
10773 && !TYPE_OVERFLOW_TRAPS (type)
10774 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10775 && !sign_bit_p (arg1, arg1))
10776 return fold_build2 (code, type, fold_convert (type, arg0),
10777 fold_convert (type, negate_expr (arg1)));
10779 /* X % -Y is the same as X % Y. */
10780 if (code == TRUNC_MOD_EXPR
10781 && !TYPE_UNSIGNED (type)
10782 && TREE_CODE (arg1) == NEGATE_EXPR
10783 && !TYPE_OVERFLOW_TRAPS (type))
10784 return fold_build2 (code, type, fold_convert (type, arg0),
10785 fold_convert (type, TREE_OPERAND (arg1, 0)));
10787 if (TREE_CODE (arg1) == INTEGER_CST
10788 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10789 &strict_overflow_p)))
10791 if (strict_overflow_p)
10792 fold_overflow_warning (("assuming signed overflow does not occur "
10793 "when simplifying modulos"),
10794 WARN_STRICT_OVERFLOW_MISC);
10795 return fold_convert (type, tem);
10802 if (integer_all_onesp (arg0))
10803 return omit_one_operand (type, arg0, arg1);
10807 /* Optimize -1 >> x for arithmetic right shifts. */
10808 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10809 return omit_one_operand (type, arg0, arg1);
10810 /* ... fall through ... */
10814 if (integer_zerop (arg1))
10815 return non_lvalue (fold_convert (type, arg0));
10816 if (integer_zerop (arg0))
10817 return omit_one_operand (type, arg0, arg1);
10819 /* Since negative shift count is not well-defined,
10820 don't try to compute it in the compiler. */
10821 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10824 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10825 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10826 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10827 && host_integerp (TREE_OPERAND (arg0, 1), false)
10828 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10830 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10831 + TREE_INT_CST_LOW (arg1));
10833 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10834 being well defined. */
10835 if (low >= TYPE_PRECISION (type))
10837 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10838 low = low % TYPE_PRECISION (type);
10839 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10840 return build_int_cst (type, 0);
10842 low = TYPE_PRECISION (type) - 1;
10845 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10846 build_int_cst (type, low));
10849 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10850 into x & ((unsigned)-1 >> c) for unsigned types. */
10851 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10852 || (TYPE_UNSIGNED (type)
10853 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10854 && host_integerp (arg1, false)
10855 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10856 && host_integerp (TREE_OPERAND (arg0, 1), false)
10857 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10859 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10860 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10866 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10868 lshift = build_int_cst (type, -1);
10869 lshift = int_const_binop (code, lshift, arg1, 0);
10871 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10875 /* Rewrite an LROTATE_EXPR by a constant into an
10876 RROTATE_EXPR by a new constant. */
10877 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10879 tree tem = build_int_cst (TREE_TYPE (arg1),
10880 GET_MODE_BITSIZE (TYPE_MODE (type)));
10881 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10882 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10885 /* If we have a rotate of a bit operation with the rotate count and
10886 the second operand of the bit operation both constant,
10887 permute the two operations. */
10888 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10889 && (TREE_CODE (arg0) == BIT_AND_EXPR
10890 || TREE_CODE (arg0) == BIT_IOR_EXPR
10891 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10892 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10893 return fold_build2 (TREE_CODE (arg0), type,
10894 fold_build2 (code, type,
10895 TREE_OPERAND (arg0, 0), arg1),
10896 fold_build2 (code, type,
10897 TREE_OPERAND (arg0, 1), arg1));
10899 /* Two consecutive rotates adding up to the width of the mode can
10901 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10902 && TREE_CODE (arg0) == RROTATE_EXPR
10903 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10904 && TREE_INT_CST_HIGH (arg1) == 0
10905 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10906 && ((TREE_INT_CST_LOW (arg1)
10907 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10908 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10909 return TREE_OPERAND (arg0, 0);
10914 if (operand_equal_p (arg0, arg1, 0))
10915 return omit_one_operand (type, arg0, arg1);
10916 if (INTEGRAL_TYPE_P (type)
10917 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10918 return omit_one_operand (type, arg1, arg0);
10919 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10925 if (operand_equal_p (arg0, arg1, 0))
10926 return omit_one_operand (type, arg0, arg1);
10927 if (INTEGRAL_TYPE_P (type)
10928 && TYPE_MAX_VALUE (type)
10929 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10930 return omit_one_operand (type, arg1, arg0);
10931 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10936 case TRUTH_ANDIF_EXPR:
10937 /* Note that the operands of this must be ints
10938 and their values must be 0 or 1.
10939 ("true" is a fixed value perhaps depending on the language.) */
10940 /* If first arg is constant zero, return it. */
10941 if (integer_zerop (arg0))
10942 return fold_convert (type, arg0);
10943 case TRUTH_AND_EXPR:
10944 /* If either arg is constant true, drop it. */
10945 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10946 return non_lvalue (fold_convert (type, arg1));
10947 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10948 /* Preserve sequence points. */
10949 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10950 return non_lvalue (fold_convert (type, arg0));
10951 /* If second arg is constant zero, result is zero, but first arg
10952 must be evaluated. */
10953 if (integer_zerop (arg1))
10954 return omit_one_operand (type, arg1, arg0);
10955 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10956 case will be handled here. */
10957 if (integer_zerop (arg0))
10958 return omit_one_operand (type, arg0, arg1);
10960 /* !X && X is always false. */
10961 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10962 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10963 return omit_one_operand (type, integer_zero_node, arg1);
10964 /* X && !X is always false. */
10965 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10966 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10967 return omit_one_operand (type, integer_zero_node, arg0);
10969 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10970 means A >= Y && A != MAX, but in this case we know that
10973 if (!TREE_SIDE_EFFECTS (arg0)
10974 && !TREE_SIDE_EFFECTS (arg1))
10976 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10977 if (tem && !operand_equal_p (tem, arg0, 0))
10978 return fold_build2 (code, type, tem, arg1);
10980 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10981 if (tem && !operand_equal_p (tem, arg1, 0))
10982 return fold_build2 (code, type, arg0, tem);
10986 /* We only do these simplifications if we are optimizing. */
10990 /* Check for things like (A || B) && (A || C). We can convert this
10991 to A || (B && C). Note that either operator can be any of the four
10992 truth and/or operations and the transformation will still be
10993 valid. Also note that we only care about order for the
10994 ANDIF and ORIF operators. If B contains side effects, this
10995 might change the truth-value of A. */
10996 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10997 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10998 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10999 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11000 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11001 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11003 tree a00 = TREE_OPERAND (arg0, 0);
11004 tree a01 = TREE_OPERAND (arg0, 1);
11005 tree a10 = TREE_OPERAND (arg1, 0);
11006 tree a11 = TREE_OPERAND (arg1, 1);
11007 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11008 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11009 && (code == TRUTH_AND_EXPR
11010 || code == TRUTH_OR_EXPR));
11012 if (operand_equal_p (a00, a10, 0))
11013 return fold_build2 (TREE_CODE (arg0), type, a00,
11014 fold_build2 (code, type, a01, a11));
11015 else if (commutative && operand_equal_p (a00, a11, 0))
11016 return fold_build2 (TREE_CODE (arg0), type, a00,
11017 fold_build2 (code, type, a01, a10));
11018 else if (commutative && operand_equal_p (a01, a10, 0))
11019 return fold_build2 (TREE_CODE (arg0), type, a01,
11020 fold_build2 (code, type, a00, a11));
11022 /* This case is tricky because we must either have commutative
11023 operators or else A10 must not have side-effects. */
11025 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11026 && operand_equal_p (a01, a11, 0))
11027 return fold_build2 (TREE_CODE (arg0), type,
11028 fold_build2 (code, type, a00, a10),
11032 /* See if we can build a range comparison. */
11033 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11036 /* Check for the possibility of merging component references. If our
11037 lhs is another similar operation, try to merge its rhs with our
11038 rhs. Then try to merge our lhs and rhs. */
11039 if (TREE_CODE (arg0) == code
11040 && 0 != (tem = fold_truthop (code, type,
11041 TREE_OPERAND (arg0, 1), arg1)))
11042 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11044 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11049 case TRUTH_ORIF_EXPR:
11050 /* Note that the operands of this must be ints
11051 and their values must be 0 or true.
11052 ("true" is a fixed value perhaps depending on the language.) */
11053 /* If first arg is constant true, return it. */
11054 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11055 return fold_convert (type, arg0);
11056 case TRUTH_OR_EXPR:
11057 /* If either arg is constant zero, drop it. */
11058 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11059 return non_lvalue (fold_convert (type, arg1));
11060 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11061 /* Preserve sequence points. */
11062 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11063 return non_lvalue (fold_convert (type, arg0));
11064 /* If second arg is constant true, result is true, but we must
11065 evaluate first arg. */
11066 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11067 return omit_one_operand (type, arg1, arg0);
11068 /* Likewise for first arg, but note this only occurs here for
11070 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11071 return omit_one_operand (type, arg0, arg1);
11073 /* !X || X is always true. */
11074 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11075 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11076 return omit_one_operand (type, integer_one_node, arg1);
11077 /* X || !X is always true. */
11078 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11079 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11080 return omit_one_operand (type, integer_one_node, arg0);
11084 case TRUTH_XOR_EXPR:
11085 /* If the second arg is constant zero, drop it. */
11086 if (integer_zerop (arg1))
11087 return non_lvalue (fold_convert (type, arg0));
11088 /* If the second arg is constant true, this is a logical inversion. */
11089 if (integer_onep (arg1))
11091 /* Only call invert_truthvalue if operand is a truth value. */
11092 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11093 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11095 tem = invert_truthvalue (arg0);
11096 return non_lvalue (fold_convert (type, tem));
11098 /* Identical arguments cancel to zero. */
11099 if (operand_equal_p (arg0, arg1, 0))
11100 return omit_one_operand (type, integer_zero_node, arg0);
11102 /* !X ^ X is always true. */
11103 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11104 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11105 return omit_one_operand (type, integer_one_node, arg1);
11107 /* X ^ !X is always true. */
11108 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11109 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11110 return omit_one_operand (type, integer_one_node, arg0);
11116 tem = fold_comparison (code, type, op0, op1);
11117 if (tem != NULL_TREE)
11120 /* bool_var != 0 becomes bool_var. */
11121 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11122 && code == NE_EXPR)
11123 return non_lvalue (fold_convert (type, arg0));
11125 /* bool_var == 1 becomes bool_var. */
11126 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11127 && code == EQ_EXPR)
11128 return non_lvalue (fold_convert (type, arg0));
11130 /* bool_var != 1 becomes !bool_var. */
11131 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11132 && code == NE_EXPR)
11133 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11135 /* bool_var == 0 becomes !bool_var. */
11136 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11137 && code == EQ_EXPR)
11138 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11140 /* If this is an equality comparison of the address of two non-weak,
11141 unaliased symbols neither of which are extern (since we do not
11142 have access to attributes for externs), then we know the result. */
11143 if (TREE_CODE (arg0) == ADDR_EXPR
11144 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11145 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11146 && ! lookup_attribute ("alias",
11147 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11148 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11149 && TREE_CODE (arg1) == ADDR_EXPR
11150 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11151 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11152 && ! lookup_attribute ("alias",
11153 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11154 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11156 /* We know that we're looking at the address of two
11157 non-weak, unaliased, static _DECL nodes.
11159 It is both wasteful and incorrect to call operand_equal_p
11160 to compare the two ADDR_EXPR nodes. It is wasteful in that
11161 all we need to do is test pointer equality for the arguments
11162 to the two ADDR_EXPR nodes. It is incorrect to use
11163 operand_equal_p as that function is NOT equivalent to a
11164 C equality test. It can in fact return false for two
11165 objects which would test as equal using the C equality
11167 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11168 return constant_boolean_node (equal
11169 ? code == EQ_EXPR : code != EQ_EXPR,
11173 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11174 a MINUS_EXPR of a constant, we can convert it into a comparison with
11175 a revised constant as long as no overflow occurs. */
11176 if (TREE_CODE (arg1) == INTEGER_CST
11177 && (TREE_CODE (arg0) == PLUS_EXPR
11178 || TREE_CODE (arg0) == MINUS_EXPR)
11179 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11180 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11181 ? MINUS_EXPR : PLUS_EXPR,
11182 fold_convert (TREE_TYPE (arg0), arg1),
11183 TREE_OPERAND (arg0, 1), 0))
11184 && !TREE_OVERFLOW (tem))
11185 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11187 /* Similarly for a NEGATE_EXPR. */
11188 if (TREE_CODE (arg0) == NEGATE_EXPR
11189 && TREE_CODE (arg1) == INTEGER_CST
11190 && 0 != (tem = negate_expr (arg1))
11191 && TREE_CODE (tem) == INTEGER_CST
11192 && !TREE_OVERFLOW (tem))
11193 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11195 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11196 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11197 && TREE_CODE (arg1) == INTEGER_CST
11198 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11199 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11200 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11201 fold_convert (TREE_TYPE (arg0), arg1),
11202 TREE_OPERAND (arg0, 1)));
11204 /* Transform comparisons of the form X +- C CMP X. */
11205 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11206 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11207 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11208 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11209 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11211 tree cst = TREE_OPERAND (arg0, 1);
11213 if (code == EQ_EXPR
11214 && !integer_zerop (cst))
11215 return omit_two_operands (type, boolean_false_node,
11216 TREE_OPERAND (arg0, 0), arg1);
11218 return omit_two_operands (type, boolean_true_node,
11219 TREE_OPERAND (arg0, 0), arg1);
11222 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11223 for !=. Don't do this for ordered comparisons due to overflow. */
11224 if (TREE_CODE (arg0) == MINUS_EXPR
11225 && integer_zerop (arg1))
11226 return fold_build2 (code, type,
11227 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11229 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11230 if (TREE_CODE (arg0) == ABS_EXPR
11231 && (integer_zerop (arg1) || real_zerop (arg1)))
11232 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11234 /* If this is an EQ or NE comparison with zero and ARG0 is
11235 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11236 two operations, but the latter can be done in one less insn
11237 on machines that have only two-operand insns or on which a
11238 constant cannot be the first operand. */
11239 if (TREE_CODE (arg0) == BIT_AND_EXPR
11240 && integer_zerop (arg1))
11242 tree arg00 = TREE_OPERAND (arg0, 0);
11243 tree arg01 = TREE_OPERAND (arg0, 1);
11244 if (TREE_CODE (arg00) == LSHIFT_EXPR
11245 && integer_onep (TREE_OPERAND (arg00, 0)))
11247 fold_build2 (code, type,
11248 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11249 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11250 arg01, TREE_OPERAND (arg00, 1)),
11251 fold_convert (TREE_TYPE (arg0),
11252 integer_one_node)),
11254 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
11255 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
11257 fold_build2 (code, type,
11258 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11259 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11260 arg00, TREE_OPERAND (arg01, 1)),
11261 fold_convert (TREE_TYPE (arg0),
11262 integer_one_node)),
11266 /* If this is an NE or EQ comparison of zero against the result of a
11267 signed MOD operation whose second operand is a power of 2, make
11268 the MOD operation unsigned since it is simpler and equivalent. */
11269 if (integer_zerop (arg1)
11270 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11271 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11272 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11273 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11274 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11275 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11277 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
11278 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11279 fold_convert (newtype,
11280 TREE_OPERAND (arg0, 0)),
11281 fold_convert (newtype,
11282 TREE_OPERAND (arg0, 1)));
11284 return fold_build2 (code, type, newmod,
11285 fold_convert (newtype, arg1));
11288 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11289 C1 is a valid shift constant, and C2 is a power of two, i.e.
11291 if (TREE_CODE (arg0) == BIT_AND_EXPR
11292 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11293 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11295 && integer_pow2p (TREE_OPERAND (arg0, 1))
11296 && integer_zerop (arg1))
11298 tree itype = TREE_TYPE (arg0);
11299 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11300 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11302 /* Check for a valid shift count. */
11303 if (TREE_INT_CST_HIGH (arg001) == 0
11304 && TREE_INT_CST_LOW (arg001) < prec)
11306 tree arg01 = TREE_OPERAND (arg0, 1);
11307 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11308 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11309 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11310 can be rewritten as (X & (C2 << C1)) != 0. */
11311 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11313 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11314 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11315 return fold_build2 (code, type, tem, arg1);
11317 /* Otherwise, for signed (arithmetic) shifts,
11318 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11319 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11320 else if (!TYPE_UNSIGNED (itype))
11321 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11322 arg000, build_int_cst (itype, 0));
11323 /* Otherwise, for unsigned (logical) shifts,
11324 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11325 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11327 return omit_one_operand (type,
11328 code == EQ_EXPR ? integer_one_node
11329 : integer_zero_node,
11334 /* If this is an NE comparison of zero with an AND of one, remove the
11335 comparison since the AND will give the correct value. */
11336 if (code == NE_EXPR
11337 && integer_zerop (arg1)
11338 && TREE_CODE (arg0) == BIT_AND_EXPR
11339 && integer_onep (TREE_OPERAND (arg0, 1)))
11340 return fold_convert (type, arg0);
11342 /* If we have (A & C) == C where C is a power of 2, convert this into
11343 (A & C) != 0. Similarly for NE_EXPR. */
11344 if (TREE_CODE (arg0) == BIT_AND_EXPR
11345 && integer_pow2p (TREE_OPERAND (arg0, 1))
11346 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11347 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11348 arg0, fold_convert (TREE_TYPE (arg0),
11349 integer_zero_node));
11351 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11352 bit, then fold the expression into A < 0 or A >= 0. */
11353 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11357 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11358 Similarly for NE_EXPR. */
11359 if (TREE_CODE (arg0) == BIT_AND_EXPR
11360 && TREE_CODE (arg1) == INTEGER_CST
11361 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11363 tree notc = fold_build1 (BIT_NOT_EXPR,
11364 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11365 TREE_OPERAND (arg0, 1));
11366 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11368 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11369 if (integer_nonzerop (dandnotc))
11370 return omit_one_operand (type, rslt, arg0);
11373 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11374 Similarly for NE_EXPR. */
11375 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11376 && TREE_CODE (arg1) == INTEGER_CST
11377 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11379 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11380 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11381 TREE_OPERAND (arg0, 1), notd);
11382 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11383 if (integer_nonzerop (candnotd))
11384 return omit_one_operand (type, rslt, arg0);
11387 /* If this is a comparison of a field, we may be able to simplify it. */
11388 if ((TREE_CODE (arg0) == COMPONENT_REF
11389 || TREE_CODE (arg0) == BIT_FIELD_REF)
11390 /* Handle the constant case even without -O
11391 to make sure the warnings are given. */
11392 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11394 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11399 /* Optimize comparisons of strlen vs zero to a compare of the
11400 first character of the string vs zero. To wit,
11401 strlen(ptr) == 0 => *ptr == 0
11402 strlen(ptr) != 0 => *ptr != 0
11403 Other cases should reduce to one of these two (or a constant)
11404 due to the return value of strlen being unsigned. */
11405 if (TREE_CODE (arg0) == CALL_EXPR
11406 && integer_zerop (arg1))
11408 tree fndecl = get_callee_fndecl (arg0);
11411 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11412 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11413 && call_expr_nargs (arg0) == 1
11414 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11416 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
11417 return fold_build2 (code, type, iref,
11418 build_int_cst (TREE_TYPE (iref), 0));
11422 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11423 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11424 if (TREE_CODE (arg0) == RSHIFT_EXPR
11425 && integer_zerop (arg1)
11426 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11428 tree arg00 = TREE_OPERAND (arg0, 0);
11429 tree arg01 = TREE_OPERAND (arg0, 1);
11430 tree itype = TREE_TYPE (arg00);
11431 if (TREE_INT_CST_HIGH (arg01) == 0
11432 && TREE_INT_CST_LOW (arg01)
11433 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11435 if (TYPE_UNSIGNED (itype))
11437 itype = lang_hooks.types.signed_type (itype);
11438 arg00 = fold_convert (itype, arg00);
11440 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11441 type, arg00, build_int_cst (itype, 0));
11445 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11446 if (integer_zerop (arg1)
11447 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11448 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11449 TREE_OPERAND (arg0, 1));
11451 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11452 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11453 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11454 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11455 build_int_cst (TREE_TYPE (arg1), 0));
11456 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11457 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11458 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11459 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11460 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11461 build_int_cst (TREE_TYPE (arg1), 0));
11463 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11464 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11465 && TREE_CODE (arg1) == INTEGER_CST
11466 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11467 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11468 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11469 TREE_OPERAND (arg0, 1), arg1));
11471 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11472 (X & C) == 0 when C is a single bit. */
11473 if (TREE_CODE (arg0) == BIT_AND_EXPR
11474 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11475 && integer_zerop (arg1)
11476 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11478 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11479 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11480 TREE_OPERAND (arg0, 1));
11481 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11485 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11486 constant C is a power of two, i.e. a single bit. */
11487 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11488 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11489 && integer_zerop (arg1)
11490 && integer_pow2p (TREE_OPERAND (arg0, 1))
11491 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11492 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11494 tree arg00 = TREE_OPERAND (arg0, 0);
11495 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11496 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11499 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11500 when C is a power of two, i.e. a single bit. */
11501 if (TREE_CODE (arg0) == BIT_AND_EXPR
11502 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11503 && integer_zerop (arg1)
11504 && integer_pow2p (TREE_OPERAND (arg0, 1))
11505 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11506 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11508 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11509 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11510 arg000, TREE_OPERAND (arg0, 1));
11511 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11512 tem, build_int_cst (TREE_TYPE (tem), 0));
11515 if (integer_zerop (arg1)
11516 && tree_expr_nonzero_p (arg0))
11518 tree res = constant_boolean_node (code==NE_EXPR, type);
11519 return omit_one_operand (type, res, arg0);
11522 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11523 if (TREE_CODE (arg0) == NEGATE_EXPR
11524 && TREE_CODE (arg1) == NEGATE_EXPR)
11525 return fold_build2 (code, type,
11526 TREE_OPERAND (arg0, 0),
11527 TREE_OPERAND (arg1, 0));
11529 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11530 if (TREE_CODE (arg0) == BIT_AND_EXPR
11531 && TREE_CODE (arg1) == BIT_AND_EXPR)
11533 tree arg00 = TREE_OPERAND (arg0, 0);
11534 tree arg01 = TREE_OPERAND (arg0, 1);
11535 tree arg10 = TREE_OPERAND (arg1, 0);
11536 tree arg11 = TREE_OPERAND (arg1, 1);
11537 tree itype = TREE_TYPE (arg0);
11539 if (operand_equal_p (arg01, arg11, 0))
11540 return fold_build2 (code, type,
11541 fold_build2 (BIT_AND_EXPR, itype,
11542 fold_build2 (BIT_XOR_EXPR, itype,
11545 build_int_cst (itype, 0));
11547 if (operand_equal_p (arg01, arg10, 0))
11548 return fold_build2 (code, type,
11549 fold_build2 (BIT_AND_EXPR, itype,
11550 fold_build2 (BIT_XOR_EXPR, itype,
11553 build_int_cst (itype, 0));
11555 if (operand_equal_p (arg00, arg11, 0))
11556 return fold_build2 (code, type,
11557 fold_build2 (BIT_AND_EXPR, itype,
11558 fold_build2 (BIT_XOR_EXPR, itype,
11561 build_int_cst (itype, 0));
11563 if (operand_equal_p (arg00, arg10, 0))
11564 return fold_build2 (code, type,
11565 fold_build2 (BIT_AND_EXPR, itype,
11566 fold_build2 (BIT_XOR_EXPR, itype,
11569 build_int_cst (itype, 0));
11572 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11573 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11575 tree arg00 = TREE_OPERAND (arg0, 0);
11576 tree arg01 = TREE_OPERAND (arg0, 1);
11577 tree arg10 = TREE_OPERAND (arg1, 0);
11578 tree arg11 = TREE_OPERAND (arg1, 1);
11579 tree itype = TREE_TYPE (arg0);
11581 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11582 operand_equal_p guarantees no side-effects so we don't need
11583 to use omit_one_operand on Z. */
11584 if (operand_equal_p (arg01, arg11, 0))
11585 return fold_build2 (code, type, arg00, arg10);
11586 if (operand_equal_p (arg01, arg10, 0))
11587 return fold_build2 (code, type, arg00, arg11);
11588 if (operand_equal_p (arg00, arg11, 0))
11589 return fold_build2 (code, type, arg01, arg10);
11590 if (operand_equal_p (arg00, arg10, 0))
11591 return fold_build2 (code, type, arg01, arg11);
11593 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11594 if (TREE_CODE (arg01) == INTEGER_CST
11595 && TREE_CODE (arg11) == INTEGER_CST)
11596 return fold_build2 (code, type,
11597 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11598 fold_build2 (BIT_XOR_EXPR, itype,
11603 /* Attempt to simplify equality/inequality comparisons of complex
11604 values. Only lower the comparison if the result is known or
11605 can be simplified to a single scalar comparison. */
11606 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11607 || TREE_CODE (arg0) == COMPLEX_CST)
11608 && (TREE_CODE (arg1) == COMPLEX_EXPR
11609 || TREE_CODE (arg1) == COMPLEX_CST))
11611 tree real0, imag0, real1, imag1;
11614 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11616 real0 = TREE_OPERAND (arg0, 0);
11617 imag0 = TREE_OPERAND (arg0, 1);
11621 real0 = TREE_REALPART (arg0);
11622 imag0 = TREE_IMAGPART (arg0);
11625 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11627 real1 = TREE_OPERAND (arg1, 0);
11628 imag1 = TREE_OPERAND (arg1, 1);
11632 real1 = TREE_REALPART (arg1);
11633 imag1 = TREE_IMAGPART (arg1);
11636 rcond = fold_binary (code, type, real0, real1);
11637 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11639 if (integer_zerop (rcond))
11641 if (code == EQ_EXPR)
11642 return omit_two_operands (type, boolean_false_node,
11644 return fold_build2 (NE_EXPR, type, imag0, imag1);
11648 if (code == NE_EXPR)
11649 return omit_two_operands (type, boolean_true_node,
11651 return fold_build2 (EQ_EXPR, type, imag0, imag1);
11655 icond = fold_binary (code, type, imag0, imag1);
11656 if (icond && TREE_CODE (icond) == INTEGER_CST)
11658 if (integer_zerop (icond))
11660 if (code == EQ_EXPR)
11661 return omit_two_operands (type, boolean_false_node,
11663 return fold_build2 (NE_EXPR, type, real0, real1);
11667 if (code == NE_EXPR)
11668 return omit_two_operands (type, boolean_true_node,
11670 return fold_build2 (EQ_EXPR, type, real0, real1);
11681 tem = fold_comparison (code, type, op0, op1);
11682 if (tem != NULL_TREE)
11685 /* Transform comparisons of the form X +- C CMP X. */
11686 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11687 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11688 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11689 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11690 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11691 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11693 tree arg01 = TREE_OPERAND (arg0, 1);
11694 enum tree_code code0 = TREE_CODE (arg0);
11697 if (TREE_CODE (arg01) == REAL_CST)
11698 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11700 is_positive = tree_int_cst_sgn (arg01);
11702 /* (X - c) > X becomes false. */
11703 if (code == GT_EXPR
11704 && ((code0 == MINUS_EXPR && is_positive >= 0)
11705 || (code0 == PLUS_EXPR && is_positive <= 0)))
11707 if (TREE_CODE (arg01) == INTEGER_CST
11708 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11709 fold_overflow_warning (("assuming signed overflow does not "
11710 "occur when assuming that (X - c) > X "
11711 "is always false"),
11712 WARN_STRICT_OVERFLOW_ALL);
11713 return constant_boolean_node (0, type);
11716 /* Likewise (X + c) < X becomes false. */
11717 if (code == LT_EXPR
11718 && ((code0 == PLUS_EXPR && is_positive >= 0)
11719 || (code0 == MINUS_EXPR && is_positive <= 0)))
11721 if (TREE_CODE (arg01) == INTEGER_CST
11722 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11723 fold_overflow_warning (("assuming signed overflow does not "
11724 "occur when assuming that "
11725 "(X + c) < X is always false"),
11726 WARN_STRICT_OVERFLOW_ALL);
11727 return constant_boolean_node (0, type);
11730 /* Convert (X - c) <= X to true. */
11731 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11733 && ((code0 == MINUS_EXPR && is_positive >= 0)
11734 || (code0 == PLUS_EXPR && is_positive <= 0)))
11736 if (TREE_CODE (arg01) == INTEGER_CST
11737 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11738 fold_overflow_warning (("assuming signed overflow does not "
11739 "occur when assuming that "
11740 "(X - c) <= X is always true"),
11741 WARN_STRICT_OVERFLOW_ALL);
11742 return constant_boolean_node (1, type);
11745 /* Convert (X + c) >= X to true. */
11746 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11748 && ((code0 == PLUS_EXPR && is_positive >= 0)
11749 || (code0 == MINUS_EXPR && is_positive <= 0)))
11751 if (TREE_CODE (arg01) == INTEGER_CST
11752 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11753 fold_overflow_warning (("assuming signed overflow does not "
11754 "occur when assuming that "
11755 "(X + c) >= X is always true"),
11756 WARN_STRICT_OVERFLOW_ALL);
11757 return constant_boolean_node (1, type);
11760 if (TREE_CODE (arg01) == INTEGER_CST)
11762 /* Convert X + c > X and X - c < X to true for integers. */
11763 if (code == GT_EXPR
11764 && ((code0 == PLUS_EXPR && is_positive > 0)
11765 || (code0 == MINUS_EXPR && is_positive < 0)))
11767 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11768 fold_overflow_warning (("assuming signed overflow does "
11769 "not occur when assuming that "
11770 "(X + c) > X is always true"),
11771 WARN_STRICT_OVERFLOW_ALL);
11772 return constant_boolean_node (1, type);
11775 if (code == LT_EXPR
11776 && ((code0 == MINUS_EXPR && is_positive > 0)
11777 || (code0 == PLUS_EXPR && is_positive < 0)))
11779 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11780 fold_overflow_warning (("assuming signed overflow does "
11781 "not occur when assuming that "
11782 "(X - c) < X is always true"),
11783 WARN_STRICT_OVERFLOW_ALL);
11784 return constant_boolean_node (1, type);
11787 /* Convert X + c <= X and X - c >= X to false for integers. */
11788 if (code == LE_EXPR
11789 && ((code0 == PLUS_EXPR && is_positive > 0)
11790 || (code0 == MINUS_EXPR && is_positive < 0)))
11792 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11793 fold_overflow_warning (("assuming signed overflow does "
11794 "not occur when assuming that "
11795 "(X + c) <= X is always false"),
11796 WARN_STRICT_OVERFLOW_ALL);
11797 return constant_boolean_node (0, type);
11800 if (code == GE_EXPR
11801 && ((code0 == MINUS_EXPR && is_positive > 0)
11802 || (code0 == PLUS_EXPR && is_positive < 0)))
11804 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11805 fold_overflow_warning (("assuming signed overflow does "
11806 "not occur when assuming that "
11807 "(X - c) >= X is always false"),
11808 WARN_STRICT_OVERFLOW_ALL);
11809 return constant_boolean_node (0, type);
11814 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11815 This transformation affects the cases which are handled in later
11816 optimizations involving comparisons with non-negative constants. */
11817 if (TREE_CODE (arg1) == INTEGER_CST
11818 && TREE_CODE (arg0) != INTEGER_CST
11819 && tree_int_cst_sgn (arg1) > 0)
11821 if (code == GE_EXPR)
11823 arg1 = const_binop (MINUS_EXPR, arg1,
11824 build_int_cst (TREE_TYPE (arg1), 1), 0);
11825 return fold_build2 (GT_EXPR, type, arg0,
11826 fold_convert (TREE_TYPE (arg0), arg1));
11828 if (code == LT_EXPR)
11830 arg1 = const_binop (MINUS_EXPR, arg1,
11831 build_int_cst (TREE_TYPE (arg1), 1), 0);
11832 return fold_build2 (LE_EXPR, type, arg0,
11833 fold_convert (TREE_TYPE (arg0), arg1));
11837 /* Comparisons with the highest or lowest possible integer of
11838 the specified precision will have known values. */
11840 tree arg1_type = TREE_TYPE (arg1);
11841 unsigned int width = TYPE_PRECISION (arg1_type);
11843 if (TREE_CODE (arg1) == INTEGER_CST
11844 && !TREE_OVERFLOW (arg1)
11845 && width <= 2 * HOST_BITS_PER_WIDE_INT
11846 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11848 HOST_WIDE_INT signed_max_hi;
11849 unsigned HOST_WIDE_INT signed_max_lo;
11850 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11852 if (width <= HOST_BITS_PER_WIDE_INT)
11854 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11859 if (TYPE_UNSIGNED (arg1_type))
11861 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11867 max_lo = signed_max_lo;
11868 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11874 width -= HOST_BITS_PER_WIDE_INT;
11875 signed_max_lo = -1;
11876 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11881 if (TYPE_UNSIGNED (arg1_type))
11883 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11888 max_hi = signed_max_hi;
11889 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11893 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11894 && TREE_INT_CST_LOW (arg1) == max_lo)
11898 return omit_one_operand (type, integer_zero_node, arg0);
11901 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11904 return omit_one_operand (type, integer_one_node, arg0);
11907 return fold_build2 (NE_EXPR, type, arg0, arg1);
11909 /* The GE_EXPR and LT_EXPR cases above are not normally
11910 reached because of previous transformations. */
11915 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11917 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11921 arg1 = const_binop (PLUS_EXPR, arg1,
11922 build_int_cst (TREE_TYPE (arg1), 1), 0);
11923 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11925 arg1 = const_binop (PLUS_EXPR, arg1,
11926 build_int_cst (TREE_TYPE (arg1), 1), 0);
11927 return fold_build2 (NE_EXPR, type, arg0, arg1);
11931 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11933 && TREE_INT_CST_LOW (arg1) == min_lo)
11937 return omit_one_operand (type, integer_zero_node, arg0);
11940 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11943 return omit_one_operand (type, integer_one_node, arg0);
11946 return fold_build2 (NE_EXPR, type, op0, op1);
11951 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11953 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11957 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11958 return fold_build2 (NE_EXPR, type, arg0, arg1);
11960 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11961 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11966 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11967 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11968 && TYPE_UNSIGNED (arg1_type)
11969 /* We will flip the signedness of the comparison operator
11970 associated with the mode of arg1, so the sign bit is
11971 specified by this mode. Check that arg1 is the signed
11972 max associated with this sign bit. */
11973 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11974 /* signed_type does not work on pointer types. */
11975 && INTEGRAL_TYPE_P (arg1_type))
11977 /* The following case also applies to X < signed_max+1
11978 and X >= signed_max+1 because of previous transformations. */
11979 if (code == LE_EXPR || code == GT_EXPR)
11982 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11983 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11984 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11985 type, fold_convert (st0, arg0),
11986 build_int_cst (st1, 0));
11992 /* If we are comparing an ABS_EXPR with a constant, we can
11993 convert all the cases into explicit comparisons, but they may
11994 well not be faster than doing the ABS and one comparison.
11995 But ABS (X) <= C is a range comparison, which becomes a subtraction
11996 and a comparison, and is probably faster. */
11997 if (code == LE_EXPR
11998 && TREE_CODE (arg1) == INTEGER_CST
11999 && TREE_CODE (arg0) == ABS_EXPR
12000 && ! TREE_SIDE_EFFECTS (arg0)
12001 && (0 != (tem = negate_expr (arg1)))
12002 && TREE_CODE (tem) == INTEGER_CST
12003 && !TREE_OVERFLOW (tem))
12004 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12005 build2 (GE_EXPR, type,
12006 TREE_OPERAND (arg0, 0), tem),
12007 build2 (LE_EXPR, type,
12008 TREE_OPERAND (arg0, 0), arg1));
12010 /* Convert ABS_EXPR<x> >= 0 to true. */
12011 strict_overflow_p = false;
12012 if (code == GE_EXPR
12013 && (integer_zerop (arg1)
12014 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12015 && real_zerop (arg1)))
12016 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12018 if (strict_overflow_p)
12019 fold_overflow_warning (("assuming signed overflow does not occur "
12020 "when simplifying comparison of "
12021 "absolute value and zero"),
12022 WARN_STRICT_OVERFLOW_CONDITIONAL);
12023 return omit_one_operand (type, integer_one_node, arg0);
12026 /* Convert ABS_EXPR<x> < 0 to false. */
12027 strict_overflow_p = false;
12028 if (code == LT_EXPR
12029 && (integer_zerop (arg1) || real_zerop (arg1))
12030 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12032 if (strict_overflow_p)
12033 fold_overflow_warning (("assuming signed overflow does not occur "
12034 "when simplifying comparison of "
12035 "absolute value and zero"),
12036 WARN_STRICT_OVERFLOW_CONDITIONAL);
12037 return omit_one_operand (type, integer_zero_node, arg0);
12040 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12041 and similarly for >= into !=. */
12042 if ((code == LT_EXPR || code == GE_EXPR)
12043 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12044 && TREE_CODE (arg1) == LSHIFT_EXPR
12045 && integer_onep (TREE_OPERAND (arg1, 0)))
12046 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12047 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12048 TREE_OPERAND (arg1, 1)),
12049 build_int_cst (TREE_TYPE (arg0), 0));
12051 if ((code == LT_EXPR || code == GE_EXPR)
12052 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12053 && (TREE_CODE (arg1) == NOP_EXPR
12054 || TREE_CODE (arg1) == CONVERT_EXPR)
12055 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12056 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12058 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12059 fold_convert (TREE_TYPE (arg0),
12060 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12061 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12063 build_int_cst (TREE_TYPE (arg0), 0));
12067 case UNORDERED_EXPR:
12075 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12077 t1 = fold_relational_const (code, type, arg0, arg1);
12078 if (t1 != NULL_TREE)
12082 /* If the first operand is NaN, the result is constant. */
12083 if (TREE_CODE (arg0) == REAL_CST
12084 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12085 && (code != LTGT_EXPR || ! flag_trapping_math))
12087 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12088 ? integer_zero_node
12089 : integer_one_node;
12090 return omit_one_operand (type, t1, arg1);
12093 /* If the second operand is NaN, the result is constant. */
12094 if (TREE_CODE (arg1) == REAL_CST
12095 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12096 && (code != LTGT_EXPR || ! flag_trapping_math))
12098 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12099 ? integer_zero_node
12100 : integer_one_node;
12101 return omit_one_operand (type, t1, arg0);
12104 /* Simplify unordered comparison of something with itself. */
12105 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12106 && operand_equal_p (arg0, arg1, 0))
12107 return constant_boolean_node (1, type);
12109 if (code == LTGT_EXPR
12110 && !flag_trapping_math
12111 && operand_equal_p (arg0, arg1, 0))
12112 return constant_boolean_node (0, type);
12114 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12116 tree targ0 = strip_float_extensions (arg0);
12117 tree targ1 = strip_float_extensions (arg1);
12118 tree newtype = TREE_TYPE (targ0);
12120 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12121 newtype = TREE_TYPE (targ1);
12123 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12124 return fold_build2 (code, type, fold_convert (newtype, targ0),
12125 fold_convert (newtype, targ1));
12130 case COMPOUND_EXPR:
12131 /* When pedantic, a compound expression can be neither an lvalue
12132 nor an integer constant expression. */
12133 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12135 /* Don't let (0, 0) be null pointer constant. */
12136 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12137 : fold_convert (type, arg1);
12138 return pedantic_non_lvalue (tem);
12141 if ((TREE_CODE (arg0) == REAL_CST
12142 && TREE_CODE (arg1) == REAL_CST)
12143 || (TREE_CODE (arg0) == INTEGER_CST
12144 && TREE_CODE (arg1) == INTEGER_CST))
12145 return build_complex (type, arg0, arg1);
12149 /* An ASSERT_EXPR should never be passed to fold_binary. */
12150 gcc_unreachable ();
12154 } /* switch (code) */
12157 /* Callback for walk_tree, looking for LABEL_EXPR.
12158 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
12159 Do not check the sub-tree of GOTO_EXPR. */
/* walk_tree callback: report a LABEL_EXPR.  Per the comment above,
   returns *TP when it is a LABEL_EXPR and NULL_TREE otherwise, and
   the operands of a GOTO_EXPR are not scanned.  NOTE(review): this
   dump elides the return statements and case labels of the switch.  */
12162 contains_label_1 (tree *tp,
12163 int *walk_subtrees,
12164 void *data ATTRIBUTE_UNUSED)
12166 switch (TREE_CODE (*tp))
/* Prune the walk: do not descend into this node's operands.  */
12171 *walk_subtrees = 0;
12178 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12179 accessible from outside the sub-tree. Returns NULL_TREE if no
12180 addressable label is found. */
/* Return true when sub-tree ST contains a LABEL_EXPR (found via
   walk_tree with the contains_label_1 callback); false otherwise.  */
12183 contains_label_p (tree st)
12185 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12188 /* Fold a ternary expression of code CODE and type TYPE with operands
12189 OP0, OP1, and OP2. Return the folded expression if folding is
12190 successful. Otherwise, return NULL_TREE. */
/* Fold a ternary expression with code CODE and type TYPE whose
   operands are OP0, OP1 and OP2; return the folded tree, or NULL_TREE
   when no simplification applies (per the comment preceding this
   function).  NOTE(review): this dump elides many lines (case labels,
   braces, some operands); comments below annotate only visible code.  */
12193 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12196 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12197 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Only expression-class codes with exactly three operands belong here.  */
12199 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12200 && TREE_CODE_LENGTH (code) == 3);
12202 /* Strip any conversions that don't change the mode. This is safe
12203 for every expression, except for a comparison expression because
12204 its signedness is derived from its operands. So, in the latter
12205 case, only strip conversions that don't change the signedness.
12207 Note that this is done as an internal manipulation within the
12208 constant folder, in order to find the simplest representation of
12209 the arguments so that their form can be studied. In any cases,
12210 the appropriate type conversions should be put back in the tree
12211 that will get out of the constant folder. */
12226 case COMPONENT_REF:
/* Fold a COMPONENT_REF of a constant CONSTRUCTOR by searching the
   constructor's elements for the referenced field.  */
12227 if (TREE_CODE (arg0) == CONSTRUCTOR
12228 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12230 unsigned HOST_WIDE_INT idx;
12232 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12239 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12240 so all simple results must be passed through pedantic_non_lvalue. */
/* Constant condition: select one arm, but keep a branch whose
   dropped counterpart has side effects containing a label.  */
12241 if (TREE_CODE (arg0) == INTEGER_CST)
12243 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12244 tem = integer_zerop (arg0) ? op2 : op1;
12245 /* Only optimize constant conditions when the selected branch
12246 has the same type as the COND_EXPR. This avoids optimizing
12247 away "c ? x : throw", where the throw has a void type.
12248 Avoid throwing away that operand which contains label. */
12249 if ((!TREE_SIDE_EFFECTS (unused_op)
12250 || !contains_label_p (unused_op))
12251 && (! VOID_TYPE_P (TREE_TYPE (tem))
12252 || VOID_TYPE_P (type)))
12253 return pedantic_non_lvalue (tem);
/* A ? B : B --> B, preserving A via pedantic_omit_one_operand.  */
12256 if (operand_equal_p (arg1, op2, 0))
12257 return pedantic_omit_one_operand (type, arg1, arg0);
12259 /* If we have A op B ? A : C, we may be able to convert this to a
12260 simpler expression, depending on the operation and the values
12261 of B and C. Signed zeros prevent all of these transformations,
12262 for reasons given above each one.
12264 Also try swapping the arguments and inverting the conditional. */
12265 if (COMPARISON_CLASS_P (arg0)
12266 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12267 arg1, TREE_OPERAND (arg0, 1))
12268 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12270 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
/* Same transformation with the condition inverted and the arms
   swapped (the "also try swapping" case noted above).  */
12275 if (COMPARISON_CLASS_P (arg0)
12276 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12278 TREE_OPERAND (arg0, 1))
12279 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12281 tem = fold_truth_not_expr (arg0);
12282 if (tem && COMPARISON_CLASS_P (tem))
12284 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12290 /* If the second operand is simpler than the third, swap them
12291 since that produces better jump optimization results. */
12292 if (truth_value_p (TREE_CODE (arg0))
12293 && tree_swap_operands_p (op1, op2, false))
12295 /* See if this can be inverted. If it can't, possibly because
12296 it was a floating-point inequality comparison, don't do
12298 tem = fold_truth_not_expr (arg0);
12300 return fold_build3 (code, type, tem, op2, op1);
12303 /* Convert A ? 1 : 0 to simply A. */
12304 if (integer_onep (op1)
12305 && integer_zerop (op2)
12306 /* If we try to convert OP0 to our type, the
12307 call to fold will try to move the conversion inside
12308 a COND, which will recurse. In that case, the COND_EXPR
12309 is probably the best choice, so leave it alone. */
12310 && type == TREE_TYPE (arg0))
12311 return pedantic_non_lvalue (arg0);
12313 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12314 over COND_EXPR in cases such as floating point comparisons. */
12315 if (integer_zerop (op1)
12316 && integer_onep (op2)
12317 && truth_value_p (TREE_CODE (arg0)))
12318 return pedantic_non_lvalue (fold_convert (type,
12319 invert_truthvalue (arg0)));
12321 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12322 if (TREE_CODE (arg0) == LT_EXPR
12323 && integer_zerop (TREE_OPERAND (arg0, 1))
12324 && integer_zerop (op2)
12325 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12327 /* sign_bit_p only checks ARG1 bits within A's precision.
12328 If <sign bit of A> has wider type than A, bits outside
12329 of A's precision in <sign bit of A> need to be checked.
12330 If they are all 0, this optimization needs to be done
12331 in unsigned A's type, if they are all 1 in signed A's type,
12332 otherwise this can't be done. */
12333 if (TYPE_PRECISION (TREE_TYPE (tem))
12334 < TYPE_PRECISION (TREE_TYPE (arg1))
12335 && TYPE_PRECISION (TREE_TYPE (tem))
12336 < TYPE_PRECISION (type))
12338 unsigned HOST_WIDE_INT mask_lo;
12339 HOST_WIDE_INT mask_hi;
12340 int inner_width, outer_width;
12343 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12344 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12345 if (outer_width > TYPE_PRECISION (type))
12346 outer_width = TYPE_PRECISION (type);
/* Build a two-word (hi/lo HOST_WIDE_INT pair) mask covering the bits
   between INNER_WIDTH and OUTER_WIDTH, then test below whether ARG1
   is all-ones there (fold in signed type) or all-zeros (unsigned).  */
12348 if (outer_width > HOST_BITS_PER_WIDE_INT)
12350 mask_hi = ((unsigned HOST_WIDE_INT) -1
12351 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12357 mask_lo = ((unsigned HOST_WIDE_INT) -1
12358 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12360 if (inner_width > HOST_BITS_PER_WIDE_INT)
12362 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12363 >> (HOST_BITS_PER_WIDE_INT - inner_width))
12367 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12368 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12370 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12371 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12373 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
12374 tem = fold_convert (tem_type, tem);
12376 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12377 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12379 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
12380 tem = fold_convert (tem_type, tem);
12387 return fold_convert (type,
12388 fold_build2 (BIT_AND_EXPR,
12389 TREE_TYPE (tem), tem,
12390 fold_convert (TREE_TYPE (tem),
12394 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12395 already handled above. */
12396 if (TREE_CODE (arg0) == BIT_AND_EXPR
12397 && integer_onep (TREE_OPERAND (arg0, 1))
12398 && integer_zerop (op2)
12399 && integer_pow2p (arg1))
12401 tree tem = TREE_OPERAND (arg0, 0)
12403 if (TREE_CODE (tem) == RSHIFT_EXPR
12404 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12405 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12406 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12407 return fold_build2 (BIT_AND_EXPR, type,
12408 TREE_OPERAND (tem, 0), arg1);
12411 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12412 is probably obsolete because the first operand should be a
12413 truth value (that's why we have the two cases above), but let's
12414 leave it in until we can confirm this for all front-ends. */
12415 if (integer_zerop (op2)
12416 && TREE_CODE (arg0) == NE_EXPR
12417 && integer_zerop (TREE_OPERAND (arg0, 1))
12418 && integer_pow2p (arg1)
12419 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12420 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12421 arg1, OEP_ONLY_CONST))
12422 return pedantic_non_lvalue (fold_convert (type,
12423 TREE_OPERAND (arg0, 0)));
12425 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12426 if (integer_zerop (op2)
12427 && truth_value_p (TREE_CODE (arg0))
12428 && truth_value_p (TREE_CODE (arg1)))
12429 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12430 fold_convert (type, arg0),
12433 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12434 if (integer_onep (op2)
12435 && truth_value_p (TREE_CODE (arg0))
12436 && truth_value_p (TREE_CODE (arg1)))
12438 /* Only perform transformation if ARG0 is easily inverted. */
12439 tem = fold_truth_not_expr (arg0);
12441 return fold_build2 (TRUTH_ORIF_EXPR, type,
12442 fold_convert (type, tem),
12446 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12447 if (integer_zerop (arg1)
12448 && truth_value_p (TREE_CODE (arg0))
12449 && truth_value_p (TREE_CODE (op2)))
12451 /* Only perform transformation if ARG0 is easily inverted. */
12452 tem = fold_truth_not_expr (arg0);
12454 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12455 fold_convert (type, tem),
12459 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12460 if (integer_onep (arg1)
12461 && truth_value_p (TREE_CODE (arg0))
12462 && truth_value_p (TREE_CODE (op2)))
12463 return fold_build2 (TRUTH_ORIF_EXPR, type,
12464 fold_convert (type, arg0),
12470 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12471 of fold_ternary on them. */
12472 gcc_unreachable ();
12474 case BIT_FIELD_REF:
/* Fold a constant-index BIT_FIELD_REF of a constant vector to the
   selected element when the extracted width equals the element size.  */
12475 if ((TREE_CODE (arg0) == VECTOR_CST
12476 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
12477 && type == TREE_TYPE (TREE_TYPE (arg0))
12478 && host_integerp (arg1, 1)
12479 && host_integerp (op2, 1))
12481 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12482 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
12485 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12486 && (idx % width) == 0
12487 && (idx = idx / width)
12488 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12490 tree elements = NULL_TREE;
12492 if (TREE_CODE (arg0) == VECTOR_CST)
12493 elements = TREE_VECTOR_CST_ELTS (arg0);
12496 unsigned HOST_WIDE_INT idx;
/* The tree_cons loop builds the element list in reverse order.  */
12499 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
12500 elements = tree_cons (NULL_TREE, value, elements);
/* Advance IDX links along the TREE_CHAIN to the wanted element.  */
12502 while (idx-- > 0 && elements)
12503 elements = TREE_CHAIN (elements);
12505 return TREE_VALUE (elements);
/* NOTE(review): in the full source this path returns zero when the
   element list ran out — elided context, verify against upstream.  */
12507 return fold_convert (type, integer_zero_node);
12514 } /* switch (code) */
12517 /* Perform constant folding and related simplification of EXPR.
12518 The related simplifications include x*1 => x, x*0 => 0, etc.,
12519 and application of the associative law.
12520 NOP_EXPR conversions may be removed freely (as long as we
12521 are careful not to change the type of the overall expression).
12522 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12523 but we can constant-fold them if they have constant operands. */
12525 #ifdef ENABLE_FOLD_CHECKING
12526 # define fold(x) fold_1 (x)
12527 static tree fold_1 (tree);
/* Main folding dispatcher (function header elided in this dump; per the
   comment above, this is `fold', which performs constant folding and
   related simplification of EXPR).  Dispatches on the operand count of
   EXPR's code to fold_unary/fold_binary/fold_ternary and returns EXPR
   unchanged when the sub-folder finds nothing.  */
12533 const tree t = expr;
12534 enum tree_code code = TREE_CODE (t);
12535 enum tree_code_class kind = TREE_CODE_CLASS (code);
12538 /* Return right away if a constant. */
12539 if (kind == tcc_constant)
12542 /* CALL_EXPR-like objects with variable numbers of operands are
12543 treated specially. */
12544 if (kind == tcc_vl_exp)
12546 if (code == CALL_EXPR)
12548 tem = fold_call_expr (expr, false);
12549 return tem ? tem : expr;
/* Expression and GIMPLE-statement classes: fold by arity.  */
12554 if (IS_EXPR_CODE_CLASS (kind)
12555 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12557 tree type = TREE_TYPE (t);
12558 tree op0, op1, op2;
12560 switch (TREE_CODE_LENGTH (code))
12563 op0 = TREE_OPERAND (t, 0);
12564 tem = fold_unary (code, type, op0);
12565 return tem ? tem : expr;
12567 op0 = TREE_OPERAND (t, 0);
12568 op1 = TREE_OPERAND (t, 1);
12569 tem = fold_binary (code, type, op0, op1);
12570 return tem ? tem : expr;
12572 op0 = TREE_OPERAND (t, 0);
12573 op1 = TREE_OPERAND (t, 1);
12574 op2 = TREE_OPERAND (t, 2);
12575 tem = fold_ternary (code, type, op0, op1, op2);
12576 return tem ? tem : expr;
/* NOTE(review): case label elided — this folds some node through its
   DECL_INITIAL; confirm which code against the full source.  */
12585 return fold (DECL_INITIAL (t));
12589 } /* switch (code) */
12592 #ifdef ENABLE_FOLD_CHECKING
12595 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12596 static void fold_check_failed (tree, tree);
12597 void print_fold_checksum (tree);
12599 /* When --enable-checking=fold, compute a digest of expr before
12600 and after actual fold call to see if fold did not accidentally
12601 change original expr. */
/* ENABLE_FOLD_CHECKING variant of fold (header elided in this dump):
   take an MD5 digest of EXPR before and after calling fold_1, and
   abort via fold_check_failed if fold mutated its input in place.  */
12607 struct md5_ctx ctx;
12608 unsigned char checksum_before[16], checksum_after[16];
/* Pointer-hashed table so shared sub-trees are digested only once.  */
12611 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12612 md5_init_ctx (&ctx);
12613 fold_checksum_tree (expr, &ctx, ht);
12614 md5_finish_ctx (&ctx, checksum_before);
12617 ret = fold_1 (expr);
12619 md5_init_ctx (&ctx);
12620 fold_checksum_tree (expr, &ctx, ht);
12621 md5_finish_ctx (&ctx, checksum_after);
12624 if (memcmp (checksum_before, checksum_after, 16))
12625 fold_check_failed (expr, ret);
/* Debug helper: print the 16-byte MD5 checksum of EXPR's tree digest
   to stderr as 32 hex characters followed by a newline.  */
12631 print_fold_checksum (tree expr)
12633 struct md5_ctx ctx;
12634 unsigned char checksum[16], cnt;
12637 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12638 md5_init_ctx (&ctx);
12639 fold_checksum_tree (expr, &ctx, ht);
12640 md5_finish_ctx (&ctx, checksum);
12642 for (cnt = 0; cnt < 16; ++cnt)
12643 fprintf (stderr, "%02x", checksum[cnt]);
12644 putc ('\n', stderr);
/* Abort compilation: fold modified the tree it was given.  EXPR and
   RET are unused here; the internal_error does not return.  */
12648 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12650 internal_error ("fold check: original tree changed by fold");
/* Recursively feed the bytes of EXPR and its reachable sub-trees into
   MD5 context CTX.  HT is a pointer-keyed hash table recording nodes
   already visited, so shared/cyclic structure is digested once.
   Fields that fold is allowed to modify (DECL_ASSEMBLER_NAME, type
   pointer caches) are cleared in a stack copy before hashing so that
   legitimate changes do not trip the checksum.  NOTE(review): several
   early-return and case-label lines are elided in this dump.  */
12654 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12657 enum tree_code code;
12658 struct tree_function_decl buf;
/* BUF must be big enough to hold a copy of any node we may sanitize.  */
12663 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12664 <= sizeof (struct tree_function_decl))
12665 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12668 slot = htab_find_slot (ht, expr, INSERT);
12672 code = TREE_CODE (expr);
12673 if (TREE_CODE_CLASS (code) == tcc_declaration
12674 && DECL_ASSEMBLER_NAME_SET_P (expr))
12676 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12677 memcpy ((char *) &buf, expr, tree_size (expr));
12678 expr = (tree) &buf;
12679 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12681 else if (TREE_CODE_CLASS (code) == tcc_type
12682 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12683 || TYPE_CACHED_VALUES_P (expr)
12684 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12686 /* Allow these fields to be modified. */
12687 memcpy ((char *) &buf, expr, tree_size (expr));
12688 expr = (tree) &buf;
12689 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12690 TYPE_POINTER_TO (expr) = NULL;
12691 TYPE_REFERENCE_TO (expr) = NULL;
12692 if (TYPE_CACHED_VALUES_P (expr))
12694 TYPE_CACHED_VALUES_P (expr) = 0;
12695 TYPE_CACHED_VALUES (expr) = NULL;
/* Hash the node's raw bytes, then recurse into its type and chain.  */
12698 md5_process_bytes (expr, tree_size (expr), ctx);
12699 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12700 if (TREE_CODE_CLASS (code) != tcc_type
12701 && TREE_CODE_CLASS (code) != tcc_declaration
12702 && code != TREE_LIST)
12703 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
/* Per-class recursion into the node's other pointer fields.  */
12704 switch (TREE_CODE_CLASS (code))
12710 md5_process_bytes (TREE_STRING_POINTER (expr),
12711 TREE_STRING_LENGTH (expr), ctx);
12714 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12715 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12718 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12724 case tcc_exceptional:
/* TREE_LIST: hash purpose/value, then iterate down the chain via
   goto rather than deep recursion.  */
12728 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12729 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12730 expr = TREE_CHAIN (expr);
12731 goto recursive_label;
12734 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12735 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12741 case tcc_expression:
12742 case tcc_reference:
12743 case tcc_comparison:
12746 case tcc_statement:
12748 len = TREE_OPERAND_LENGTH (expr);
12749 for (i = 0; i < len; ++i)
12750 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12752 case tcc_declaration:
12753 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12754 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12755 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12757 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12758 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12759 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12760 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12761 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12763 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12764 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12766 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12768 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12769 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12770 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
/* tcc_type (case label elided): hash the type's size, attributes,
   name, bounds, variants and context.  */
12774 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12775 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12776 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12777 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12778 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12779 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12780 if (INTEGRAL_TYPE_P (expr)
12781 || SCALAR_FLOAT_TYPE_P (expr))
12783 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12784 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12786 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12787 if (TREE_CODE (expr) == RECORD_TYPE
12788 || TREE_CODE (expr) == UNION_TYPE
12789 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12790 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12791 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12800 /* Fold a unary tree expression with code CODE of type TYPE with an
12801 operand OP0. Return a folded expression if successful. Otherwise,
12802 return a tree expression with code CODE of type TYPE with an
/* Build a unary expression with code CODE and type TYPE from OP0,
   folding if possible (falls back to build1_stat otherwise).  Under
   ENABLE_FOLD_CHECKING, verify via MD5 digests that OP0 was not
   modified by the fold.  */
12806 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12809 #ifdef ENABLE_FOLD_CHECKING
12810 unsigned char checksum_before[16], checksum_after[16];
12811 struct md5_ctx ctx;
12814 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12815 md5_init_ctx (&ctx);
12816 fold_checksum_tree (op0, &ctx, ht);
12817 md5_finish_ctx (&ctx, checksum_before);
12821 tem = fold_unary (code, type, op0);
/* Folding found nothing: build the plain expression node.  */
12823 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12825 #ifdef ENABLE_FOLD_CHECKING
12826 md5_init_ctx (&ctx);
12827 fold_checksum_tree (op0, &ctx, ht);
12828 md5_finish_ctx (&ctx, checksum_after);
12831 if (memcmp (checksum_before, checksum_after, 16))
12832 fold_check_failed (op0, tem);
12837 /* Fold a binary tree expression with code CODE of type TYPE with
12838 operands OP0 and OP1. Return a folded expression if successful.
12839 Otherwise, return a tree expression with code CODE of type TYPE
12840 with operands OP0 and OP1. */
/* Build a binary expression with code CODE and type TYPE from OP0 and
   OP1, folding if possible (falls back to build2_stat otherwise).
   Under ENABLE_FOLD_CHECKING, digest both operands before and after
   and abort if fold modified either one.  */
12843 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12847 #ifdef ENABLE_FOLD_CHECKING
12848 unsigned char checksum_before_op0[16],
12849 checksum_before_op1[16],
12850 checksum_after_op0[16],
12851 checksum_after_op1[16];
12852 struct md5_ctx ctx;
12855 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12856 md5_init_ctx (&ctx);
12857 fold_checksum_tree (op0, &ctx, ht);
12858 md5_finish_ctx (&ctx, checksum_before_op0);
12861 md5_init_ctx (&ctx);
12862 fold_checksum_tree (op1, &ctx, ht);
12863 md5_finish_ctx (&ctx, checksum_before_op1);
12867 tem = fold_binary (code, type, op0, op1);
/* Folding found nothing: build the plain expression node.  */
12869 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12871 #ifdef ENABLE_FOLD_CHECKING
12872 md5_init_ctx (&ctx);
12873 fold_checksum_tree (op0, &ctx, ht);
12874 md5_finish_ctx (&ctx, checksum_after_op0);
12877 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12878 fold_check_failed (op0, tem);
12880 md5_init_ctx (&ctx);
12881 fold_checksum_tree (op1, &ctx, ht);
12882 md5_finish_ctx (&ctx, checksum_after_op1);
12885 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12886 fold_check_failed (op1, tem);
12891 /* Fold a ternary tree expression with code CODE of type TYPE with
12892 operands OP0, OP1, and OP2. Return a folded expression if
12893 successful. Otherwise, return a tree expression with code CODE of
12894 type TYPE with operands OP0, OP1, and OP2. */
/* Build a ternary expression with code CODE and type TYPE from OP0,
   OP1 and OP2, folding if possible (falls back to build3_stat
   otherwise).  Under ENABLE_FOLD_CHECKING, digest all three operands
   before and after and abort if fold modified any of them.  */
12897 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12901 #ifdef ENABLE_FOLD_CHECKING
12902 unsigned char checksum_before_op0[16],
12903 checksum_before_op1[16],
12904 checksum_before_op2[16],
12905 checksum_after_op0[16],
12906 checksum_after_op1[16],
12907 checksum_after_op2[16];
12908 struct md5_ctx ctx;
12911 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12912 md5_init_ctx (&ctx);
12913 fold_checksum_tree (op0, &ctx, ht);
12914 md5_finish_ctx (&ctx, checksum_before_op0);
12917 md5_init_ctx (&ctx);
12918 fold_checksum_tree (op1, &ctx, ht);
12919 md5_finish_ctx (&ctx, checksum_before_op1);
12922 md5_init_ctx (&ctx);
12923 fold_checksum_tree (op2, &ctx, ht);
12924 md5_finish_ctx (&ctx, checksum_before_op2);
/* Variable-length expressions (CALL_EXPR etc.) must not come here.  */
12928 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12929 tem = fold_ternary (code, type, op0, op1, op2);
/* Folding found nothing: build the plain expression node.  */
12931 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12933 #ifdef ENABLE_FOLD_CHECKING
12934 md5_init_ctx (&ctx);
12935 fold_checksum_tree (op0, &ctx, ht);
12936 md5_finish_ctx (&ctx, checksum_after_op0);
12939 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12940 fold_check_failed (op0, tem);
12942 md5_init_ctx (&ctx);
12943 fold_checksum_tree (op1, &ctx, ht);
12944 md5_finish_ctx (&ctx, checksum_after_op1);
12947 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12948 fold_check_failed (op1, tem);
12950 md5_init_ctx (&ctx);
12951 fold_checksum_tree (op2, &ctx, ht);
12952 md5_finish_ctx (&ctx, checksum_after_op2);
12955 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12956 fold_check_failed (op2, tem);
12961 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12962 arguments in ARGARRAY, and a null static chain.
12963 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12964 of type TYPE from the given operands as constructed by build_call_array. */
/* Build a CALL_EXPR of type TYPE calling FN with the NARGS arguments
   in ARGARRAY, folding builtins where possible via
   fold_builtin_call_array.  Under ENABLE_FOLD_CHECKING, digest FN and
   the argument list before and after and abort on any mutation.  */
12967 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
12970 #ifdef ENABLE_FOLD_CHECKING
12971 unsigned char checksum_before_fn[16],
12972 checksum_before_arglist[16],
12973 checksum_after_fn[16],
12974 checksum_after_arglist[16];
12975 struct md5_ctx ctx;
12979 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12980 md5_init_ctx (&ctx);
12981 fold_checksum_tree (fn, &ctx, ht);
12982 md5_finish_ctx (&ctx, checksum_before_fn);
/* All arguments are folded into a single arglist digest.  */
12985 md5_init_ctx (&ctx);
12986 for (i = 0; i < nargs; i++)
12987 fold_checksum_tree (argarray[i], &ctx, ht);
12988 md5_finish_ctx (&ctx, checksum_before_arglist);
12992 tem = fold_builtin_call_array (type, fn, nargs, argarray);
12994 #ifdef ENABLE_FOLD_CHECKING
12995 md5_init_ctx (&ctx);
12996 fold_checksum_tree (fn, &ctx, ht);
12997 md5_finish_ctx (&ctx, checksum_after_fn);
13000 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13001 fold_check_failed (fn, tem);
13003 md5_init_ctx (&ctx);
13004 for (i = 0; i < nargs; i++)
13005 fold_checksum_tree (argarray[i], &ctx, ht);
13006 md5_finish_ctx (&ctx, checksum_after_arglist);
13009 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13010 fold_check_failed (NULL_TREE, tem);
13015 /* Perform constant folding and related simplification of initializer
13016 expression EXPR. These behave identically to "fold_buildN" but ignore
13017 potential run-time traps and exceptions that fold must preserve. */
/* Save the trap/rounding-related flags, then clear them and set
   folding_initializer, so the fold_*_initializer wrappers below may
   ignore run-time traps and exceptions (see comment above).  */
13019 #define START_FOLD_INIT \
13020 int saved_signaling_nans = flag_signaling_nans;\
13021 int saved_trapping_math = flag_trapping_math;\
13022 int saved_rounding_math = flag_rounding_math;\
13023 int saved_trapv = flag_trapv;\
13024 int saved_folding_initializer = folding_initializer;\
13025 flag_signaling_nans = 0;\
13026 flag_trapping_math = 0;\
13027 flag_rounding_math = 0;\
13029 folding_initializer = 1;
/* Restore every flag saved by START_FOLD_INIT.  */
13031 #define END_FOLD_INIT \
13032 flag_signaling_nans = saved_signaling_nans;\
13033 flag_trapping_math = saved_trapping_math;\
13034 flag_rounding_math = saved_rounding_math;\
13035 flag_trapv = saved_trapv;\
13036 folding_initializer = saved_folding_initializer;
/* Like fold_build1 but for initializer folding: the surrounding
   START_FOLD_INIT/END_FOLD_INIT (elided in this dump) suppress the
   trap/rounding flags around the call.  */
13039 fold_build1_initializer (enum tree_code code, tree type, tree op)
13044 result = fold_build1 (code, type, op);
/* Like fold_build2 but for initializer folding: trap/rounding flags
   are suppressed via START_FOLD_INIT/END_FOLD_INIT (elided here).  */
13051 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13056 result = fold_build2 (code, type, op0, op1);
/* Like fold_build3 but for initializer folding: trap/rounding flags
   are suppressed via START_FOLD_INIT/END_FOLD_INIT (elided here).  */
13063 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13069 result = fold_build3 (code, type, op0, op1, op2);
/* Like fold_build_call_array but for initializer folding:
   trap/rounding flags suppressed via START_FOLD_INIT/END_FOLD_INIT
   (elided here).  */
13076 fold_build_call_array_initializer (tree type, tree fn,
13077 int nargs, tree *argarray)
13082 result = fold_build_call_array (type, fn, nargs, argarray);
13088 #undef START_FOLD_INIT
13089 #undef END_FOLD_INIT
13091 /* Determine if first argument is a multiple of second argument. Return 0 if
13092 it is not, or we cannot easily determined it to be.
13094 An example of the sort of thing we care about (at this point; this routine
13095 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13096 fold cases do now) is discovering that
13098 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13104 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13106 This code also handles discovering that
13108 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13110 is a multiple of 8 so we don't have to worry about dealing with a
13111 possible remainder.
13113 Note that we *look* inside a SAVE_EXPR only to determine how it was
13114 calculated; it is not safe for fold to do much of anything else with the
13115 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13116 at run time. For example, the latter example above *cannot* be implemented
13117 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13118 evaluation time of the original SAVE_EXPR is not necessarily the same at
13119 the time the new expression is evaluated. The only optimization of this
13120 sort that would be valid is changing
13122 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13126 SAVE_EXPR (I) * SAVE_EXPR (J)
13128 (where the same SAVE_EXPR (J) is used in the original and the
13129 transformed version). */
/* Return nonzero when TOP can be shown to be an exact multiple of
   BOTTOM (both interpreted in integer TYPE); return 0 when it is not,
   or when we cannot easily tell — see the long comment above for the
   SAVE_EXPR motivation and the safety constraints.  NOTE(review):
   case labels and several return statements are elided in this dump.  */
13132 multiple_of_p (tree type, tree top, tree bottom)
/* X is trivially a multiple of X.  */
13134 if (operand_equal_p (top, bottom, 0))
13137 if (TREE_CODE (type) != INTEGER_TYPE)
13140 switch (TREE_CODE (top))
13143 /* Bitwise and provides a power of two multiple. If the mask is
13144 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13145 if (!integer_pow2p (bottom))
/* For multiplication (elided case label), either factor being a
   multiple of BOTTOM suffices.  */
13150 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13151 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* For addition/subtraction (elided case labels), both operands must
   be multiples of BOTTOM.  */
13155 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13156 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Left shift by a constant: rewrite as a multiplication by the
   equivalent power of two, guarding against overflow explicitly.  */
13159 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13163 op1 = TREE_OPERAND (top, 1);
13164 /* const_binop may not detect overflow correctly,
13165 so check for it explicitly here. */
13166 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13167 > TREE_INT_CST_LOW (op1)
13168 && TREE_INT_CST_HIGH (op1) == 0
13169 && 0 != (t1 = fold_convert (type,
13170 const_binop (LSHIFT_EXPR,
13173 && !TREE_OVERFLOW (t1))
13174 return multiple_of_p (type, t1, bottom);
13179 /* Can't handle conversions from non-integral or wider integral type. */
13180 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13181 || (TYPE_PRECISION (type)
13182 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13185 /* .. fall through ... */
/* Look through the conversion/SAVE_EXPR to its operand.  */
13188 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* INTEGER_CST (elided case label): fall back to an exact modulus
   test, refusing negative operands in unsigned TYPE.  */
13191 if (TREE_CODE (bottom) != INTEGER_CST
13192 || (TYPE_UNSIGNED (type)
13193 && (tree_int_cst_sgn (top) < 0
13194 || tree_int_cst_sgn (bottom) < 0)))
13196 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13204 /* Return true if `t' is known to be non-negative. If the return
13205 value is based on the assumption that signed overflow is undefined,
13206 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13207 *STRICT_OVERFLOW_P. */
/* NOTE(review): this extract elides many source lines (the embedded
   original line numbers jump), so the statements below are a fragment
   of the full function; braces and several case labels are missing.  */
/* Recursively classify T as known-nonnegative by dispatching on its
   tree code: constants directly, arithmetic by its operands, casts by
   precision/signedness, built-in calls by their mathematical range.  */
13210 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13212   if (t == error_mark_node)
13215   if (TYPE_UNSIGNED (TREE_TYPE (t)))
13218   switch (TREE_CODE (t))
13221       /* Query VRP to see if it has recorded any information about
13222 	 the range of this object.  */
13223       return ssa_name_nonnegative_p (t);
13226       /* We can't return 1 if flag_wrapv is set because
13227 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
13228       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13230       if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
13232 	  *strict_overflow_p = true;
13238       return tree_int_cst_sgn (t) >= 0;
13241       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
	/* Addition: floats are nonnegative when both addends are
	   (no wrap-around to worry about).  */
13244       if (FLOAT_TYPE_P (TREE_TYPE (t)))
13245 	return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13247 		&& tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13248 						  strict_overflow_p));
13250       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13251 	 both unsigned and at least 2 bits shorter than the result.  */
13252       if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13253 	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13254 	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13256 	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13257 	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13258 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13259 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	      /* +1 bit of headroom guarantees the sum cannot wrap.  */
13261 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
13262 				       TYPE_PRECISION (inner2)) + 1;
13263 	      return prec < TYPE_PRECISION (TREE_TYPE (t));
13269       if (FLOAT_TYPE_P (TREE_TYPE (t)))
13271 	  /* x * x for floating point x is always non-negative.  */
13272 	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
13274 	  return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13276 		  && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13277 						    strict_overflow_p));
13280       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13281 	 both unsigned and their total bits is shorter than the result.  */
13282       if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13283 	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13284 	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13286 	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13287 	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13288 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13289 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13290 	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13291 		   < TYPE_PRECISION (TREE_TYPE (t));
	/* Bitwise OR / AND style case (label elided): nonnegative when
	   either operand is — presumably BIT_AND_EXPR; TODO confirm
	   against the unabridged source.  */
13297       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13299 	      || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13300 						strict_overflow_p));
13306     case TRUNC_DIV_EXPR:
13307     case CEIL_DIV_EXPR:
13308     case FLOOR_DIV_EXPR:
13309     case ROUND_DIV_EXPR:
	/* A quotient is nonnegative when both operands are.  */
13310       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13312 	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13313 					       strict_overflow_p));
13315     case TRUNC_MOD_EXPR:
13316     case CEIL_MOD_EXPR:
13317     case FLOOR_MOD_EXPR:
13318     case ROUND_MOD_EXPR:
13320     case NON_LVALUE_EXPR:
13322     case FIX_TRUNC_EXPR:
	/* Sign follows the first operand for these codes.  */
13323       return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13324 					    strict_overflow_p);
13326     case COMPOUND_EXPR:
13328     case GIMPLE_MODIFY_STMT:
	/* Value of the whole expression is its second operand (RHS).  */
13329       return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13330 					    strict_overflow_p);
13333       return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13334 					    strict_overflow_p);
	/* Conditional (COND_EXPR — label elided): both arms must be
	   nonnegative.  */
13337       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13339 	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13340 						strict_overflow_p));
	/* Conversion case: compare inner and outer types.  */
13344 	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13345 	tree outer_type = TREE_TYPE (t);
13347 	if (TREE_CODE (outer_type) == REAL_TYPE)
13349 	    if (TREE_CODE (inner_type) == REAL_TYPE)
13350 	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13351 						    strict_overflow_p);
13352 	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
13354 		if (TYPE_UNSIGNED (inner_type))
13356 		return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13357 						      strict_overflow_p);
13360 	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13362 	    if (TREE_CODE (inner_type) == REAL_TYPE)
13363 	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
13364 						    strict_overflow_p);
13365 	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      /* Widening from unsigned cannot introduce a sign bit.  */
13366 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13367 		     && TYPE_UNSIGNED (inner_type);
	/* TARGET_EXPR: inspect the initializer that fills the slot.  */
13374 	tree temp = TARGET_EXPR_SLOT (t);
13375 	t = TARGET_EXPR_INITIAL (t);
13377 	/* If the initializer is non-void, then it's a normal expression
13378 	   that will be assigned to the slot.  */
13379 	if (!VOID_TYPE_P (t))
13380 	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13382 	/* Otherwise, the initializer sets the slot in some way.  One common
13383 	   way is an assignment statement at the end of the initializer.  */
13386 	if (TREE_CODE (t) == BIND_EXPR)
13387 	  t = expr_last (BIND_EXPR_BODY (t));
13388 	else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13389 		 || TREE_CODE (t) == TRY_CATCH_EXPR)
13390 	  t = expr_last (TREE_OPERAND (t, 0));
13391 	else if (TREE_CODE (t) == STATEMENT_LIST)
13396 	if ((TREE_CODE (t) == MODIFY_EXPR
13397 	     || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13398 	    && GENERIC_TREE_OPERAND (t, 0) == temp)
13399 	  return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13400 						strict_overflow_p);
	/* CALL_EXPR: classify built-in functions whose result range is
	   known to be nonnegative.  */
13407 	tree fndecl = get_callee_fndecl (t);
13408 	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13409 	  switch (DECL_FUNCTION_CODE (fndecl))
	    /* These are always nonnegative by definition.  */
13411 	    CASE_FLT_FN (BUILT_IN_ACOS):
13412 	    CASE_FLT_FN (BUILT_IN_ACOSH):
13413 	    CASE_FLT_FN (BUILT_IN_CABS):
13414 	    CASE_FLT_FN (BUILT_IN_COSH):
13415 	    CASE_FLT_FN (BUILT_IN_ERFC):
13416 	    CASE_FLT_FN (BUILT_IN_EXP):
13417 	    CASE_FLT_FN (BUILT_IN_EXP10):
13418 	    CASE_FLT_FN (BUILT_IN_EXP2):
13419 	    CASE_FLT_FN (BUILT_IN_FABS):
13420 	    CASE_FLT_FN (BUILT_IN_FDIM):
13421 	    CASE_FLT_FN (BUILT_IN_HYPOT):
13422 	    CASE_FLT_FN (BUILT_IN_POW10):
13423 	    CASE_INT_FN (BUILT_IN_FFS):
13424 	    CASE_INT_FN (BUILT_IN_PARITY):
13425 	    CASE_INT_FN (BUILT_IN_POPCOUNT):
13426 	    case BUILT_IN_BSWAP32:
13427 	    case BUILT_IN_BSWAP64:
13431 	    CASE_FLT_FN (BUILT_IN_SQRT):
13432 	      /* sqrt(-0.0) is -0.0.  */
13433 	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
13435 	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13436 						    strict_overflow_p);
	    /* These are sign-preserving ("odd") functions.  */
13438 	    CASE_FLT_FN (BUILT_IN_ASINH):
13439 	    CASE_FLT_FN (BUILT_IN_ATAN):
13440 	    CASE_FLT_FN (BUILT_IN_ATANH):
13441 	    CASE_FLT_FN (BUILT_IN_CBRT):
13442 	    CASE_FLT_FN (BUILT_IN_CEIL):
13443 	    CASE_FLT_FN (BUILT_IN_ERF):
13444 	    CASE_FLT_FN (BUILT_IN_EXPM1):
13445 	    CASE_FLT_FN (BUILT_IN_FLOOR):
13446 	    CASE_FLT_FN (BUILT_IN_FMOD):
13447 	    CASE_FLT_FN (BUILT_IN_FREXP):
13448 	    CASE_FLT_FN (BUILT_IN_LCEIL):
13449 	    CASE_FLT_FN (BUILT_IN_LDEXP):
13450 	    CASE_FLT_FN (BUILT_IN_LFLOOR):
13451 	    CASE_FLT_FN (BUILT_IN_LLCEIL):
13452 	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
13453 	    CASE_FLT_FN (BUILT_IN_LLRINT):
13454 	    CASE_FLT_FN (BUILT_IN_LLROUND):
13455 	    CASE_FLT_FN (BUILT_IN_LRINT):
13456 	    CASE_FLT_FN (BUILT_IN_LROUND):
13457 	    CASE_FLT_FN (BUILT_IN_MODF):
13458 	    CASE_FLT_FN (BUILT_IN_NEARBYINT):
13459 	    CASE_FLT_FN (BUILT_IN_RINT):
13460 	    CASE_FLT_FN (BUILT_IN_ROUND):
13461 	    CASE_FLT_FN (BUILT_IN_SCALB):
13462 	    CASE_FLT_FN (BUILT_IN_SCALBLN):
13463 	    CASE_FLT_FN (BUILT_IN_SCALBN):
13464 	    CASE_FLT_FN (BUILT_IN_SIGNBIT):
13465 	    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13466 	    CASE_FLT_FN (BUILT_IN_SINH):
13467 	    CASE_FLT_FN (BUILT_IN_TANH):
13468 	    CASE_FLT_FN (BUILT_IN_TRUNC):
13469 	      /* True if the 1st argument is nonnegative.  */
13470 	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13471 						    strict_overflow_p);
13473 	    CASE_FLT_FN (BUILT_IN_FMAX):
13474 	      /* True if the 1st OR 2nd arguments are nonnegative.  */
13475 	      return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13477 		      || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13478 							 strict_overflow_p)));
13480 	    CASE_FLT_FN (BUILT_IN_FMIN):
13481 	      /* True if the 1st AND 2nd arguments are nonnegative.  */
13482 	      return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13484 		      && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13485 							 strict_overflow_p)));
13487 	    CASE_FLT_FN (BUILT_IN_COPYSIGN):
13488 	      /* True if the 2nd argument is nonnegative.  */
13489 	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13490 						    strict_overflow_p);
13492 	    CASE_FLT_FN (BUILT_IN_POWI):
13493 	      /* True if the 1st argument is nonnegative or the second
13494 		 argument is an even integer.  */
13495 	      if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
13497 		  tree arg1 = CALL_EXPR_ARG (t, 1);
13498 		  if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
13501 	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13502 						    strict_overflow_p);
13504 	    CASE_FLT_FN (BUILT_IN_POW):
13505 	      /* True if the 1st argument is nonnegative or the second
13506 		 argument is an even integer valued real.  */
13507 	      if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
13512 		  c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
13513 		  n = real_to_integer (&c);
		      /* Round-trip through an integer to test that the
			 real exponent is exactly integral.  */
13516 		      REAL_VALUE_TYPE cint;
13517 		      real_from_integer (&cint, VOIDmode, n,
13518 					 n < 0 ? -1 : 0, 0);
13519 		      if (real_identical (&c, &cint))
13523 	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13524 						    strict_overflow_p);
13531       /* ... fall through ...  */
13534       if (truth_value_p (TREE_CODE (t)))
13535 	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
13539   /* We don't know sign of `t', so be conservative and return false.  */
13543 /* Return true if `t' is known to be non-negative. Handle warnings
13544 about undefined signed overflow. */
13547 tree_expr_nonnegative_p (tree t)
13549 bool ret, strict_overflow_p;
13551 strict_overflow_p = false;
13552 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13553 if (strict_overflow_p)
13554 fold_overflow_warning (("assuming signed overflow does not occur when "
13555 "determining that expression is always "
13557 WARN_STRICT_OVERFLOW_MISC);
13561 /* Return true when T is an address and is known to be nonzero.
13562 For floating point we further ensure that T is not denormal.
13563 Similar logic is present in nonzero_address in rtlanal.h.
13565 If the return value is based on the assumption that signed overflow
13566 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13567 change *STRICT_OVERFLOW_P. */
/* NOTE(review): this extract elides many source lines (the embedded
   original line numbers jump); several case labels and braces of this
   switch are missing below.  */
/* Decide whether T is provably nonzero by structural analysis:
   constants directly, addresses via weakness of the underlying decl,
   arithmetic via the nonzero/nonnegative status of its operands.  */
13570 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13572   tree type = TREE_TYPE (t);
13573   bool sub_strict_overflow_p;
13575   /* Doing something useful for floating point would need more work.  */
13576   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
13579   switch (TREE_CODE (t))
13582       /* Query VRP to see if it has recorded any information about
13583 	 the range of this object.  */
13584       return ssa_name_nonzero_p (t);
13587       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13588 					strict_overflow_p);
13591       return !integer_zerop (t);
	/* Addition case (label elided).  */
13594       if (TYPE_OVERFLOW_UNDEFINED (type))
13596 	  /* With the presence of negative values it is hard
13597 	     to say something.  */
13598 	  sub_strict_overflow_p = false;
13599 	  if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13600 					      &sub_strict_overflow_p)
13601 	      || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13602 						 &sub_strict_overflow_p))
13604 	  /* One of operands must be positive and the other non-negative.  */
13605 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
13606 	     overflows, on a twos-complement machine the sum of two
13607 	     nonnegative numbers can never be zero.  */
13608 	  return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13610 		  || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13611 						strict_overflow_p));
	/* Multiplication case (label elided): nonzero * nonzero is
	   nonzero only when overflow is undefined.  */
13616       if (TYPE_OVERFLOW_UNDEFINED (type))
13618 	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13620 	      && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13621 					    strict_overflow_p))
13623 	      *strict_overflow_p = true;
	/* Conversion case: a widening (or same-size) cast preserves
	   nonzeroness of the operand.  */
13631 	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13632 	tree outer_type = TREE_TYPE (t);
13634 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13635 		&& tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13636 					      strict_overflow_p));
	/* ADDR_EXPR case (label elided): inspect the addressed base.  */
13642 	tree base = get_base_address (TREE_OPERAND (t, 0));
13647 	/* Weak declarations may link to NULL.  */
13648 	if (VAR_OR_FUNCTION_DECL_P (base))
13649 	  return !DECL_WEAK (base);
13651 	/* Constants are never weak.  */
13652 	if (CONSTANT_CLASS_P (base))
	/* COND_EXPR case (label elided): both arms must be nonzero.  */
13659       sub_strict_overflow_p = false;
13660       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13661 				     &sub_strict_overflow_p)
13662 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13663 					&sub_strict_overflow_p))
13665 	  if (sub_strict_overflow_p)
13666 	    *strict_overflow_p = true;
	/* MIN_EXPR case (label elided): both operands must be nonzero.  */
13672       sub_strict_overflow_p = false;
13673       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13674 				     &sub_strict_overflow_p)
13675 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13676 					&sub_strict_overflow_p))
13678 	  if (sub_strict_overflow_p)
13679 	    *strict_overflow_p = true;
	/* MAX_EXPR case (label elided).  */
13684       sub_strict_overflow_p = false;
13685       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13686 				     &sub_strict_overflow_p))
13688 	  if (sub_strict_overflow_p)
13689 	    *strict_overflow_p = true;
13691 	  /* When both operands are nonzero, then MAX must be too.  */
13692 	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13693 					 strict_overflow_p))
13696 	  /* MAX where operand 0 is positive is positive.  */
13697 	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13698 						strict_overflow_p);
13700       /* MAX where operand 1 is positive is positive.  */
13701       else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13702 					  &sub_strict_overflow_p)
13703 	       && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13704 						 &sub_strict_overflow_p))
13706 	  if (sub_strict_overflow_p)
13707 	    *strict_overflow_p = true;
13712     case COMPOUND_EXPR:
13714     case GIMPLE_MODIFY_STMT:
	/* The value is the RHS (operand 1).  */
13716       return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13717 					strict_overflow_p);
13720     case NON_LVALUE_EXPR:
13721       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13722 					strict_overflow_p);
	/* BIT_IOR_EXPR style case (label elided): either operand
	   nonzero suffices.  */
13725       return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13727 	      || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13728 					    strict_overflow_p));
	/* CALL_EXPR case (label elided): alloca never returns NULL.  */
13731       return alloca_call_p (t);
13739 /* Return true when T is an address and is known to be nonzero.
13740 Handle warnings about undefined signed overflow. */
13743 tree_expr_nonzero_p (tree t)
13745 bool ret, strict_overflow_p;
13747 strict_overflow_p = false;
13748 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
13749 if (strict_overflow_p)
13750 fold_overflow_warning (("assuming signed overflow does not occur when "
13751 "determining that expression is always "
13753 WARN_STRICT_OVERFLOW_MISC);
13757 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13758 attempt to fold the expression to a constant without modifying TYPE,
13761 If the expression could be simplified to a constant, then return
13762 the constant. If the expression would not be simplified to a
13763 constant, then return NULL_TREE. */
13766 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13768 tree tem = fold_binary (code, type, op0, op1);
13769 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13772 /* Given the components of a unary expression CODE, TYPE and OP0,
13773 attempt to fold the expression to a constant without modifying
13776 If the expression could be simplified to a constant, then return
13777 the constant. If the expression would not be simplified to a
13778 constant, then return NULL_TREE. */
13781 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13783 tree tem = fold_unary (code, type, op0);
13784 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13787 /* If EXP represents referencing an element in a constant string
13788 (either via pointer arithmetic or array indexing), return the
13789 tree representing the value accessed, otherwise return NULL. */
/* NOTE(review): this extract elides several source lines (the embedded
   original line numbers jump), so braces and parts of the final
   condition chain are missing below.  */
/* If EXP references an element of a constant string (via INDIRECT_REF
   or ARRAY_REF), return the accessed character as a constant tree,
   otherwise NULL.  */
13792 fold_read_from_constant_string (tree exp)
13794   if ((TREE_CODE (exp) == INDIRECT_REF
13795        || TREE_CODE (exp) == ARRAY_REF)
13796       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13798       tree exp1 = TREE_OPERAND (exp, 0);
13802       if (TREE_CODE (exp) == INDIRECT_REF)
13803 	string = string_constant (exp1, &index);
	  /* ARRAY_REF branch (else elided): index is operand 1,
	     adjusted for the array's lower bound.  */
13806 	  tree low_bound = array_ref_low_bound (exp);
13807 	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
13809 	  /* Optimize the special-case of a zero lower bound.
13811 	     We convert the low_bound to sizetype to avoid some problems
13812 	     with constant folding.  (E.g. suppose the lower bound is 1,
13813 	     and its mode is QI.  Without the conversion,l (ARRAY
13814 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13815 	     +INDEX), which becomes (ARRAY+255+INDEX).  Opps!)  */
13816 	  if (! integer_zerop (low_bound))
13817 	    index = size_diffop (index, fold_convert (sizetype, low_bound));
      /* Fold only when the string is a real STRING_CST, the index is a
	 constant inside its bounds, and the element is one byte wide
	 with matching mode.  */
13823 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13824 	  && TREE_CODE (string) == STRING_CST
13825 	  && TREE_CODE (index) == INTEGER_CST
13826 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13827 	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13829 	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13830 	return fold_convert (TREE_TYPE (exp),
13831 			     build_int_cst (NULL_TREE,
13832 					    (TREE_STRING_POINTER (string)
13833 					     [TREE_INT_CST_LOW (index)])));
13838 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13839 an integer constant or real constant.
13841 TYPE is the type of the result. */
13844 fold_negate_const (tree arg0, tree type)
13846 tree t = NULL_TREE;
13848 switch (TREE_CODE (arg0))
13852 unsigned HOST_WIDE_INT low;
13853 HOST_WIDE_INT high;
13854 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13855 TREE_INT_CST_HIGH (arg0),
13857 t = force_fit_type_double (type, low, high, 1,
13858 (overflow | TREE_OVERFLOW (arg0))
13859 && !TYPE_UNSIGNED (type));
13864 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13868 gcc_unreachable ();
13874 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13875 an integer constant or real constant.
13877 TYPE is the type of the result. */
13880 fold_abs_const (tree arg0, tree type)
13882 tree t = NULL_TREE;
13884 switch (TREE_CODE (arg0))
13887 /* If the value is unsigned, then the absolute value is
13888 the same as the ordinary value. */
13889 if (TYPE_UNSIGNED (type))
13891 /* Similarly, if the value is non-negative. */
13892 else if (INT_CST_LT (integer_minus_one_node, arg0))
13894 /* If the value is negative, then the absolute value is
13898 unsigned HOST_WIDE_INT low;
13899 HOST_WIDE_INT high;
13900 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13901 TREE_INT_CST_HIGH (arg0),
13903 t = force_fit_type_double (type, low, high, -1,
13904 overflow | TREE_OVERFLOW (arg0));
13909 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13910 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13916 gcc_unreachable ();
13922 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13923 constant. TYPE is the type of the result. */
13926 fold_not_const (tree arg0, tree type)
13928 tree t = NULL_TREE;
13930 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13932 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13933 ~TREE_INT_CST_HIGH (arg0), 0,
13934 TREE_OVERFLOW (arg0));
13939 /* Given CODE, a relational operator, the target type, TYPE and two
13940 constant operands OP0 and OP1, return the result of the
13941 relational operation. If the result is not a compile time
13942 constant, then return NULL_TREE. */
/* NOTE(review): this extract elides many source lines (the embedded
   original line numbers jump); in particular most of the NaN-handling
   switch below is missing.  */
/* Fold the relational comparison CODE of constants OP0 and OP1 to a
   constant boolean node of TYPE, or NULL_TREE if not compile-time
   computable.  Reduces all codes to EQ/LT by swapping and inverting.  */
13945 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13947   int result, invert;
13949   /* From here on, the only cases we handle are when the result is
13950      known to be a constant.  */
13952   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13954       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13955       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13957       /* Handle the cases where either operand is a NaN.  */
13958       if (real_isnan (c0) || real_isnan (c1))
	  /* Switch over the comparison code (largely elided here);
	     with -ftrapping-math some NaN comparisons must not be
	     folded away.  */
13968 	    case UNORDERED_EXPR:
13982 	      if (flag_trapping_math)
13988 	      gcc_unreachable ();
13991 	  return constant_boolean_node (result, type);
      /* Non-NaN reals compare directly.  */
13994       return constant_boolean_node (real_compare (code, c0, c1), type);
13997   /* Handle equality/inequality of complex constants.  */
13998   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14000       tree rcond = fold_relational_const (code, type,
14001 					  TREE_REALPART (op0),
14002 					  TREE_REALPART (op1));
14003       tree icond = fold_relational_const (code, type,
14004 					  TREE_IMAGPART (op0),
14005 					  TREE_IMAGPART (op1));
14006       if (code == EQ_EXPR)
14007 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14008       else if (code == NE_EXPR)
14009 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14014   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14016      To compute GT, swap the arguments and do LT.
14017      To compute GE, do LT and invert the result.
14018      To compute LE, swap the arguments, do LT and invert the result.
14019      To compute NE, do EQ and invert the result.
14021      Therefore, the code below must handle only EQ and LT.  */
14023   if (code == LE_EXPR || code == GT_EXPR)
14028       code = swap_tree_comparison (code);
14031   /* Note that it is safe to invert for real values here because we
14032      have already handled the one case that it matters.  */
14035   if (code == NE_EXPR || code == GE_EXPR)
14038       code = invert_tree_comparison (code, false);
14041   /* Compute a result for LT or EQ if args permit;
14042      Otherwise return T.  */
14043   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14045       if (code == EQ_EXPR)
14046 	result = tree_int_cst_equal (op0, op1);
14047       else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14048 	result = INT_CST_LT_UNSIGNED (op0, op1);
	  /* else branch (elided): signed comparison.  */
14050 	result = INT_CST_LT (op0, op1);
14057   return constant_boolean_node (result, type);
14060 /* Build an expression for the a clean point containing EXPR with type TYPE.
14061 Don't build a cleanup point expression for EXPR which don't have side
14065 fold_build_cleanup_point_expr (tree type, tree expr)
14067 /* If the expression does not have side effects then we don't have to wrap
14068 it with a cleanup point expression. */
14069 if (!TREE_SIDE_EFFECTS (expr))
14072 /* If the expression is a return, check to see if the expression inside the
14073 return has no side effects or the right hand side of the modify expression
14074 inside the return. If either don't have side effects set we don't need to
14075 wrap the expression in a cleanup point expression. Note we don't check the
14076 left hand side of the modify because it should always be a return decl. */
14077 if (TREE_CODE (expr) == RETURN_EXPR)
14079 tree op = TREE_OPERAND (expr, 0);
14080 if (!op || !TREE_SIDE_EFFECTS (op))
14082 op = TREE_OPERAND (op, 1);
14083 if (!TREE_SIDE_EFFECTS (op))
14087 return build1 (CLEANUP_POINT_EXPR, type, expr);
14090 /* Build an expression for the address of T. Folds away INDIRECT_REF to
14091 avoid confusing the gimplify process. */
14094 build_fold_addr_expr_with_type (tree t, tree ptrtype)
14096 /* The size of the object is not relevant when talking about its address. */
14097 if (TREE_CODE (t) == WITH_SIZE_EXPR)
14098 t = TREE_OPERAND (t, 0);
14100 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
14101 if (TREE_CODE (t) == INDIRECT_REF
14102 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
14104 t = TREE_OPERAND (t, 0);
14105 if (TREE_TYPE (t) != ptrtype)
14106 t = build1 (NOP_EXPR, ptrtype, t);
14112 while (handled_component_p (base))
14113 base = TREE_OPERAND (base, 0);
14115 TREE_ADDRESSABLE (base) = 1;
14117 t = build1 (ADDR_EXPR, ptrtype, t);
14124 build_fold_addr_expr (tree t)
14126 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
14129 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14130 of an indirection through OP0, or NULL_TREE if no simplification is
/* NOTE(review): this extract elides several source lines (the embedded
   original line numbers jump), so some declarations (e.g. of `sub' and
   `op00type') and braces are missing below.  */
/* Given a pointer value OP0 and a type TYPE, return a simplified
   version of an indirection through OP0, or NULL_TREE if no
   simplification is possible.  */
14134 fold_indirect_ref_1 (tree type, tree op0)
14140   subtype = TREE_TYPE (sub);
14141   if (!POINTER_TYPE_P (subtype))
14144   if (TREE_CODE (sub) == ADDR_EXPR)
14146       tree op = TREE_OPERAND (sub, 0);
14147       tree optype = TREE_TYPE (op);
14148       /* *&CONST_DECL -> to the value of the const decl.  */
14149       if (TREE_CODE (op) == CONST_DECL)
14150 	return DECL_INITIAL (op);
14151       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
14152       if (type == optype)
14154 	  tree fop = fold_read_from_constant_string (op);
14160       /* *(foo *)&fooarray => fooarray[0] */
14161       else if (TREE_CODE (optype) == ARRAY_TYPE
14162 	       && type == TREE_TYPE (optype))
14164 	  tree type_domain = TYPE_DOMAIN (optype);
14165 	  tree min_val = size_zero_node;
14166 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
14167 	    min_val = TYPE_MIN_VALUE (type_domain);
14168 	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14170       /* *(foo *)&complexfoo => __real__ complexfoo */
14171       else if (TREE_CODE (optype) == COMPLEX_TYPE
14172 	       && type == TREE_TYPE (optype))
14173 	return fold_build1 (REALPART_EXPR, type, op);
14174       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14175       else if (TREE_CODE (optype) == VECTOR_TYPE
14176 	       && type == TREE_TYPE (optype))
14178 	  tree part_width = TYPE_SIZE (type);
14179 	  tree index = bitsize_int (0);
14180 	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14184   /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14185   if (TREE_CODE (sub) == PLUS_EXPR
14186       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14188       tree op00 = TREE_OPERAND (sub, 0);
14189       tree op01 = TREE_OPERAND (sub, 1);
14193       op00type = TREE_TYPE (op00);
14194       if (TREE_CODE (op00) == ADDR_EXPR
14195 	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14196 	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	  /* The offset must be exactly one element (the size of the
	     complex component type) to name the imaginary part.  */
14198 	  tree size = TYPE_SIZE_UNIT (type);
14199 	  if (tree_int_cst_equal (size, op01))
14200 	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14204   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14205   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14206       && type == TREE_TYPE (TREE_TYPE (subtype)))
14209       tree min_val = size_zero_node;
14210       sub = build_fold_indirect_ref (sub);
14211       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14212       if (type_domain && TYPE_MIN_VALUE (type_domain))
14213 	min_val = TYPE_MIN_VALUE (type_domain);
14214       return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
14220 /* Builds an expression for an indirection through T, simplifying some
14224 build_fold_indirect_ref (tree t)
14226 tree type = TREE_TYPE (TREE_TYPE (t));
14227 tree sub = fold_indirect_ref_1 (type, t);
14232 return build1 (INDIRECT_REF, type, t);
14235 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14238 fold_indirect_ref (tree t)
14240 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
14248 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14249 whose result is ignored. The type of the returned tree need not be
14250 the same as the original expression. */
/* NOTE(review): this extract elides several source lines (the embedded
   original line numbers jump); the enclosing loop construct and some
   case labels of this switch are missing below.  */
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored; peels operands repeatedly by tree-code
   class until only the side-effecting core remains.  */
14253 fold_ignored_result (tree t)
14255   if (!TREE_SIDE_EFFECTS (t))
14256     return integer_zero_node;
14259     switch (TREE_CODE_CLASS (TREE_CODE (t)))
	/* Unary class (label elided): the operand carries the effect.  */
14262 	t = TREE_OPERAND (t, 0);
14266       case tcc_comparison:
	/* Keep only the side-effecting operand of a binary node.  */
14267 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14268 	  t = TREE_OPERAND (t, 0);
14269 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14270 	  t = TREE_OPERAND (t, 1);
14275       case tcc_expression:
14276 	switch (TREE_CODE (t))
14278 	  case COMPOUND_EXPR:
14279 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14281 	    t = TREE_OPERAND (t, 0);
	  /* COND_EXPR case (label elided): only strippable when both
	     arms are effect-free.  */
14285 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14286 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14288 	    t = TREE_OPERAND (t, 0);
14301 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
14302 This can only be applied to objects of a sizetype. */
/* NOTE(review): this extract elides several source lines (the embedded
   original line numbers jump); in particular the carry/overflow
   handling of the constant power-of-two path is missing below.  */
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */
14305 round_up (tree value, int divisor)
14307   tree div = NULL_TREE;
14309   gcc_assert (divisor > 0);
14313   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14314      have to do anything.  Only do this when we are not given a const,
14315      because in that case, this check is more expensive than just
14317   if (TREE_CODE (value) != INTEGER_CST)
14319       div = build_int_cst (TREE_TYPE (value), divisor);
14321       if (multiple_of_p (TREE_TYPE (value), value, div))
14325   /* If divisor is a power of two, simplify this to bit manipulation.  */
14326   if (divisor == (divisor & -divisor))
14328       if (TREE_CODE (value) == INTEGER_CST)
14330 	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
14331 	  unsigned HOST_WIDE_INT high;
	  /* Already aligned: nothing to do.  */
14334 	  if ((low & (divisor - 1)) == 0)
14337 	      overflow_p = TREE_OVERFLOW (value);
14338 	      high = TREE_INT_CST_HIGH (value);
14339 	      low &= ~(divisor - 1);
	      /* (Elided lines add DIVISOR and propagate a carry into
		 HIGH, setting overflow_p when HIGH wraps.)  */
14348 	      return force_fit_type_double (TREE_TYPE (value), low, high,
	  /* Non-constant: (value + divisor-1) & -divisor.  */
14355 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
14356 	  value = size_binop (PLUS_EXPR, value, t);
14357 	  t = build_int_cst (TREE_TYPE (value), -divisor);
14358 	  value = size_binop (BIT_AND_EXPR, value, t);
      /* General divisor: ceil-divide then multiply back.  */
14364       div = build_int_cst (TREE_TYPE (value), divisor);
14365       value = size_binop (CEIL_DIV_EXPR, value, div);
14366       value = size_binop (MULT_EXPR, value, div);
14372 /* Likewise, but round down. */
14375 round_down (tree value, int divisor)
14377 tree div = NULL_TREE;
14379 gcc_assert (divisor > 0);
14383 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14384 have to do anything. Only do this when we are not given a const,
14385 because in that case, this check is more expensive than just
14387 if (TREE_CODE (value) != INTEGER_CST)
14389 div = build_int_cst (TREE_TYPE (value), divisor);
14391 if (multiple_of_p (TREE_TYPE (value), value, div))
14395 /* If divisor is a power of two, simplify this to bit manipulation. */
14396 if (divisor == (divisor & -divisor))
14400 t = build_int_cst (TREE_TYPE (value), -divisor);
14401 value = size_binop (BIT_AND_EXPR, value, t);
14406 div = build_int_cst (TREE_TYPE (value), divisor);
14407 value = size_binop (FLOOR_DIV_EXPR, value, div);
14408 value = size_binop (MULT_EXPR, value, div);
14414 /* Returns the pointer to the base of the object addressed by EXP and
14415 extracts the information about the offset of the access, storing it
14416 to PBITPOS and POFFSET. */
14419 split_address_to_core_and_offset (tree exp,
14420 HOST_WIDE_INT *pbitpos, tree *poffset)
14423 enum machine_mode mode;
14424 int unsignedp, volatilep;
14425 HOST_WIDE_INT bitsize;
14427 if (TREE_CODE (exp) == ADDR_EXPR)
14429 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14430 poffset, &mode, &unsignedp, &volatilep,
14432 core = build_fold_addr_expr (core);
14438 *poffset = NULL_TREE;
14444 /* Returns true if addresses of E1 and E2 differ by a constant, false
14445 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14448 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14451 HOST_WIDE_INT bitpos1, bitpos2;
14452 tree toffset1, toffset2, tdiff, type;
14454 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14455 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14457 if (bitpos1 % BITS_PER_UNIT != 0
14458 || bitpos2 % BITS_PER_UNIT != 0
14459 || !operand_equal_p (core1, core2, 0))
14462 if (toffset1 && toffset2)
14464 type = TREE_TYPE (toffset1);
14465 if (type != TREE_TYPE (toffset2))
14466 toffset2 = fold_convert (type, toffset2);
14468 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14469 if (!cst_and_fits_in_hwi (tdiff))
14472 *diff = int_cst_value (tdiff);
14474 else if (toffset1 || toffset2)
14476 /* If only one of the offsets is non-constant, the difference cannot
14483 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14487 /* Simplify the floating point expression EXP when the sign of the
14488 result is not significant. Return NULL_TREE if no simplification
14492 fold_strip_sign_ops (tree exp)
14496 switch (TREE_CODE (exp))
14500 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14501 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
14505 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
14507 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14508 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14509 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
14510 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
14511 arg0 ? arg0 : TREE_OPERAND (exp, 0),
14512 arg1 ? arg1 : TREE_OPERAND (exp, 1));
14515 case COMPOUND_EXPR:
14516 arg0 = TREE_OPERAND (exp, 0);
14517 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14519 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
14523 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14524 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
14526 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
14527 arg0 ? arg0 : TREE_OPERAND (exp, 1),
14528 arg1 ? arg1 : TREE_OPERAND (exp, 2));
14533 const enum built_in_function fcode = builtin_mathfn_code (exp);
14536 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14537 /* Strip copysign function call, return the 1st argument. */
14538 arg0 = CALL_EXPR_ARG (exp, 0);
14539 arg1 = CALL_EXPR_ARG (exp, 1);
14540 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
14543 /* Strip sign ops from the argument of "odd" math functions. */
14544 if (negate_mathfn_p (fcode))
14546 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
14548 return build_call_expr (get_callee_fndecl (exp), 1, arg0);