1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 /*@@ This file should be rewritten to use an arbitrary precision
24 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
25 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
26 @@ The routines that translate from the ap rep should
27 @@ warn if precision et. al. is lost.
28 @@ This would also make life easier when this technology is used
29 @@ for cross-compilers. */
31 /* The entry points in this file are fold, size_int_wide, size_binop
32 and force_fit_type_double.
34 fold takes a tree as argument and returns a simplified tree.
36 size_binop takes a tree code for an arithmetic operation
37 and two operands that are trees, and produces a tree for the
38 result, assuming the type comes from `sizetype'.
40 size_int takes an integer value, and creates a tree constant
41 with type from `sizetype'.
43 force_fit_type_double takes a constant, an overflowable flag and a
44 prior overflow indicator.  It forces the value to fit the type and
45 sets TREE_OVERFLOW accordingly.
47 Note: Since the folders get called on non-gimple code as well as
48 gimple code, we need to handle GIMPLE tuples as well as their
49 corresponding tree equivalents. */
53 #include "coretypes.h"
64 #include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
69 int folding_initializer = 0;

71 /* The following constants represent a bit based encoding of GCC's
72 comparison operators. This encoding simplifies transformations
73 on relational comparison operators, such as AND and OR. */
/* NOTE(review): the enumerator list and closing brace of this enum are
   missing from this extract -- restore them from the upstream file
   before compiling.  */
74 enum comparison_code {

/* Forward declarations for the static helpers defined later in this
   file.  NOTE(review): a few prototypes below appear to be missing
   continuation lines (e.g. decode_field_reference, merge_ranges,
   fold_binary_op_with_conditional_arg, fold_mathfn_compare); verify
   each against its definition.  */
93 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
94 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
95 static bool negate_mathfn_p (enum built_in_function);
96 static bool negate_expr_p (tree);
97 static tree negate_expr (tree);
98 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
99 static tree associate_trees (tree, tree, enum tree_code, tree);
100 static tree const_binop (enum tree_code, tree, tree, int);
101 static enum comparison_code comparison_to_compcode (enum tree_code);
102 static enum tree_code compcode_to_comparison (enum comparison_code);
103 static tree combine_comparisons (enum tree_code, enum tree_code,
104 enum tree_code, tree, tree, tree);
105 static int truth_value_p (enum tree_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand (tree, tree, tree);
110 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (tree, tree, int, int, int);
112 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
113 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
114 enum machine_mode *, int *, int *,
116 static int all_ones_mask_p (tree, int);
117 static tree sign_bit_p (tree, tree);
118 static int simple_operand_p (tree);
119 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
120 static tree range_predecessor (tree);
121 static tree range_successor (tree);
122 static tree make_range (tree, int *, tree *, tree *);
123 static tree build_range_check (tree, tree, int, tree, tree);
124 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
126 static tree fold_range_test (enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree fold_truthop (enum tree_code, tree, tree, tree);
130 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
133 static int multiple_of_p (tree, tree, tree);
134 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
137 static bool fold_real_zero_addition_p (tree, tree, int);
138 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
140 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (tree, tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static int native_encode_expr (tree, unsigned char *, int);
147 static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
158 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

/* Low half-word of X.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
/* High half-word of X, shifted down so it fits in a half-word.  */
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
/* Radix of the half-word representation.  */
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
171 /* Unpack a two-word integer into 4 words.
172 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
173 WORDS points to the array of HOST_WIDE_INTs. */
176 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
178 words[0] = LOWPART (low);
179 words[1] = HIGHPART (low);
180 words[2] = LOWPART (hi);
181 words[3] = HIGHPART (hi);
184 /* Pack an array of 4 words into a two-word integer.
185 WORDS points to the array of words.
186 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
189 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
192 *low = words[0] + words[1] * BASE;
193 *hi = words[2] + words[3] * BASE;
196 /* Force the double-word integer L1, H1 to be within the range of the
197 integer type TYPE. Stores the properly truncated and sign-extended
198 double-word integer in *LV, *HV. Returns true if the operation
199 overflows, that is, argument and result are different. */
/* NOTE(review): several lines of this function (the return-type line,
   braces, the `prec' declaration, the pointer/OFFSET_TYPE early path and
   the final stores to *lv/*hv) are missing from this extract; compare
   against the upstream definition before editing.  */
202 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
203 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
/* Remember the incoming value so overflow can be detected at the end.  */
205 unsigned HOST_WIDE_INT low0 = l1;
206 HOST_WIDE_INT high0 = h1;
208 int sign_extended_type;
210 if (POINTER_TYPE_P (type)
211 || TREE_CODE (type) == OFFSET_TYPE)
214 prec = TYPE_PRECISION (type);
216 /* Size types *are* sign extended. */
217 sign_extended_type = (!TYPE_UNSIGNED (type)
218 || (TREE_CODE (type) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (type)));
221 /* First clear all bits that are beyond the type's precision. */
222 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
224 else if (prec > HOST_BITS_PER_WIDE_INT)
225 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
229 if (prec < HOST_BITS_PER_WIDE_INT)
230 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
233 /* Then do sign extension if necessary. */
234 if (!sign_extended_type)
235 /* No sign extension */;
236 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
237 /* Correct width already. */;
238 else if (prec > HOST_BITS_PER_WIDE_INT)
240 /* Sign extend top half? */
241 if (h1 & ((unsigned HOST_WIDE_INT)1
242 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
245 else if (prec == HOST_BITS_PER_WIDE_INT)
247 if ((HOST_WIDE_INT)l1 < 0)
252 /* Sign extend bottom half? */
253 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
256 l1 |= (HOST_WIDE_INT)(-1) << prec;
263 /* If the value didn't fit, signal overflow. */
264 return l1 != low0 || h1 != high0;
267 /* We force the double-int HIGH:LOW to the range of the type TYPE by
268 sign or zero extending it.
269 OVERFLOWABLE indicates if we are interested
270 in overflow of the value, when >0 we are only interested in signed
271 overflow, for <0 we are interested in any overflow. OVERFLOWED
272 indicates whether overflow has already occurred. CONST_OVERFLOWED
273 indicates whether constant overflow has already occurred. We force
274 T's value to be within range of T's type (by setting to 0 or 1 all
275 the bits outside the type's range). We set TREE_OVERFLOWED if,
276 OVERFLOWED is nonzero,
277 or OVERFLOWABLE is >0 and signed overflow occurs
278 or OVERFLOWABLE is <0 and any overflow occurs
279 We return a new tree node for the extended double-int. The node
280 is shared if no overflow flags are set. */
/* NOTE(review): the return-type line, the `overflowed' parameter, the
   `overflow' local, and part of the condition following the
   `if (overflowed || overflow)' test are missing from this extract.  */
283 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
284 HOST_WIDE_INT high, int overflowable,
287 int sign_extended_type;
290 /* Size types *are* sign extended. */
291 sign_extended_type = (!TYPE_UNSIGNED (type)
292 || (TREE_CODE (type) == INTEGER_TYPE
293 && TYPE_IS_SIZETYPE (type)));
/* Truncate/extend LOW:HIGH into TYPE's range; records whether the
   value changed.  */
295 overflow = fit_double_type (low, high, &low, &high, type);
297 /* If we need to set overflow flags, return a new unshared node. */
298 if (overflowed || overflow)
302 || (overflowable > 0 && sign_extended_type))
304 tree t = make_node (INTEGER_CST);
305 TREE_INT_CST_LOW (t) = low;
306 TREE_INT_CST_HIGH (t) = high;
307 TREE_TYPE (t) = type;
308 TREE_OVERFLOW (t) = 1;
313 /* Else build a shared node. */
314 return build_int_cst_wide (type, low, high);
317 /* Add two doubleword integers with doubleword result.
318 Return nonzero if the operation overflows according to UNSIGNED_P.
319 Each argument is given as two `HOST_WIDE_INT' pieces.
320 One argument is L1 and H1; the other, L2 and H2.
321 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
324 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
325 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
326 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
329 unsigned HOST_WIDE_INT l;
333 h = h1 + h2 + (l < l1);
339 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
341 return OVERFLOW_SUM_SIGN (h1, h2, h);
344 /* Negate a doubleword integer with doubleword result.
345 Return nonzero if the operation overflows, assuming it's signed.
346 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
347 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
350 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
351 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
357 return (*hv & h1) < 0;
367 /* Multiply two doubleword integers with doubleword result.
368 Return nonzero if the operation overflows according to UNSIGNED_P.
369 Each argument is given as two `HOST_WIDE_INT' pieces.
370 One argument is L1 and H1; the other, L2 and H2.
371 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the return-type line, the `unsigned_p' parameter, loop
   braces and several statements (including the `k' index computation and
   the unsigned_p branch) are missing from this extract.  */
374 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
375 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
376 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
/* Half-word digit arrays: each operand becomes 4 digits, the full
   product needs 8.  */
379 HOST_WIDE_INT arg1[4];
380 HOST_WIDE_INT arg2[4];
381 HOST_WIDE_INT prod[4 * 2];
382 unsigned HOST_WIDE_INT carry;
383 unsigned HOST_WIDE_INT toplow, neglow;
/* Schoolbook multiplication over half-word digits.  */
385 HOST_WIDE_INT tophigh, neghigh;
387 encode (arg1, l1, h1);
388 encode (arg2, l2, h2);
390 memset (prod, 0, sizeof prod);
392 for (i = 0; i < 4; i++)
395 for (j = 0; j < 4; j++)
398 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
399 carry += arg1[i] * arg2[j];
400 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
402 prod[k] = LOWPART (carry);
403 carry = HIGHPART (carry);
/* Low 4 digits are the result; high 4 digits indicate overflow.  */
408 decode (prod, lv, hv);
409 decode (prod + 4, &toplow, &tophigh);
411 /* Unsigned overflow is immediate. */
413 return (toplow | tophigh) != 0;
415 /* Check for signed overflow by calculating the signed representation of the
416 top half of the result; it should agree with the low half's sign bit. */
419 neg_double (l2, h2, &neglow, &neghigh);
420 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
424 neg_double (l1, h1, &neglow, &neghigh);
425 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
427 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
430 /* Shift the doubleword integer in L1, H1 left by COUNT places
431 keeping only PREC bits of result.
432 Shift right if COUNT is negative.
433 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
434 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the return-type line, braces, the SHIFT_COUNT_TRUNCATED
   body, the *lv assignments in each branch and the signmask == 0 fast
   path are missing from this extract.  */
437 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
438 HOST_WIDE_INT count, unsigned int prec,
439 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
441 unsigned HOST_WIDE_INT signmask;
/* A negative left-shift count means shift right instead.  */
445 rshift_double (l1, h1, -count, prec, lv, hv, arith);
449 if (SHIFT_COUNT_TRUNCATED)
452 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
454 /* Shifting by the host word size is undefined according to the
455 ANSI standard, so we must handle this as a special case. */
459 else if (count >= HOST_BITS_PER_WIDE_INT)
461 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
/* Split the double shift so no single C shift equals the word width
   (which would be undefined behavior).  */
466 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
467 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
471 /* Sign extend all bits that are beyond the precision. */
473 signmask = -((prec > HOST_BITS_PER_WIDE_INT
474 ? ((unsigned HOST_WIDE_INT) *hv
475 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
476 : (*lv >> (prec - 1))) & 1);
478 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
480 else if (prec >= HOST_BITS_PER_WIDE_INT)
482 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
483 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
488 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
489 *lv |= signmask << prec;
493 /* Shift the doubleword integer in L1, H1 right by COUNT places
494 keeping only PREC bits of result. COUNT must be positive.
495 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
496 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the return-type line, the `arith' parameter line,
   braces, part of the signmask initializer, and the *lv / *hv
   assignments in some branches are missing from this extract.  */
499 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
500 HOST_WIDE_INT count, unsigned int prec,
501 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
504 unsigned HOST_WIDE_INT signmask;
/* For an arithmetic shift the vacated bits replicate H1's sign bit.  */
507 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
510 if (SHIFT_COUNT_TRUNCATED)
513 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
515 /* Shifting by the host word size is undefined according to the
516 ANSI standard, so we must handle this as a special case. */
520 else if (count >= HOST_BITS_PER_WIDE_INT)
523 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
527 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
/* Double shift split in two so no single shift equals the word width.  */
529 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
532 /* Zero / sign extend all bits that are beyond the precision. */
534 if (count >= (HOST_WIDE_INT)prec)
539 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
541 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
543 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
544 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
549 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
550 *lv |= signmask << (prec - count);
554 /* Rotate the doubleword integer in L1, H1 left by COUNT places
555 keeping only PREC bits of result.
556 Rotate right if COUNT is negative.
557 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
560 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
561 HOST_WIDE_INT count, unsigned int prec,
562 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
564 unsigned HOST_WIDE_INT s1l, s2l;
565 HOST_WIDE_INT s1h, s2h;
571 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
572 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
577 /* Rotate the doubleword integer in L1, H1 left by COUNT places
578 keeping only PREC bits of result. COUNT must be positive.
579 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
582 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
583 HOST_WIDE_INT count, unsigned int prec,
584 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
586 unsigned HOST_WIDE_INT s1l, s2l;
587 HOST_WIDE_INT s1h, s2h;
593 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
594 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
599 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
600 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
601 CODE is a tree code for a kind of division, one of
602 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
604 It controls how the quotient is rounded to an integer.
605 Return nonzero if the operation overflows.
606 UNS nonzero says do unsigned division. */
/* NOTE(review): this extract is missing many lines of this function
   (return type, braces, the `quo_neg'/`overflow'/`prec' style locals,
   several case labels of the rounding switch, loop bodies and the final
   return).  Compare with the upstream definition before editing.  */
609 div_and_round_double (enum tree_code code, int uns,
610 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
611 HOST_WIDE_INT hnum_orig,
612 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
613 HOST_WIDE_INT hden_orig,
614 unsigned HOST_WIDE_INT *lquo,
615 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
619 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
620 HOST_WIDE_INT den[4], quo[4];
622 unsigned HOST_WIDE_INT work;
623 unsigned HOST_WIDE_INT carry = 0;
624 unsigned HOST_WIDE_INT lnum = lnum_orig;
625 HOST_WIDE_INT hnum = hnum_orig;
626 unsigned HOST_WIDE_INT lden = lden_orig;
627 HOST_WIDE_INT hden = hden_orig;
/* Division by zero: flag overflow and divide by 1 to keep going.  */
630 if (hden == 0 && lden == 0)
631 overflow = 1, lden = 1;
633 /* Calculate quotient sign and convert operands to unsigned. */
639 /* (minimum integer) / (-1) is the only overflow case. */
640 if (neg_double (lnum, hnum, &lnum, &hnum)
641 && ((HOST_WIDE_INT) lden & hden) == -1)
647 neg_double (lden, hden, &lden, &hden);
651 if (hnum == 0 && hden == 0)
652 { /* single precision */
654 /* This unsigned division rounds toward zero. */
660 { /* trivial case: dividend < divisor */
661 /* hden != 0 already checked. */
668 memset (quo, 0, sizeof quo);
670 memset (num, 0, sizeof num); /* to zero 9th element */
671 memset (den, 0, sizeof den);
673 encode (num, lnum, hnum);
674 encode (den, lden, hden);
676 /* Special code for when the divisor < BASE. */
677 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
679 /* hnum != 0 already checked. */
680 for (i = 4 - 1; i >= 0; i--)
682 work = num[i] + carry * BASE;
683 quo[i] = work / lden;
689 /* Full double precision division,
690 with thanks to Don Knuth's "Seminumerical Algorithms". */
691 int num_hi_sig, den_hi_sig;
692 unsigned HOST_WIDE_INT quo_est, scale;
694 /* Find the highest nonzero divisor digit. */
695 for (i = 4 - 1;; i--)
702 /* Insure that the first digit of the divisor is at least BASE/2.
703 This is required by the quotient digit estimation algorithm. */
705 scale = BASE / (den[den_hi_sig] + 1);
707 { /* scale divisor and dividend */
709 for (i = 0; i <= 4 - 1; i++)
711 work = (num[i] * scale) + carry;
712 num[i] = LOWPART (work);
713 carry = HIGHPART (work);
718 for (i = 0; i <= 4 - 1; i++)
720 work = (den[i] * scale) + carry;
721 den[i] = LOWPART (work);
722 carry = HIGHPART (work);
723 if (den[i] != 0) den_hi_sig = i;
/* Main loop: produce one quotient digit per iteration (Knuth Vol. 2,
   Algorithm D).  */
730 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
732 /* Guess the next quotient digit, quo_est, by dividing the first
733 two remaining dividend digits by the high order quotient digit.
734 quo_est is never low and is at most 2 high. */
735 unsigned HOST_WIDE_INT tmp;
737 num_hi_sig = i + den_hi_sig + 1;
738 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
739 if (num[num_hi_sig] != den[den_hi_sig])
740 quo_est = work / den[den_hi_sig];
744 /* Refine quo_est so it's usually correct, and at most one high. */
745 tmp = work - quo_est * den[den_hi_sig];
747 && (den[den_hi_sig - 1] * quo_est
748 > (tmp * BASE + num[num_hi_sig - 2])))
751 /* Try QUO_EST as the quotient digit, by multiplying the
752 divisor by QUO_EST and subtracting from the remaining dividend.
753 Keep in mind that QUO_EST is the I - 1st digit. */
756 for (j = 0; j <= den_hi_sig; j++)
758 work = quo_est * den[j] + carry;
759 carry = HIGHPART (work);
760 work = num[i + j] - LOWPART (work);
761 num[i + j] = LOWPART (work);
762 carry += HIGHPART (work) != 0;
765 /* If quo_est was high by one, then num[i] went negative and
766 we need to correct things. */
767 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
770 carry = 0; /* add divisor back in */
771 for (j = 0; j <= den_hi_sig; j++)
773 work = num[i + j] + den[j] + carry;
774 carry = HIGHPART (work);
775 num[i + j] = LOWPART (work);
778 num [num_hi_sig] += carry;
781 /* Store the quotient digit. */
786 decode (quo, lquo, hquo);
789 /* If result is negative, make it so. */
791 neg_double (*lquo, *hquo, lquo, hquo);
793 /* Compute trial remainder: rem = num - (quo * den) */
794 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
795 neg_double (*lrem, *hrem, lrem, hrem);
796 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Adjust quotient/remainder for the requested rounding mode.  */
801 case TRUNC_MOD_EXPR: /* round toward zero */
802 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
806 case FLOOR_MOD_EXPR: /* round toward negative infinity */
807 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
810 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
818 case CEIL_MOD_EXPR: /* round toward positive infinity */
819 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
821 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
829 case ROUND_MOD_EXPR: /* round to closest integer */
831 unsigned HOST_WIDE_INT labs_rem = *lrem;
832 HOST_WIDE_INT habs_rem = *hrem;
833 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
834 HOST_WIDE_INT habs_den = hden, htwice;
836 /* Get absolute values. */
838 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
840 neg_double (lden, hden, &labs_den, &habs_den);
842 /* If (2 * abs (lrem) >= abs (lden)) */
843 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
844 labs_rem, habs_rem, &ltwice, &htwice);
846 if (((unsigned HOST_WIDE_INT) habs_den
847 < (unsigned HOST_WIDE_INT) htwice)
848 || (((unsigned HOST_WIDE_INT) habs_den
849 == (unsigned HOST_WIDE_INT) htwice)
850 && (labs_den < ltwice)))
854 add_double (*lquo, *hquo,
855 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
858 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
870 /* Compute true remainder: rem = num - (quo * den) */
871 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
872 neg_double (*lrem, *hrem, lrem, hrem);
873 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
877 /* If ARG2 divides ARG1 with zero remainder, carries out the division
878 of type CODE and returns the quotient.
879 Otherwise returns NULL_TREE. */
882 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
884 unsigned HOST_WIDE_INT int1l, int2l;
885 HOST_WIDE_INT int1h, int2h;
886 unsigned HOST_WIDE_INT quol, reml;
887 HOST_WIDE_INT quoh, remh;
888 tree type = TREE_TYPE (arg1);
889 int uns = TYPE_UNSIGNED (type);
891 int1l = TREE_INT_CST_LOW (arg1);
892 int1h = TREE_INT_CST_HIGH (arg1);
893 int2l = TREE_INT_CST_LOW (arg2);
894 int2h = TREE_INT_CST_HIGH (arg2);
896 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
897 &quol, &quoh, &reml, &remh);
898 if (remh != 0 || reml != 0)
901 return build_int_cst_wide (type, quol, quoh);
904 /* Return true if the built-in mathematical function specified by CODE
905 is odd, i.e. -f(x) == f(-x). */
/* NOTE(review): the return-type line, the `switch (code)' line, the
   `return true;' after the odd-function cases, and the default/closing
   lines are missing from this extract.  */
908 negate_mathfn_p (enum built_in_function code)
/* Functions for which f(-x) == -f(x) unconditionally.  */
912 CASE_FLT_FN (BUILT_IN_ASIN):
913 CASE_FLT_FN (BUILT_IN_ASINH):
914 CASE_FLT_FN (BUILT_IN_ATAN):
915 CASE_FLT_FN (BUILT_IN_ATANH):
916 CASE_FLT_FN (BUILT_IN_CASIN):
917 CASE_FLT_FN (BUILT_IN_CASINH):
918 CASE_FLT_FN (BUILT_IN_CATAN):
919 CASE_FLT_FN (BUILT_IN_CATANH):
920 CASE_FLT_FN (BUILT_IN_CBRT):
921 CASE_FLT_FN (BUILT_IN_CPROJ):
922 CASE_FLT_FN (BUILT_IN_CSIN):
923 CASE_FLT_FN (BUILT_IN_CSINH):
924 CASE_FLT_FN (BUILT_IN_CTAN):
925 CASE_FLT_FN (BUILT_IN_CTANH):
926 CASE_FLT_FN (BUILT_IN_ERF):
927 CASE_FLT_FN (BUILT_IN_LLROUND):
928 CASE_FLT_FN (BUILT_IN_LROUND):
929 CASE_FLT_FN (BUILT_IN_ROUND):
930 CASE_FLT_FN (BUILT_IN_SIN):
931 CASE_FLT_FN (BUILT_IN_SINH):
932 CASE_FLT_FN (BUILT_IN_TAN):
933 CASE_FLT_FN (BUILT_IN_TANH):
934 CASE_FLT_FN (BUILT_IN_TRUNC):
/* Rounding functions are odd only when rounding behavior is not
   observable, i.e. -frounding-math is off.  */
937 CASE_FLT_FN (BUILT_IN_LLRINT):
938 CASE_FLT_FN (BUILT_IN_LRINT):
939 CASE_FLT_FN (BUILT_IN_NEARBYINT):
940 CASE_FLT_FN (BUILT_IN_RINT):
941 return !flag_rounding_math;
949 /* Check whether we may negate an integer constant T without causing
953 may_negate_without_overflow_p (tree t)
955 unsigned HOST_WIDE_INT val;
959 gcc_assert (TREE_CODE (t) == INTEGER_CST);
961 type = TREE_TYPE (t);
962 if (TYPE_UNSIGNED (type))
965 prec = TYPE_PRECISION (type);
966 if (prec > HOST_BITS_PER_WIDE_INT)
968 if (TREE_INT_CST_LOW (t) != 0)
970 prec -= HOST_BITS_PER_WIDE_INT;
971 val = TREE_INT_CST_HIGH (t);
974 val = TREE_INT_CST_LOW (t);
975 if (prec < HOST_BITS_PER_WIDE_INT)
976 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
977 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
980 /* Determine whether an expression T can be cheaply negated using
981 the function negate_expr without introducing undefined overflow. */
/* NOTE(review): the return-type line, the tree-code case labels of the
   switch (INTEGER_CST, REAL_CST, COMPLEX_CST, PLUS_EXPR, MINUS_EXPR,
   MULT_EXPR, NOP_EXPR, CALL_EXPR, RSHIFT_EXPR, ...), several braces and
   the default return are missing from this extract.  */
984 negate_expr_p (tree t)
991 type = TREE_TYPE (t);
994 switch (TREE_CODE (t))
/* Integer constant: wrapping types can always negate ...  */
997 if (TYPE_OVERFLOW_WRAPS (type))
1000 /* Check that -CST will not overflow type. */
1001 return may_negate_without_overflow_p (t);
1003 return (INTEGRAL_TYPE_P (type)
1004 && TYPE_OVERFLOW_WRAPS (type));
/* Complex constant: both parts must be negatable.  */
1011 return negate_expr_p (TREE_REALPART (t))
1012 && negate_expr_p (TREE_IMAGPART (t));
1015 return negate_expr_p (TREE_OPERAND (t, 0))
1016 && negate_expr_p (TREE_OPERAND (t, 1));
/* Addition: rewritten as a subtraction, so rounding/signed-zero
   semantics must permit the reassociation.  */
1019 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1020 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1022 /* -(A + B) -> (-B) - A. */
1023 if (negate_expr_p (TREE_OPERAND (t, 1))
1024 && reorder_operands_p (TREE_OPERAND (t, 0),
1025 TREE_OPERAND (t, 1)))
1027 /* -(A + B) -> (-A) - B. */
1028 return negate_expr_p (TREE_OPERAND (t, 0));
1031 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1032 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1033 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1034 && reorder_operands_p (TREE_OPERAND (t, 0),
1035 TREE_OPERAND (t, 1));
/* Multiplication: push the negation into either factor.  */
1038 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1044 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1045 return negate_expr_p (TREE_OPERAND (t, 1))
1046 || negate_expr_p (TREE_OPERAND (t, 0));
1049 case TRUNC_DIV_EXPR:
1050 case ROUND_DIV_EXPR:
1051 case FLOOR_DIV_EXPR:
1053 case EXACT_DIV_EXPR:
/* Division: negating either operand is safe only when signed overflow
   is not undefined for the type.  */
1054 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1055 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1057 return negate_expr_p (TREE_OPERAND (t, 1))
1058 || negate_expr_p (TREE_OPERAND (t, 0));
1061 /* Negate -((double)float) as (double)(-float). */
1062 if (TREE_CODE (type) == REAL_TYPE)
1064 tree tem = strip_float_extensions (t);
1066 return negate_expr_p (tem);
1071 /* Negate -f(x) as f(-x). */
1072 if (negate_mathfn_p (builtin_mathfn_code (t)))
1073 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1077 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1078 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1080 tree op1 = TREE_OPERAND (t, 1);
/* The transformation applies only when shifting by precision-1, i.e.
   the result is 0 or -1 (just the sign bit).  */
1081 if (TREE_INT_CST_HIGH (op1) == 0
1082 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1083 == TREE_INT_CST_LOW (op1))
1094 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1095 simplification is possible.
1096 If negate_expr_p would return true for T, NULL_TREE will never be
returned.  */
/* NOTE(review): the return-type line, the tree-code case labels of the
   switch (BIT_NOT_EXPR, INTEGER_CST, REAL_CST, COMPLEX_CST,
   COMPLEX_EXPR, NEGATE_EXPR, PLUS_EXPR, MINUS_EXPR, MULT_EXPR,
   div cases, NOP_EXPR, CALL_EXPR, RSHIFT_EXPR), many braces, `break'
   statements and the final `return NULL_TREE;' are missing from this
   extract.  */
1100 fold_negate_expr (tree t)
1102 tree type = TREE_TYPE (t);
1105 switch (TREE_CODE (t))
1107 /* Convert - (~A) to A + 1. */
1109 if (INTEGRAL_TYPE_P (type))
1110 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1111 build_int_cst (type, 1));
/* Integer constant: fold unless doing so would hide a trap.  */
1115 tem = fold_negate_const (t, type);
1116 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1117 || !TYPE_OVERFLOW_TRAPS (type))
1122 tem = fold_negate_const (t, type);
1123 /* Two's complement FP formats, such as c4x, may overflow. */
1124 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
/* Complex constant: negate both parts.  */
1130 tree rpart = negate_expr (TREE_REALPART (t));
1131 tree ipart = negate_expr (TREE_IMAGPART (t));
1133 if ((TREE_CODE (rpart) == REAL_CST
1134 && TREE_CODE (ipart) == REAL_CST)
1135 || (TREE_CODE (rpart) == INTEGER_CST
1136 && TREE_CODE (ipart) == INTEGER_CST))
1137 return build_complex (type, rpart, ipart);
1142 if (negate_expr_p (t))
1143 return fold_build2 (COMPLEX_EXPR, type,
1144 fold_negate_expr (TREE_OPERAND (t, 0)),
1145 fold_negate_expr (TREE_OPERAND (t, 1)));
/* -(-A) -> A.  */
1149 return TREE_OPERAND (t, 0);
1152 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1153 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1155 /* -(A + B) -> (-B) - A. */
1156 if (negate_expr_p (TREE_OPERAND (t, 1))
1157 && reorder_operands_p (TREE_OPERAND (t, 0),
1158 TREE_OPERAND (t, 1)))
1160 tem = negate_expr (TREE_OPERAND (t, 1));
1161 return fold_build2 (MINUS_EXPR, type,
1162 tem, TREE_OPERAND (t, 0));
1165 /* -(A + B) -> (-A) - B. */
1166 if (negate_expr_p (TREE_OPERAND (t, 0)))
1168 tem = negate_expr (TREE_OPERAND (t, 0));
1169 return fold_build2 (MINUS_EXPR, type,
1170 tem, TREE_OPERAND (t, 1));
1176 /* - (A - B) -> B - A */
1177 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1178 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1179 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1180 return fold_build2 (MINUS_EXPR, type,
1181 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
/* Multiplication: move the negation into one operand.  */
1185 if (TYPE_UNSIGNED (type))
1191 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1193 tem = TREE_OPERAND (t, 1);
1194 if (negate_expr_p (tem))
1195 return fold_build2 (TREE_CODE (t), type,
1196 TREE_OPERAND (t, 0), negate_expr (tem));
1197 tem = TREE_OPERAND (t, 0);
1198 if (negate_expr_p (tem))
1199 return fold_build2 (TREE_CODE (t), type,
1200 negate_expr (tem), TREE_OPERAND (t, 1));
1204 case TRUNC_DIV_EXPR:
1205 case ROUND_DIV_EXPR:
1206 case FLOOR_DIV_EXPR:
1208 case EXACT_DIV_EXPR:
/* Division: same operand-negation trick, guarded against types with
   undefined signed overflow.  */
1209 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1211 tem = TREE_OPERAND (t, 1);
1212 if (negate_expr_p (tem))
1213 return fold_build2 (TREE_CODE (t), type,
1214 TREE_OPERAND (t, 0), negate_expr (tem));
1215 tem = TREE_OPERAND (t, 0);
1216 if (negate_expr_p (tem))
1217 return fold_build2 (TREE_CODE (t), type,
1218 negate_expr (tem), TREE_OPERAND (t, 1));
1223 /* Convert -((double)float) into (double)(-float). */
1224 if (TREE_CODE (type) == REAL_TYPE)
1226 tem = strip_float_extensions (t);
1227 if (tem != t && negate_expr_p (tem))
1228 return negate_expr (tem);
1233 /* Negate -f(x) as f(-x). */
1234 if (negate_mathfn_p (builtin_mathfn_code (t))
1235 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1237 tree fndecl, arg, arglist;
1239 fndecl = get_callee_fndecl (t);
1240 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1241 arglist = build_tree_list (NULL_TREE, arg);
1242 return build_function_call_expr (fndecl, arglist);
1247 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1248 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1250 tree op1 = TREE_OPERAND (t, 1);
1251 if (TREE_INT_CST_HIGH (op1) == 0
1252 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1253 == TREE_INT_CST_LOW (op1))
/* Flip signedness so the shift drags in the opposite fill bits; the
   result equals the negation of the original sign-bit extraction.  */
1255 tree ntype = TYPE_UNSIGNED (type)
1256 ? lang_hooks.types.signed_type (type)
1257 : lang_hooks.types.unsigned_type (type);
1258 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1259 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1260 return fold_convert (type, temp);
1272 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1273 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1274 return NULL_TREE. */
1277 negate_expr (tree t)
/* Remember T's original type before stripping sign-preserving conversions,
   so the final result can be converted back to it.  */
1284 type = TREE_TYPE (t);
1285 STRIP_SIGN_NOPS (t);
/* First try to simplify the negation; only when that fails do we build an
   explicit NEGATE_EXPR node.  NOTE(review): the NULL-check between these two
   statements is elided in this excerpt — confirm against the full source.  */
1287 tem = fold_negate_expr (t);
1289 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1290 return fold_convert (type, tem);
1293 /* Split a tree IN into a constant, literal and variable parts that could be
1294 combined with CODE to make IN. "constant" means an expression with
1295 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1296 commutative arithmetic operation. Store the constant part into *CONP,
1297 the literal in *LITP and return the variable part. If a part isn't
1298 present, set it to null. If the tree does not decompose in this way,
1299 return the entire tree as the variable part and the other parts as null.
1301 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1302 case, we negate an operand that was subtracted. Except if it is a
1303 literal for which we use *MINUS_LITP instead.
1305 If NEGATE_P is true, we are negating all of IN, again except a literal
1306 for which we use *MINUS_LITP instead.
1308 If IN is itself a literal or constant, return it as appropriate.
1310 Note that we do not guarantee that any of the three values will be the
1311 same type as IN, but they will have the same signedness and mode. */
1314 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1315 tree *minus_litp, int negate_p)
1323 /* Strip any conversions that don't change the machine mode or signedness. */
1324 STRIP_SIGN_NOPS (in);
1326 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1328 else if (TREE_CODE (in) == code
1329 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1330 /* We can associate addition and subtraction together (even
1331 though the C standard doesn't say so) for integers because
1332 the value is not affected. For reals, the value might be
1333 affected, so we can't. */
1334 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1335 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1337 tree op0 = TREE_OPERAND (in, 0);
1338 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p: the second operand of IN is logically negated (IN is a MINUS).
   The other neg_*_p flags record which split-out part inherits it.  */
1339 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1340 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1342 /* First see if either of the operands is a literal, then a constant. */
1343 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1344 *litp = op0, op0 = 0;
1345 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1346 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1348 if (op0 != 0 && TREE_CONSTANT (op0))
1349 *conp = op0, op0 = 0;
1350 else if (op1 != 0 && TREE_CONSTANT (op1))
1351 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1353 /* If we haven't dealt with either operand, this is not a case we can
1354 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1355 if (op0 != 0 && op1 != 0)
1360 var = op1, neg_var_p = neg1_p;
1362 /* Now do any needed negations. */
/* A negated literal is reported via *MINUS_LITP rather than being wrapped
   in a negation, per the function comment above.  */
1364 *minus_litp = *litp, *litp = 0;
1366 *conp = negate_expr (*conp);
1368 var = negate_expr (var);
1370 else if (TREE_CONSTANT (in))
/* NEGATE_P handling: flip which of *LITP / *MINUS_LITP holds the literal
   and negate the remaining parts.  */
1378 *minus_litp = *litp, *litp = 0;
1379 else if (*minus_litp)
1380 *litp = *minus_litp, *minus_litp = 0;
1381 *conp = negate_expr (*conp);
1382 var = negate_expr (var);
1388 /* Re-associate trees split by the above function. T1 and T2 are either
1389 expressions to associate or null. Return the new expression, if any. If
1390 we build an operation, do it in TYPE and with CODE. */
1393 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1400 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1401 try to fold this since we will have infinite recursion. But do
1402 deal with any NEGATE_EXPRs. */
1403 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1404 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1406 if (code == PLUS_EXPR)
/* A + -B  ->  A - B, in either operand position.  */
1408 if (TREE_CODE (t1) == NEGATE_EXPR)
1409 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1410 fold_convert (type, TREE_OPERAND (t1, 0)));
1411 else if (TREE_CODE (t2) == NEGATE_EXPR)
1412 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1413 fold_convert (type, TREE_OPERAND (t2, 0)));
1414 else if (integer_zerop (t2))
1415 return fold_convert (type, t1);
1417 else if (code == MINUS_EXPR)
/* A - 0  ->  A.  */
1419 if (integer_zerop (t2))
1420 return fold_convert (type, t1);
/* Use build2, not fold_build2, here: folding could recurse forever
   (see the comment above).  */
1423 return build2 (code, type, fold_convert (type, t1),
1424 fold_convert (type, t2));
/* Safe to fold in the general case.  */
1427 return fold_build2 (code, type, fold_convert (type, t1),
1428 fold_convert (type, t2));
1431 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1432 for use in int_const_binop, size_binop and size_diffop. */
1435 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
/* Both types must be integer or pointer types.  */
1437 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1439 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
/* Equivalence for our purposes: same signedness, precision and
   machine mode.  */
1454 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1455 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1456 && TYPE_MODE (type1) == TYPE_MODE (type2);
1460 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1461 to produce a new constant. Return NULL_TREE if we don't know how
1462 to evaluate CODE at compile-time.
1464 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1467 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Each INTEGER_CST is represented as a double-word (low/high) value;
   the arithmetic below works on those word pairs.  */
1469 unsigned HOST_WIDE_INT int1l, int2l;
1470 HOST_WIDE_INT int1h, int2h;
1471 unsigned HOST_WIDE_INT low;
/* Scratch outputs for div_and_round_double when we only need one of
   quotient/remainder.  */
1473 unsigned HOST_WIDE_INT garbagel;
1474 HOST_WIDE_INT garbageh;
1476 tree type = TREE_TYPE (arg1);
1477 int uns = TYPE_UNSIGNED (type);
1479 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1482 int1l = TREE_INT_CST_LOW (arg1);
1483 int1h = TREE_INT_CST_HIGH (arg1);
1484 int2l = TREE_INT_CST_LOW (arg2);
1485 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise operations: done word-by-word, cannot overflow.  */
1490 low = int1l | int2l, hi = int1h | int2h;
1494 low = int1l ^ int2l, hi = int1h ^ int2h;
1498 low = int1l & int2l, hi = int1h & int2h;
1504 /* It's unclear from the C standard whether shifts can overflow.
1505 The following code ignores overflow; perhaps a C standard
1506 interpretation ruling is needed. */
1507 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1514 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1519 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction is implemented as addition of the negation; the overflow
   test compares the sign words of result, -ARG2 and ARG1.  */
1523 neg_double (int2l, int2h, &low, &hi);
1524 add_double (int1l, int1h, low, hi, &low, &hi);
1525 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1529 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1532 case TRUNC_DIV_EXPR:
1533 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1534 case EXACT_DIV_EXPR:
1535 /* This is a shortcut for a common special case. */
1536 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1537 && !TREE_OVERFLOW (arg1)
1538 && !TREE_OVERFLOW (arg2)
1539 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1541 if (code == CEIL_DIV_EXPR)
1544 low = int1l / int2l, hi = 0;
1548 /* ... fall through ... */
1550 case ROUND_DIV_EXPR:
/* Division by zero: punt (NOTE(review): the return is elided in this
   excerpt).  X / 1 -> X, X / X -> 1 for nonzero X.  */
1551 if (int2h == 0 && int2l == 0)
1553 if (int2h == 0 && int2l == 1)
1555 low = int1l, hi = int1h;
1558 if (int1l == int2l && int1h == int2h
1559 && ! (int1l == 0 && int1h == 0))
/* General double-word division; the remainder goes to the scratch
   variables.  */
1564 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1565 &low, &hi, &garbagel, &garbageh);
1568 case TRUNC_MOD_EXPR:
1569 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1570 /* This is a shortcut for a common special case. */
1571 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1572 && !TREE_OVERFLOW (arg1)
1573 && !TREE_OVERFLOW (arg2)
1574 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1576 if (code == CEIL_MOD_EXPR)
1578 low = int1l % int2l, hi = 0;
1582 /* ... fall through ... */
1584 case ROUND_MOD_EXPR:
1586 overflow = div_and_round_double (code, uns,
1588 int1l, int1h, int2l, int2h,
1589 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compute "ARG1 < ARG2" (unsigned or signed word-pair compare),
   then pick the appropriate operand.  */
1595 low = (((unsigned HOST_WIDE_INT) int1h
1596 < (unsigned HOST_WIDE_INT) int2h)
1597 || (((unsigned HOST_WIDE_INT) int1h
1598 == (unsigned HOST_WIDE_INT) int2h)
1601 low = (int1h < int2h
1602 || (int1h == int2h && int1l < int2l));
1604 if (low == (code == MIN_EXPR))
1605 low = int1l, hi = int1h;
1607 low = int2l, hi = int2h;
1616 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1618 /* Propagate overflow flags ourselves. */
/* Overflow only matters for signed types and sizetypes; also inherit
   any overflow already flagged on the operands.  */
1619 if (((!uns || is_sizetype) && overflow)
1620 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1623 TREE_OVERFLOW (t) = 1;
/* Otherwise let force_fit_type_double truncate/extend and set overflow.  */
1627 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1628 ((!uns || is_sizetype) && overflow)
1629 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1634 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1635 constant. We assume ARG1 and ARG2 have the same data type, or at least
1636 are the same kind of constant and the same machine mode. Return zero if
1637 combining the constants is not allowed in the current operating mode.
1639 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1642 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1644 /* Sanity check for the recursive cases. */
/* Integer constants: delegate to the double-word integer folder.  */
1651 if (TREE_CODE (arg1) == INTEGER_CST)
1652 return int_const_binop (code, arg1, arg2, notrunc);
1654 if (TREE_CODE (arg1) == REAL_CST)
1656 enum machine_mode mode;
1659 REAL_VALUE_TYPE value;
1660 REAL_VALUE_TYPE result;
1664 /* The following codes are handled by real_arithmetic. */
1679 d1 = TREE_REAL_CST (arg1);
1680 d2 = TREE_REAL_CST (arg2);
1682 type = TREE_TYPE (arg1);
1683 mode = TYPE_MODE (type);
1685 /* Don't perform operation if we honor signaling NaNs and
1686 either operand is a NaN. */
1687 if (HONOR_SNANS (mode)
1688 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1691 /* Don't perform operation if it would raise a division
1692 by zero exception. */
1693 if (code == RDIV_EXPR
1694 && REAL_VALUES_EQUAL (d2, dconst0)
1695 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1698 /* If either operand is a NaN, just return it. Otherwise, set up
1699 for floating-point trap; we return an overflow. */
1700 if (REAL_VALUE_ISNAN (d1))
1702 else if (REAL_VALUE_ISNAN (d2))
/* Compute in full precision, then round to the type's mode; INEXACT
   records whether real_arithmetic lost precision.  */
1705 inexact = real_arithmetic (&value, code, &d1, &d2);
1706 real_convert (&result, mode, &value);
1708 /* Don't constant fold this floating point operation if
1709 the result has overflowed and flag_trapping_math. */
1710 if (flag_trapping_math
1711 && MODE_HAS_INFINITIES (mode)
1712 && REAL_VALUE_ISINF (result)
1713 && !REAL_VALUE_ISINF (d1)
1714 && !REAL_VALUE_ISINF (d2))
1717 /* Don't constant fold this floating point operation if the
1718 result may dependent upon the run-time rounding mode and
1719 flag_rounding_math is set, or if GCC's software emulation
1720 is unable to accurately represent the result. */
1721 if ((flag_rounding_math
1722 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1723 && !flag_unsafe_math_optimizations))
1724 && (inexact || !real_identical (&result, &value)))
1727 t = build_real (type, result);
1729 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1733 if (TREE_CODE (arg1) == COMPLEX_CST)
1735 tree type = TREE_TYPE (arg1);
1736 tree r1 = TREE_REALPART (arg1);
1737 tree i1 = TREE_IMAGPART (arg1);
1738 tree r2 = TREE_REALPART (arg2);
1739 tree i2 = TREE_IMAGPART (arg2);
/* Componentwise case (e.g. addition/subtraction): fold each part
   independently.  */
1746 real = const_binop (code, r1, r2, notrunc);
1747 imag = const_binop (code, i1, i2, notrunc);
/* Complex multiplication: (r1 + i1*i)(r2 + i2*i)
   = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
1751 real = const_binop (MINUS_EXPR,
1752 const_binop (MULT_EXPR, r1, r2, notrunc),
1753 const_binop (MULT_EXPR, i1, i2, notrunc),
1755 imag = const_binop (PLUS_EXPR,
1756 const_binop (MULT_EXPR, r1, i2, notrunc),
1757 const_binop (MULT_EXPR, i1, r2, notrunc),
/* Complex division: multiply by the conjugate of the divisor and divide
   both parts by |r2 + i2*i|^2 = r2*r2 + i2*i2 (MAGSQUARED).  */
1764 = const_binop (PLUS_EXPR,
1765 const_binop (MULT_EXPR, r2, r2, notrunc),
1766 const_binop (MULT_EXPR, i2, i2, notrunc),
1769 = const_binop (PLUS_EXPR,
1770 const_binop (MULT_EXPR, r1, r2, notrunc),
1771 const_binop (MULT_EXPR, i1, i2, notrunc),
1774 = const_binop (MINUS_EXPR,
1775 const_binop (MULT_EXPR, i1, r2, notrunc),
1776 const_binop (MULT_EXPR, r1, i2, notrunc),
/* For integral complex types the final divide must truncate.  */
1779 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1780 code = TRUNC_DIV_EXPR;
1782 real = const_binop (code, t1, magsquared, notrunc);
1783 imag = const_binop (code, t2, magsquared, notrunc);
1792 return build_complex (type, real, imag);
1798 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1799 indicates which particular sizetype to create. */
1802 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
/* sizetype_tab maps the size_type_kind enumerators to their tree types.  */
1804 return build_int_cst (sizetype_tab[(int) kind], number);
1807 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1808 is a tree code. The type of the result is taken from the operands.
1809 Both must be equivalent integer types, ala int_binop_types_match_p.
1810 If the operands are constant, so is the result. */
1813 size_binop (enum tree_code code, tree arg0, tree arg1)
1815 tree type = TREE_TYPE (arg0);
/* Propagate earlier errors instead of folding them.  */
1817 if (arg0 == error_mark_node || arg1 == error_mark_node)
1818 return error_mark_node;
1820 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1823 /* Handle the special case of two integer constants faster. */
1824 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1826 /* And some specific cases even faster than that. */
/* Identity shortcuts (0 + x, x + 0, x - 0, 1 * x); skipped when the
   operand carries an overflow flag that must be preserved.  */
1827 if (code == PLUS_EXPR)
1829 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1831 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1834 else if (code == MINUS_EXPR)
1836 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1839 else if (code == MULT_EXPR)
1841 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1845 /* Handle general case of two integer constants. */
1846 return int_const_binop (code, arg0, arg1, 0);
/* Non-constant operands: build a folded expression instead.  */
1849 return fold_build2 (code, type, arg0, arg1);
1852 /* Given two values, either both of sizetype or both of bitsizetype,
1853 compute the difference between the two values. Return the value
1854 in signed type corresponding to the type of the operands. */
1857 size_diffop (tree arg0, tree arg1)
1859 tree type = TREE_TYPE (arg0);
1862 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1865 /* If the type is already signed, just do the simple thing. */
1866 if (!TYPE_UNSIGNED (type))
1867 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart (CTYPE) of the unsigned operand type.  */
1869 if (type == sizetype)
1871 else if (type == bitsizetype)
1872 ctype = sbitsizetype;
1874 ctype = lang_hooks.types.signed_type (type);
1876 /* If either operand is not a constant, do the conversions to the signed
1877 type and subtract. The hardware will do the right thing with any
1878 overflow in the subtraction. */
1879 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1880 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1881 fold_convert (ctype, arg1));
1883 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1884 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1885 overflow) and negate (which can't either). Special-case a result
1886 of zero while we're here. */
1887 if (tree_int_cst_equal (arg0, arg1))
1888 return build_int_cst (ctype, 0);
1889 else if (tree_int_cst_lt (arg1, arg0))
1890 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* ARG0 < ARG1: compute 0 - (ARG1 - ARG0) in the signed type.  */
1892 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1893 fold_convert (ctype, size_binop (MINUS_EXPR,
1897 /* A subroutine of fold_convert_const handling conversions of an
1898 INTEGER_CST to another integer type. */
1901 fold_convert_const_int_from_int (tree type, tree arg1)
1905 /* Given an integer constant, make new constant with new type,
1906 appropriately sign-extended or truncated. */
1907 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
1908 TREE_INT_CST_HIGH (arg1),
1909 /* Don't set the overflow when
1910 converting a pointer */
1911 !POINTER_TYPE_P (TREE_TYPE (arg1)),
/* Flag overflow when a negative value is converted to a type of greater
   unsignedness, and propagate any overflow already on ARG1.  */
1912 (TREE_INT_CST_HIGH (arg1) < 0
1913 && (TYPE_UNSIGNED (type)
1914 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1915 | TREE_OVERFLOW (arg1));
1920 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1921 to an integer type. */
1924 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1929 /* The following code implements the floating point to integer
1930 conversion rules required by the Java Language Specification,
1931 that IEEE NaNs are mapped to zero and values that overflow
1932 the target precision saturate, i.e. values greater than
1933 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1934 are mapped to INT_MIN. These semantics are allowed by the
1935 C and C++ standards that simply state that the behavior of
1936 FP-to-integer conversion is unspecified upon overflow. */
1938 HOST_WIDE_INT high, low;
1940 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* CODE selects the rounding applied before conversion; only the
   truncation case is visible in this excerpt.  */
1944 case FIX_TRUNC_EXPR:
1945 real_trunc (&r, VOIDmode, &x);
1952 /* If R is NaN, return zero and show we have an overflow. */
1953 if (REAL_VALUE_ISNAN (r))
1960 /* See if R is less than the lower bound or greater than the
/* Saturate at TYPE_MIN_VALUE when R underflows the target type ...  */
1965 tree lt = TYPE_MIN_VALUE (type);
1966 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1967 if (REAL_VALUES_LESS (r, l))
1970 high = TREE_INT_CST_HIGH (lt);
1971 low = TREE_INT_CST_LOW (lt);
/* ... and at TYPE_MAX_VALUE when it overflows.  */
1977 tree ut = TYPE_MAX_VALUE (type);
1980 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1981 if (REAL_VALUES_LESS (u, r))
1984 high = TREE_INT_CST_HIGH (ut);
1985 low = TREE_INT_CST_LOW (ut);
/* In range: convert the rounded real to a double-word integer.  */
1991 REAL_VALUE_TO_INT (&low, &high, r);
1993 t = force_fit_type_double (type, low, high, -1,
1994 overflow | TREE_OVERFLOW (arg1));
1998 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1999 to another floating point type. */
2002 fold_convert_const_real_from_real (tree type, tree arg1)
2004 REAL_VALUE_TYPE value;
/* Round ARG1's value to the target type's mode and carry over any
   overflow flag.  */
2007 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2008 t = build_real (type, value);
2010 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2014 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2015 type TYPE. If no simplification can be done return NULL_TREE. */
2018 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* Trivial conversion: same type, nothing to do.  */
2020 if (TREE_TYPE (arg1) == type)
/* Dispatch on target-type class, then on the constant's kind.  */
2023 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2025 if (TREE_CODE (arg1) == INTEGER_CST)
2026 return fold_convert_const_int_from_int (type, arg1);
2027 else if (TREE_CODE (arg1) == REAL_CST)
2028 return fold_convert_const_int_from_real (code, type, arg1);
2030 else if (TREE_CODE (type) == REAL_TYPE)
2032 if (TREE_CODE (arg1) == INTEGER_CST)
2033 return build_real_from_int_cst (type, arg1);
2034 if (TREE_CODE (arg1) == REAL_CST)
2035 return fold_convert_const_real_from_real (type, arg1);
2040 /* Construct a vector of zero elements of vector type TYPE. */
2043 build_zero_vector (tree type)
/* Build one zero of the element type, then replicate it once per lane.  */
2048 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2049 units = TYPE_VECTOR_SUBPARTS (type);
2052 for (i = 0; i < units; i++)
2053 list = tree_cons (NULL_TREE, elem, list);
2054 return build_vector (type, list);
2057 /* Convert expression ARG to type TYPE. Used by the middle-end for
2058 simple conversions in preference to calling the front-end's convert. */
2061 fold_convert (tree type, tree arg)
2063 tree orig = TREE_TYPE (arg);
/* Propagate errors rather than converting them.  */
2069 if (TREE_CODE (arg) == ERROR_MARK
2070 || TREE_CODE (type) == ERROR_MARK
2071 || TREE_CODE (orig) == ERROR_MARK)
2072 return error_mark_node;
/* Same (or language-compatible) main variants: a plain NOP suffices.  */
2074 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2075 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2076 TYPE_MAIN_VARIANT (orig)))
2077 return fold_build1 (NOP_EXPR, type, arg);
2079 switch (TREE_CODE (type))
2081 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2082 case POINTER_TYPE: case REFERENCE_TYPE:
/* Integer constants can often be converted at compile time.  */
2084 if (TREE_CODE (arg) == INTEGER_CST)
2086 tem = fold_convert_const (NOP_EXPR, type, arg);
2087 if (tem != NULL_TREE)
2090 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2091 || TREE_CODE (orig) == OFFSET_TYPE)
2092 return fold_build1 (NOP_EXPR, type, arg);
/* complex -> integer: take the real part, then convert it.  */
2093 if (TREE_CODE (orig) == COMPLEX_TYPE)
2095 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2096 return fold_convert (type, tem);
2098 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2099 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2100 return fold_build1 (NOP_EXPR, type, arg);
/* Real target type (case label elided in this excerpt).  */
2103 if (TREE_CODE (arg) == INTEGER_CST)
2105 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2106 if (tem != NULL_TREE)
2109 else if (TREE_CODE (arg) == REAL_CST)
2111 tem = fold_convert_const (NOP_EXPR, type, arg);
2112 if (tem != NULL_TREE)
2116 switch (TREE_CODE (orig))
2119 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2120 case POINTER_TYPE: case REFERENCE_TYPE:
2121 return fold_build1 (FLOAT_EXPR, type, arg);
2124 return fold_build1 (NOP_EXPR, type, arg);
2127 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2128 return fold_convert (type, tem);
/* Complex target type: build (re, 0) from a scalar, or convert both
   parts of an existing complex value.  */
2135 switch (TREE_CODE (orig))
2138 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2139 case POINTER_TYPE: case REFERENCE_TYPE:
2141 return build2 (COMPLEX_EXPR, type,
2142 fold_convert (TREE_TYPE (type), arg),
2143 fold_convert (TREE_TYPE (type), integer_zero_node));
2148 if (TREE_CODE (arg) == COMPLEX_EXPR)
2150 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2151 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2152 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* ARG is used twice below (real and imaginary part); wrap it in a
   SAVE_EXPR so side effects happen only once.  */
2155 arg = save_expr (arg);
2156 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2157 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2158 rpart = fold_convert (TREE_TYPE (type), rpart);
2159 ipart = fold_convert (TREE_TYPE (type), ipart);
2160 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* Vector target type: zero folds directly; otherwise view-convert a
   same-size integral/pointer/vector value.  */
2168 if (integer_zerop (arg))
2169 return build_zero_vector (type);
2170 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2171 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2172 || TREE_CODE (orig) == VECTOR_TYPE);
2173 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
/* void target type: discard the value but keep needed side effects.  */
2176 tem = fold_ignored_result (arg);
2177 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2179 return fold_build1 (NOP_EXPR, type, tem);
2186 /* Return false if expr can be assumed not to be an lvalue, true
2190 maybe_lvalue_p (tree x)
2192 /* We only need to wrap lvalue tree codes. */
2193 switch (TREE_CODE (x))
/* The case labels below enumerate tree codes that may denote lvalues;
   many labels are elided in this excerpt.  */
2204 case ALIGN_INDIRECT_REF:
2205 case MISALIGNED_INDIRECT_REF:
2207 case ARRAY_RANGE_REF:
2213 case PREINCREMENT_EXPR:
2214 case PREDECREMENT_EXPR:
2216 case TRY_CATCH_EXPR:
2217 case WITH_CLEANUP_EXPR:
2220 case GIMPLE_MODIFY_STMT:
2229 /* Assume the worst for front-end tree codes. */
/* Codes >= NUM_TREE_CODES are language-specific and unknown here, so
   conservatively treat them as possible lvalues.  */
2230 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2238 /* Return an expr equal to X but certainly not valid as an lvalue. */
2243 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* Only wrap X when it could actually be an lvalue; otherwise return it
   unchanged (the early return is elided in this excerpt).  */
2248 if (! maybe_lvalue_p (x))
2250 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2253 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2254 Zero means allow extended lvalues. */
2256 int pedantic_lvalues;
2258 /* When pedantic, return an expr equal to X but certainly not valid as a
2259 pedantic lvalue. Otherwise, return X. */
2262 pedantic_non_lvalue (tree x)
/* Delegate to non_lvalue only when the pedantic-lvalue flag is set.  */
2264 if (pedantic_lvalues)
2265 return non_lvalue (x);
2270 /* Given a tree comparison code, return the code that is the logical inverse
2271 of the given code. It is not safe to do this for floating-point
2272 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2273 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2276 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math and NaNs honored, inverting could change which
   operands trap; refuse (the ERROR_MARK return is elided here).  */
2278 if (honor_nans && flag_trapping_math)
/* For ordered comparisons the inverse must become the unordered variant
   when NaNs are honored (e.g. !(a > b) is a UNLE b, not a <= b).  */
2288 return honor_nans ? UNLE_EXPR : LE_EXPR;
2290 return honor_nans ? UNLT_EXPR : LT_EXPR;
2292 return honor_nans ? UNGE_EXPR : GE_EXPR;
2294 return honor_nans ? UNGT_EXPR : GT_EXPR;
/* ORDERED and UNORDERED are exact inverses of each other.  */
2308 return UNORDERED_EXPR;
2309 case UNORDERED_EXPR:
2310 return ORDERED_EXPR;
2316 /* Similar, but return the comparison that results if the operands are
2317 swapped. This is safe for floating-point. */
2320 swap_tree_comparison (enum tree_code code)
/* NOTE(review): the body is almost entirely elided in this excerpt; only
   one case label survives.  Presumably a switch mapping each comparison
   code to its operand-swapped mirror — confirm against the full source.  */
2327 case UNORDERED_EXPR:
2353 /* Convert a comparison tree code from an enum tree_code representation
2354 into a compcode bit-based encoding. This function is the inverse of
2355 compcode_to_comparison. */
2357 static enum comparison_code
2358 comparison_to_compcode (enum tree_code code)
/* One-to-one mapping from tree comparison codes to COMPCODE_* bit masks;
   several case labels are elided in this excerpt.  */
2375 return COMPCODE_ORD;
2376 case UNORDERED_EXPR:
2377 return COMPCODE_UNORD;
2379 return COMPCODE_UNLT;
2381 return COMPCODE_UNEQ;
2383 return COMPCODE_UNLE;
2385 return COMPCODE_UNGT;
2387 return COMPCODE_LTGT;
2389 return COMPCODE_UNGE;
2395 /* Convert a compcode bit-based encoding of a comparison operator back
2396 to GCC's enum tree_code representation. This function is the
2397 inverse of comparison_to_compcode. */
2399 static enum tree_code
2400 compcode_to_comparison (enum comparison_code code)
/* Reverse mapping of comparison_to_compcode; most case labels are elided
   in this excerpt.  */
2417 return ORDERED_EXPR;
2418 case COMPCODE_UNORD:
2419 return UNORDERED_EXPR;
2437 /* Return a tree for the comparison which is the combination of
2438 doing the AND or OR (depending on CODE) of the two operations LCODE
2439 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2440 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2441 if this makes the transformation invalid. */
2444 combine_comparisons (enum tree_code code, enum tree_code lcode,
2445 enum tree_code rcode, tree truth_type,
2446 tree ll_arg, tree lr_arg)
2448 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2449 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2450 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2451 enum comparison_code compcode;
/* In the bit encoding, logical AND/OR of comparisons on the same operands
   is just bitwise AND/OR of their compcodes.  */
2455 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2456 compcode = lcompcode & rcompcode;
2459 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2460 compcode = lcompcode | rcompcode;
2469 /* Eliminate unordered comparisons, as well as LTGT and ORD
2470 which are not used unless the mode has NaNs. */
2471 compcode &= ~COMPCODE_UNORD;
2472 if (compcode == COMPCODE_LTGT)
2473 compcode = COMPCODE_NE;
2474 else if (compcode == COMPCODE_ORD)
2475 compcode = COMPCODE_TRUE;
2477 else if (flag_trapping_math)
2479 /* Check that the original operation and the optimized ones will trap
2480 under the same condition. */
/* A comparison traps on NaN operands unless it is EQ, ORD, or already an
   unordered-tolerant (COMPCODE_UNORD) form.  */
2481 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2482 && (lcompcode != COMPCODE_EQ)
2483 && (lcompcode != COMPCODE_ORD);
2484 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2485 && (rcompcode != COMPCODE_EQ)
2486 && (rcompcode != COMPCODE_ORD);
2487 bool trap = (compcode & COMPCODE_UNORD) == 0
2488 && (compcode != COMPCODE_EQ)
2489 && (compcode != COMPCODE_ORD);
2491 /* In a short-circuited boolean expression the LHS might be
2492 such that the RHS, if evaluated, will never trap. For
2493 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2494 if neither x nor y is NaN. (This is a mixed blessing: for
2495 example, the expression above will never trap, hence
2496 optimizing it to x < y would be invalid). */
2497 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2498 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2501 /* If the comparison was short-circuited, and only the RHS
2502 trapped, we may now generate a spurious trap. */
2504 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2507 /* If we changed the conditions that cause a trap, we lose. */
2508 if ((ltrap || rtrap) != trap)
/* A combined code of always-true/always-false folds to a constant.  */
2512 if (compcode == COMPCODE_TRUE)
2513 return constant_boolean_node (true, truth_type);
2514 else if (compcode == COMPCODE_FALSE)
2515 return constant_boolean_node (false, truth_type);
/* Otherwise emit the single equivalent comparison.  */
2517 return fold_build2 (compcode_to_comparison (compcode),
2518 truth_type, ll_arg, lr_arg);
2521 /* Return nonzero if CODE is a tree code that represents a truth value. */
2524 truth_value_p (enum tree_code code)
/* True for any comparison class code or the boolean connectives.  */
2526 return (TREE_CODE_CLASS (code) == tcc_comparison
2527 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2528 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2529 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2532 /* Return nonzero if two operands (typically of the same tree node)
2533 are necessarily equal. If either argument has side-effects this
2534 function returns zero. FLAGS modifies behavior as follows:
2536 If OEP_ONLY_CONST is set, only return nonzero for constants.
2537 This function tests whether the operands are indistinguishable;
2538 it does not test whether they are equal using C's == operation.
2539 The distinction is important for IEEE floating point, because
2540 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2541 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2543 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2544 even though it may hold multiple values during a function.
2545 This is because a GCC tree node guarantees that nothing else is
2546 executed between the evaluation of its "operands" (which may often
2547 be evaluated in arbitrary order). Hence if the operands themselves
2548 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2549 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2550 unset means assuming isochronic (or instantaneous) tree equivalence.
2551 Unless comparing arbitrary expression trees, such as from different
2552 statements, this flag can usually be left unset.
2554 If OEP_PURE_SAME is set, then pure functions with identical arguments
2555 are considered the same. It is used when the caller has other ways
/* operand_equal_p -- return nonzero iff ARG0 and ARG1 denote structurally
   equal operand trees under FLAGS.  The code below tests two flag bits:
   OEP_ONLY_CONST (restrict matching to constants / identical SAVE_EXPRs)
   and OEP_PURE_SAME (when comparing CALL_EXPR flags).
   NOTE(review): this extract is mangled -- each line carries a fused
   original-file line number and many lines are missing (the embedded
   numbers jump, e.g. 2562 -> 2565), so several returns, braces and case
   labels of this function are not visible here.  Verify any reading of
   this block against the complete file.  */
2556 to ensure that global memory is unchanged in between. */
2559 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2561 /* If either is ERROR_MARK, they aren't equal. */
2562 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2565 /* If both types don't have the same signedness, then we can't consider
2566 them equal. We must check this before the STRIP_NOPS calls
2567 because they may change the signedness of the arguments. */
2568 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2571 /* If both types don't have the same precision, then it is not safe
2573 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2579 /* In case both args are comparisons but with different comparison
2580 code, try to swap the comparison operands of one arg to produce
2581 a match and compare that variant. */
2582 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2583 && COMPARISON_CLASS_P (arg0)
2584 && COMPARISON_CLASS_P (arg1))
2586 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2588 if (TREE_CODE (arg0) == swap_code)
2589 return operand_equal_p (TREE_OPERAND (arg0, 0),
2590 TREE_OPERAND (arg1, 1), flags)
2591 && operand_equal_p (TREE_OPERAND (arg0, 1),
2592 TREE_OPERAND (arg1, 0), flags);
2595 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2596 /* This is needed for conversions and for COMPONENT_REF.
2597 Might as well play it safe and always test this. */
2598 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2599 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2600 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2603 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2604 We don't care about side effects in that case because the SAVE_EXPR
2605 takes care of that for us. In all other cases, two expressions are
2606 equal if they have no side effects. If we have two identical
2607 expressions with side effects that should be treated the same due
2608 to the only side effects being identical SAVE_EXPR's, that will
2609 be detected in the recursive calls below. */
2610 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2611 && (TREE_CODE (arg0) == SAVE_EXPR
2612 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2615 /* Next handle constant cases, those for which we can return 1 even
2616 if ONLY_CONST is set. */
2617 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2618 switch (TREE_CODE (arg0))
2621 return tree_int_cst_equal (arg0, arg1);
2624 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2625 TREE_REAL_CST (arg1)))
2629 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2631 /* If we do not distinguish between signed and unsigned zero,
2632 consider them equal. */
2633 if (real_zerop (arg0) && real_zerop (arg1))
2642 v1 = TREE_VECTOR_CST_ELTS (arg0);
2643 v2 = TREE_VECTOR_CST_ELTS (arg1);
2646 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2649 v1 = TREE_CHAIN (v1);
2650 v2 = TREE_CHAIN (v2);
2657 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2659 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2663 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2664 && ! memcmp (TREE_STRING_POINTER (arg0),
2665 TREE_STRING_POINTER (arg1),
2666 TREE_STRING_LENGTH (arg0)));
2669 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2675 if (flags & OEP_ONLY_CONST)
2678 /* Define macros to test an operand from arg0 and arg1 for equality and a
2679 variant that allows null and views null as being different from any
2680 non-null value. In the latter case, if either is null, the both
2681 must be; otherwise, do the normal comparison. */
2682 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2683 TREE_OPERAND (arg1, N), flags)
2685 #define OP_SAME_WITH_NULL(N) \
2686 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2687 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2689 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2692 /* Two conversions are equal only if signedness and modes match. */
2693 switch (TREE_CODE (arg0))
2697 case FIX_TRUNC_EXPR:
2698 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2699 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2709 case tcc_comparison:
2711 if (OP_SAME (0) && OP_SAME (1))
2714 /* For commutative ops, allow the other order. */
2715 return (commutative_tree_code (TREE_CODE (arg0))
2716 && operand_equal_p (TREE_OPERAND (arg0, 0),
2717 TREE_OPERAND (arg1, 1), flags)
2718 && operand_equal_p (TREE_OPERAND (arg0, 1),
2719 TREE_OPERAND (arg1, 0), flags));
2722 /* If either of the pointer (or reference) expressions we are
2723 dereferencing contain a side effect, these cannot be equal. */
2724 if (TREE_SIDE_EFFECTS (arg0)
2725 || TREE_SIDE_EFFECTS (arg1))
2728 switch (TREE_CODE (arg0))
2731 case ALIGN_INDIRECT_REF:
2732 case MISALIGNED_INDIRECT_REF:
2738 case ARRAY_RANGE_REF:
2739 /* Operands 2 and 3 may be null. */
2742 && OP_SAME_WITH_NULL (2)
2743 && OP_SAME_WITH_NULL (3));
2746 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2747 may be NULL when we're called to compare MEM_EXPRs. */
2748 return OP_SAME_WITH_NULL (0)
2750 && OP_SAME_WITH_NULL (2);
2753 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2759 case tcc_expression:
2760 switch (TREE_CODE (arg0))
2763 case TRUTH_NOT_EXPR:
2766 case TRUTH_ANDIF_EXPR:
2767 case TRUTH_ORIF_EXPR:
2768 return OP_SAME (0) && OP_SAME (1);
2770 case TRUTH_AND_EXPR:
2772 case TRUTH_XOR_EXPR:
2773 if (OP_SAME (0) && OP_SAME (1))
2776 /* Otherwise take into account this is a commutative operation. */
2777 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2778 TREE_OPERAND (arg1, 1), flags)
2779 && operand_equal_p (TREE_OPERAND (arg0, 1),
2780 TREE_OPERAND (arg1, 0), flags));
2783 /* If the CALL_EXPRs call different functions, then they
2784 clearly can not be equal. */
2789 unsigned int cef = call_expr_flags (arg0);
2790 if (flags & OEP_PURE_SAME)
2791 cef &= ECF_CONST | ECF_PURE;
2798 /* Now see if all the arguments are the same. operand_equal_p
2799 does not handle TREE_LIST, so we walk the operands here
2800 feeding them to operand_equal_p. */
2801 arg0 = TREE_OPERAND (arg0, 1);
2802 arg1 = TREE_OPERAND (arg1, 1);
2803 while (arg0 && arg1)
2805 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2809 arg0 = TREE_CHAIN (arg0);
2810 arg1 = TREE_CHAIN (arg1);
2813 /* If we get here and both argument lists are exhausted
2814 then the CALL_EXPRs are equal. */
2815 return ! (arg0 || arg1);
2821 case tcc_declaration:
2822 /* Consider __builtin_sqrt equal to sqrt. */
2823 return (TREE_CODE (arg0) == FUNCTION_DECL
2824 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2825 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2826 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2833 #undef OP_SAME_WITH_NULL
2836 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2837 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2839 When in doubt, return 0. */
2842 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2844 int unsignedp1, unsignedpo;
2845 tree primarg0, primarg1, primother;
2846 unsigned int correct_width;
2848 if (operand_equal_p (arg0, arg1, 0))
2851 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2852 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2855 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2856 and see if the inner values are the same. This removes any
2857 signedness comparison, which doesn't matter here. */
2858 primarg0 = arg0, primarg1 = arg1;
2859 STRIP_NOPS (primarg0);
2860 STRIP_NOPS (primarg1);
2861 if (operand_equal_p (primarg0, primarg1, 0))
2864 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2865 actual comparison operand, ARG0.
2867 First throw away any conversions to wider types
2868 already present in the operands. */
2870 primarg1 = get_narrower (arg1, &unsignedp1);
2871 primother = get_narrower (other, &unsignedpo);
2873 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2874 if (unsignedp1 == unsignedpo
2875 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2876 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2878 tree type = TREE_TYPE (arg0);
2880 /* Make sure shorter operand is extended the right way
2881 to match the longer operand. */
2882 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2883 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2885 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2892 /* See if ARG is an expression that is either a comparison or is performing
2893 arithmetic on comparisons. The comparisons must only be comparing
2894 two different values, which will be stored in *CVAL1 and *CVAL2; if
2895 they are nonzero it means that some operands have already been found.
2896 No variables may be used anywhere else in the expression except in the
2897 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2898 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2900 If this is true, return 1. Otherwise, return zero. */
2903 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2905 enum tree_code code = TREE_CODE (arg);
2906 enum tree_code_class class = TREE_CODE_CLASS (code);
2908 /* We can handle some of the tcc_expression cases here. */
2909 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2911 else if (class == tcc_expression
2912 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2913 || code == COMPOUND_EXPR))
2916 else if (class == tcc_expression && code == SAVE_EXPR
2917 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2919 /* If we've already found a CVAL1 or CVAL2, this expression is
2920 two complex to handle. */
2921 if (*cval1 || *cval2)
2931 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2934 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2935 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2936 cval1, cval2, save_p));
2941 case tcc_expression:
2942 if (code == COND_EXPR)
2943 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2944 cval1, cval2, save_p)
2945 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2946 cval1, cval2, save_p)
2947 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2948 cval1, cval2, save_p));
2951 case tcc_comparison:
2952 /* First see if we can handle the first operand, then the second. For
2953 the second operand, we know *CVAL1 can't be zero. It must be that
2954 one side of the comparison is each of the values; test for the
2955 case where this isn't true by failing if the two operands
2958 if (operand_equal_p (TREE_OPERAND (arg, 0),
2959 TREE_OPERAND (arg, 1), 0))
2963 *cval1 = TREE_OPERAND (arg, 0);
2964 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2966 else if (*cval2 == 0)
2967 *cval2 = TREE_OPERAND (arg, 0);
2968 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2973 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2975 else if (*cval2 == 0)
2976 *cval2 = TREE_OPERAND (arg, 1);
2977 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2989 /* ARG is a tree that is known to contain just arithmetic operations and
2990 comparisons. Evaluate the operations in the tree substituting NEW0 for
2991 any occurrence of OLD0 as an operand of a comparison and likewise for
2995 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2997 tree type = TREE_TYPE (arg);
2998 enum tree_code code = TREE_CODE (arg);
2999 enum tree_code_class class = TREE_CODE_CLASS (code);
3001 /* We can handle some of the tcc_expression cases here. */
3002 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3004 else if (class == tcc_expression
3005 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3011 return fold_build1 (code, type,
3012 eval_subst (TREE_OPERAND (arg, 0),
3013 old0, new0, old1, new1));
3016 return fold_build2 (code, type,
3017 eval_subst (TREE_OPERAND (arg, 0),
3018 old0, new0, old1, new1),
3019 eval_subst (TREE_OPERAND (arg, 1),
3020 old0, new0, old1, new1));
3022 case tcc_expression:
3026 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3029 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3032 return fold_build3 (code, type,
3033 eval_subst (TREE_OPERAND (arg, 0),
3034 old0, new0, old1, new1),
3035 eval_subst (TREE_OPERAND (arg, 1),
3036 old0, new0, old1, new1),
3037 eval_subst (TREE_OPERAND (arg, 2),
3038 old0, new0, old1, new1));
3042 /* Fall through - ??? */
3044 case tcc_comparison:
3046 tree arg0 = TREE_OPERAND (arg, 0);
3047 tree arg1 = TREE_OPERAND (arg, 1);
3049 /* We need to check both for exact equality and tree equality. The
3050 former will be true if the operand has a side-effect. In that
3051 case, we know the operand occurred exactly once. */
3053 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3055 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3058 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3060 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3063 return fold_build2 (code, type, arg0, arg1);
3071 /* Return a tree for the case when the result of an expression is RESULT
3072 converted to TYPE and OMITTED was previously an operand of the expression
3073 but is now not needed (e.g., we folded OMITTED * 0).
3075 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3076 the conversion of RESULT to TYPE. */
3079 omit_one_operand (tree type, tree result, tree omitted)
3081 tree t = fold_convert (type, result);
3083 if (TREE_SIDE_EFFECTS (omitted))
3084 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3086 return non_lvalue (t);
3089 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3092 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3094 tree t = fold_convert (type, result);
3096 if (TREE_SIDE_EFFECTS (omitted))
3097 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3099 return pedantic_non_lvalue (t);
3102 /* Return a tree for the case when the result of an expression is RESULT
3103 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3104 of the expression but are now not needed.
3106 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3107 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3108 evaluated before OMITTED2. Otherwise, if neither has side effects,
3109 just do the conversion of RESULT to TYPE. */
3112 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3114 tree t = fold_convert (type, result);
3116 if (TREE_SIDE_EFFECTS (omitted2))
3117 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3118 if (TREE_SIDE_EFFECTS (omitted1))
3119 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3121 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3125 /* Return a simplified tree node for the truth-negation of ARG. This
3126 never alters ARG itself. We assume that ARG is an operation that
3127 returns a truth value (0 or 1).
3129 FIXME: one would think we would fold the result, but it causes
3130 problems with the dominator optimizer. */
3133 fold_truth_not_expr (tree arg)
3135 tree type = TREE_TYPE (arg);
3136 enum tree_code code = TREE_CODE (arg);
3138 /* If this is a comparison, we can simply invert it, except for
3139 floating-point non-equality comparisons, in which case we just
3140 enclose a TRUTH_NOT_EXPR around what we have. */
3142 if (TREE_CODE_CLASS (code) == tcc_comparison)
3144 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3145 if (FLOAT_TYPE_P (op_type)
3146 && flag_trapping_math
3147 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3148 && code != NE_EXPR && code != EQ_EXPR)
3152 code = invert_tree_comparison (code,
3153 HONOR_NANS (TYPE_MODE (op_type)));
3154 if (code == ERROR_MARK)
3157 return build2 (code, type,
3158 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3165 return constant_boolean_node (integer_zerop (arg), type);
3167 case TRUTH_AND_EXPR:
3168 return build2 (TRUTH_OR_EXPR, type,
3169 invert_truthvalue (TREE_OPERAND (arg, 0)),
3170 invert_truthvalue (TREE_OPERAND (arg, 1)));
3173 return build2 (TRUTH_AND_EXPR, type,
3174 invert_truthvalue (TREE_OPERAND (arg, 0)),
3175 invert_truthvalue (TREE_OPERAND (arg, 1)));
3177 case TRUTH_XOR_EXPR:
3178 /* Here we can invert either operand. We invert the first operand
3179 unless the second operand is a TRUTH_NOT_EXPR in which case our
3180 result is the XOR of the first operand with the inside of the
3181 negation of the second operand. */
3183 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3184 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3185 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3187 return build2 (TRUTH_XOR_EXPR, type,
3188 invert_truthvalue (TREE_OPERAND (arg, 0)),
3189 TREE_OPERAND (arg, 1));
3191 case TRUTH_ANDIF_EXPR:
3192 return build2 (TRUTH_ORIF_EXPR, type,
3193 invert_truthvalue (TREE_OPERAND (arg, 0)),
3194 invert_truthvalue (TREE_OPERAND (arg, 1)));
3196 case TRUTH_ORIF_EXPR:
3197 return build2 (TRUTH_ANDIF_EXPR, type,
3198 invert_truthvalue (TREE_OPERAND (arg, 0)),
3199 invert_truthvalue (TREE_OPERAND (arg, 1)));
3201 case TRUTH_NOT_EXPR:
3202 return TREE_OPERAND (arg, 0);
3206 tree arg1 = TREE_OPERAND (arg, 1);
3207 tree arg2 = TREE_OPERAND (arg, 2);
3208 /* A COND_EXPR may have a throw as one operand, which
3209 then has void type. Just leave void operands
3211 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3212 VOID_TYPE_P (TREE_TYPE (arg1))
3213 ? arg1 : invert_truthvalue (arg1),
3214 VOID_TYPE_P (TREE_TYPE (arg2))
3215 ? arg2 : invert_truthvalue (arg2));
3219 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3220 invert_truthvalue (TREE_OPERAND (arg, 1)));
3222 case NON_LVALUE_EXPR:
3223 return invert_truthvalue (TREE_OPERAND (arg, 0));
3226 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3227 return build1 (TRUTH_NOT_EXPR, type, arg);
3231 return build1 (TREE_CODE (arg), type,
3232 invert_truthvalue (TREE_OPERAND (arg, 0)));
3235 if (!integer_onep (TREE_OPERAND (arg, 1)))
3237 return build2 (EQ_EXPR, type, arg,
3238 build_int_cst (type, 0));
3241 return build1 (TRUTH_NOT_EXPR, type, arg);
3243 case CLEANUP_POINT_EXPR:
3244 return build1 (CLEANUP_POINT_EXPR, type,
3245 invert_truthvalue (TREE_OPERAND (arg, 0)));
3254 /* Return a simplified tree node for the truth-negation of ARG. This
3255 never alters ARG itself. We assume that ARG is an operation that
3256 returns a truth value (0 or 1).
3258 FIXME: one would think we would fold the result, but it causes
3259 problems with the dominator optimizer. */
3262 invert_truthvalue (tree arg)
3266 if (TREE_CODE (arg) == ERROR_MARK)
3269 tem = fold_truth_not_expr (arg);
3271 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3276 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3277 operands are another bit-wise operation with a common input. If so,
3278 distribute the bit operations to save an operation and possibly two if
3279 constants are involved. For example, convert
3280 (A | B) & (A | C) into A | (B & C)
3281 Further simplification will occur if B and C are constants.
3283 If this optimization cannot be done, 0 will be returned. */
3286 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3291 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3292 || TREE_CODE (arg0) == code
3293 || (TREE_CODE (arg0) != BIT_AND_EXPR
3294 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3297 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3299 common = TREE_OPERAND (arg0, 0);
3300 left = TREE_OPERAND (arg0, 1);
3301 right = TREE_OPERAND (arg1, 1);
3303 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3305 common = TREE_OPERAND (arg0, 0);
3306 left = TREE_OPERAND (arg0, 1);
3307 right = TREE_OPERAND (arg1, 0);
3309 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3311 common = TREE_OPERAND (arg0, 1);
3312 left = TREE_OPERAND (arg0, 0);
3313 right = TREE_OPERAND (arg1, 1);
3315 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3317 common = TREE_OPERAND (arg0, 1);
3318 left = TREE_OPERAND (arg0, 0);
3319 right = TREE_OPERAND (arg1, 0);
3324 return fold_build2 (TREE_CODE (arg0), type, common,
3325 fold_build2 (code, type, left, right));
3328 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3329 with code CODE. This optimization is unsafe. */
3331 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3333 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3334 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3336 /* (A / C) +- (B / C) -> (A +- B) / C. */
3338 && operand_equal_p (TREE_OPERAND (arg0, 1),
3339 TREE_OPERAND (arg1, 1), 0))
3340 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3341 fold_build2 (code, type,
3342 TREE_OPERAND (arg0, 0),
3343 TREE_OPERAND (arg1, 0)),
3344 TREE_OPERAND (arg0, 1));
3346 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3347 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3348 TREE_OPERAND (arg1, 0), 0)
3349 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3350 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3352 REAL_VALUE_TYPE r0, r1;
3353 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3354 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3356 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3358 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3359 real_arithmetic (&r0, code, &r0, &r1);
3360 return fold_build2 (MULT_EXPR, type,
3361 TREE_OPERAND (arg0, 0),
3362 build_real (type, r0));
3368 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3369 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3372 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3379 tree size = TYPE_SIZE (TREE_TYPE (inner));
3380 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3381 || POINTER_TYPE_P (TREE_TYPE (inner)))
3382 && host_integerp (size, 0)
3383 && tree_low_cst (size, 0) == bitsize)
3384 return fold_convert (type, inner);
3387 result = build3 (BIT_FIELD_REF, type, inner,
3388 size_int (bitsize), bitsize_int (bitpos));
3390 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
/* optimize_bit_field_compare -- avoid the shift implicit in a bit-field
   extraction when comparing bit-fields, either against a constant or
   against another field at the same bit position.
   NOTE(review): this extract is mangled -- each line carries a fused
   original-file line number and many lines are missing (the embedded
   numbers jump, e.g. 3461 -> 3464), so several returns, braces and the
   full argument lists of the make_bit_field_ref calls are not visible
   here.  Verify any reading of this block against the complete file.  */
3395 /* Optimize a bit-field compare.
3397 There are two cases: First is a compare against a constant and the
3398 second is a comparison of two items where the fields are at the same
3399 bit position relative to the start of a chunk (byte, halfword, word)
3400 large enough to contain it. In these cases we can avoid the shift
3401 implicit in bitfield extractions.
3403 For constants, we emit a compare of the shifted constant with the
3404 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3405 compared. For two fields at the same position, we do the ANDs with the
3406 similar mask and compare the result of the ANDs.
3408 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3409 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3410 are the left and right operands of the comparison, respectively.
3412 If the optimization described above can be done, we return the resulting
3413 tree. Otherwise we return zero. */
3416 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3419 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3420 tree type = TREE_TYPE (lhs);
3421 tree signed_type, unsigned_type;
3422 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3423 enum machine_mode lmode, rmode, nmode;
3424 int lunsignedp, runsignedp;
3425 int lvolatilep = 0, rvolatilep = 0;
3426 tree linner, rinner = NULL_TREE;
3430 /* Get all the information about the extractions being done. If the bit size
3431 if the same as the size of the underlying object, we aren't doing an
3432 extraction at all and so can do nothing. We also don't want to
3433 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3434 then will no longer be able to replace it. */
3435 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3436 &lunsignedp, &lvolatilep, false);
3437 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3438 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3443 /* If this is not a constant, we can only do something if bit positions,
3444 sizes, and signedness are the same. */
3445 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3446 &runsignedp, &rvolatilep, false);
3448 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3449 || lunsignedp != runsignedp || offset != 0
3450 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3454 /* See if we can find a mode to refer to this field. We should be able to,
3455 but fail if we can't. */
3456 nmode = get_best_mode (lbitsize, lbitpos,
3457 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3458 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3459 TYPE_ALIGN (TREE_TYPE (rinner))),
3460 word_mode, lvolatilep || rvolatilep);
3461 if (nmode == VOIDmode)
3464 /* Set signed and unsigned types of the precision of this mode for the
3466 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3467 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3469 /* Compute the bit position and size for the new reference and our offset
3470 within it. If the new reference is the same size as the original, we
3471 won't optimize anything, so return zero. */
3472 nbitsize = GET_MODE_BITSIZE (nmode);
3473 nbitpos = lbitpos & ~ (nbitsize - 1);
3475 if (nbitsize == lbitsize)
3478 if (BYTES_BIG_ENDIAN)
3479 lbitpos = nbitsize - lbitsize - lbitpos;
3481 /* Make the mask to be used against the extracted field. */
3482 mask = build_int_cst_type (unsigned_type, -1);
3483 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3484 mask = const_binop (RSHIFT_EXPR, mask,
3485 size_int (nbitsize - lbitsize - lbitpos), 0);
3488 /* If not comparing with constant, just rework the comparison
3490 return fold_build2 (code, compare_type,
3491 fold_build2 (BIT_AND_EXPR, unsigned_type,
3492 make_bit_field_ref (linner,
3497 fold_build2 (BIT_AND_EXPR, unsigned_type,
3498 make_bit_field_ref (rinner,
3504 /* Otherwise, we are handling the constant case. See if the constant is too
3505 big for the field. Warn and return a tree of for 0 (false) if so. We do
3506 this not only for its own sake, but to avoid having to test for this
3507 error case below. If we didn't, we might generate wrong code.
3509 For unsigned fields, the constant shifted right by the field length should
3510 be all zero. For signed fields, the high-order bits should agree with
3515 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3516 fold_convert (unsigned_type, rhs),
3517 size_int (lbitsize), 0)))
3519 warning (0, "comparison is always %d due to width of bit-field",
3521 return constant_boolean_node (code == NE_EXPR, compare_type);
3526 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3527 size_int (lbitsize - 1), 0);
3528 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3530 warning (0, "comparison is always %d due to width of bit-field",
3532 return constant_boolean_node (code == NE_EXPR, compare_type);
3536 /* Single-bit compares should always be against zero. */
3537 if (lbitsize == 1 && ! integer_zerop (rhs))
3539 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3540 rhs = build_int_cst (type, 0);
3543 /* Make a new bitfield reference, shift the constant over the
3544 appropriate number of bits and mask it with the computed mask
3545 (in case this was a signed field). If we changed it, make a new one. */
3546 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3549 TREE_SIDE_EFFECTS (lhs) = 1;
3550 TREE_THIS_VOLATILE (lhs) = 1;
3553 rhs = const_binop (BIT_AND_EXPR,
3554 const_binop (LSHIFT_EXPR,
3555 fold_convert (unsigned_type, rhs),
3556 size_int (lbitpos), 0),
3559 return build2 (code, compare_type,
3560 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3564 /* Subroutine for fold_truthop: decode a field reference.
3566 If EXP is a comparison reference, we return the innermost reference.
3568 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3569 set to the starting bit number.
3571 If the innermost field can be completely contained in a mode-sized
3572 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3574 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3575 otherwise it is not changed.
3577 *PUNSIGNEDP is set to the signedness of the field.
3579 *PMASK is set to the mask used. This is either contained in a
3580 BIT_AND_EXPR or derived from the width of the field.
3582 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3584 Return 0 if this is not a component reference or is one that we can't
3585 do anything with. */
3588 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3589 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3590 int *punsignedp, int *pvolatilep,
3591 tree *pmask, tree *pand_mask)
3593 tree outer_type = 0;
3595 tree mask, inner, offset;
3597 unsigned int precision;
3599 /* All the optimizations using this function assume integer fields.
3600 There are problems with FP fields since the type_for_size call
3601 below can fail for, e.g., XFmode. */
3602 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3605 /* We are interested in the bare arrangement of bits, so strip everything
3606 that doesn't affect the machine mode. However, record the type of the
3607 outermost expression if it may matter below. */
3608 if (TREE_CODE (exp) == NOP_EXPR
3609 || TREE_CODE (exp) == CONVERT_EXPR
3610 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3611 outer_type = TREE_TYPE (exp);
3614 if (TREE_CODE (exp) == BIT_AND_EXPR)
3616 and_mask = TREE_OPERAND (exp, 1);
3617 exp = TREE_OPERAND (exp, 0);
3618 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3619 if (TREE_CODE (and_mask) != INTEGER_CST)
3623 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3624 punsignedp, pvolatilep, false);
3625 if ((inner == exp && and_mask == 0)
3626 || *pbitsize < 0 || offset != 0
3627 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3630 /* If the number of bits in the reference is the same as the bitsize of
3631 the outer type, then the outer type gives the signedness. Otherwise
3632 (in case of a small bitfield) the signedness is unchanged. */
3633 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3634 *punsignedp = TYPE_UNSIGNED (outer_type);
3636 /* Compute the mask to access the bitfield. */
3637 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3638 precision = TYPE_PRECISION (unsigned_type);
3640 mask = build_int_cst_type (unsigned_type, -1);
3642 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3643 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3645 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3647 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3648 fold_convert (unsigned_type, and_mask), mask);
3651 *pand_mask = and_mask;
3655 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3659 all_ones_mask_p (tree mask, int size)
3661 tree type = TREE_TYPE (mask);
3662 unsigned int precision = TYPE_PRECISION (type);
3665 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3668 tree_int_cst_equal (mask,
3669 const_binop (RSHIFT_EXPR,
3670 const_binop (LSHIFT_EXPR, tmask,
3671 size_int (precision - size),
3673 size_int (precision - size), 0));
3676 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3677 represents the sign bit of EXP's type. If EXP represents a sign
3678 or zero extension, also test VAL against the unextended type.
3679 The return value is the (sub)expression whose sign bit is VAL,
3680 or NULL_TREE otherwise. */
3683 sign_bit_p (tree exp, tree val)
3685 unsigned HOST_WIDE_INT mask_lo, lo;
3686 HOST_WIDE_INT mask_hi, hi;
3690 /* Tree EXP must have an integral type. */
3691 t = TREE_TYPE (exp);
3692 if (! INTEGRAL_TYPE_P (t))
3695 /* Tree VAL must be an integer constant. */
3696 if (TREE_CODE (val) != INTEGER_CST
3697 || TREE_OVERFLOW (val))
3700 width = TYPE_PRECISION (t);
3701 if (width > HOST_BITS_PER_WIDE_INT)
3703 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3706 mask_hi = ((unsigned HOST_WIDE_INT) -1
3707 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3713 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3716 mask_lo = ((unsigned HOST_WIDE_INT) -1
3717 >> (HOST_BITS_PER_WIDE_INT - width));
3720 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3721 treat VAL as if it were unsigned. */
3722 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3723 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3726 /* Handle extension from a narrower type. */
3727 if (TREE_CODE (exp) == NOP_EXPR
3728 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3729 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3734 /* Subroutine for fold_truthop: determine if an operand is simple enough
3735 to be evaluated unconditionally. */
3738 simple_operand_p (tree exp)
3740 /* Strip any conversions that don't change the machine mode. */
3743 return (CONSTANT_CLASS_P (exp)
3744 || TREE_CODE (exp) == SSA_NAME
3746 && ! TREE_ADDRESSABLE (exp)
3747 && ! TREE_THIS_VOLATILE (exp)
3748 && ! DECL_NONLOCAL (exp)
3749 /* Don't regard global variables as simple. They may be
3750 allocated in ways unknown to the compiler (shared memory,
3751 #pragma weak, etc). */
3752 && ! TREE_PUBLIC (exp)
3753 && ! DECL_EXTERNAL (exp)
3754 /* Loading a static variable is unduly expensive, but global
3755 registers aren't expensive. */
3756 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3759 /* The following functions are subroutines to fold_range_test and allow it to
3760 try to change a logical combination of comparisons into a range test.
3763 X == 2 || X == 3 || X == 4 || X == 5
3767 (unsigned) (X - 2) <= 3
3769 We describe each set of comparisons as being either inside or outside
3770 a range, using a variable named like IN_P, and then describe the
3771 range with a lower and upper bound. If one of the bounds is omitted,
3772 it represents either the highest or lowest value of the type.
3774 In the comments below, we represent a range by two numbers in brackets
3775 preceded by a "+" to designate being inside that range, or a "-" to
3776 designate being outside that range, so the condition can be inverted by
3777 flipping the prefix. An omitted bound is represented by a "-". For
3778 example, "- [-, 10]" means being outside the range starting at the lowest
3779 possible value and ending at 10, in other words, being greater than 10.
3780 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3783 We set up things so that the missing bounds are handled in a consistent
3784 manner so neither a missing bound nor "true" and "false" need to be
3785 handled using a special case. */
3787 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3788 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3789 and UPPER1_P are nonzero if the respective argument is an upper bound
3790 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3791 must be specified for a comparison. ARG1 will be converted to ARG0's
3792 type if both are specified. */
3795 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3796 tree arg1, int upper1_p)
3802 /* If neither arg represents infinity, do the normal operation.
3803 Else, if not a comparison, return infinity. Else handle the special
3804 comparison rules. Note that most of the cases below won't occur, but
3805 are handled for consistency. */
3807 if (arg0 != 0 && arg1 != 0)
3809 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3810 arg0, fold_convert (TREE_TYPE (arg0), arg1));
/* Only a fully-folded constant result is useful to callers; anything
   else means the bounds could not be combined, so report failure.  */
3812 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3815 if (TREE_CODE_CLASS (code) != tcc_comparison)
3818 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3819 for neither. In real maths, we cannot assume open ended ranges are
3820 the same. But, this is computer arithmetic, where numbers are finite.
3821 We can therefore make the transformation of any unbounded range with
3822 the value Z, Z being greater than any representable number. This permits
3823 us to treat unbounded ranges as equal. */
3824 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3825 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Compare the "infinity ranks": -1 (-inf) < 0 (finite) < 1 (+inf).
   NOTE(review): the switch case labels are elided here; each assignment
   below presumably belongs to EQ, NE, LT, LE, GT, GE respectively --
   confirm against the full source.  */
3829 result = sgn0 == sgn1;
3832 result = sgn0 != sgn1;
3835 result = sgn0 < sgn1;
3838 result = sgn0 <= sgn1;
3841 result = sgn0 > sgn1;
3844 result = sgn0 >= sgn1;
3850 return constant_boolean_node (result, type);
3853 /* Given EXP, a logical expression, set the range it is testing into
3854 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3855 actually being tested. *PLOW and *PHIGH will be made of the same type
3856 as the returned expression. If EXP is not a comparison, we will most
3857 likely not be returning a useful value and range. */
3860 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3862 enum tree_code code;
3863 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3864 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3866 tree low, high, n_low, n_high;
3868 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3869 and see if we can refine the range. Some of the cases below may not
3870 happen, but it doesn't seem worth worrying about this. We "continue"
3871 the outer loop when we've changed something; otherwise we "break"
3872 the switch, which will "break" the while. */
/* Initial range: EXP "outside [0, 0]", i.e. EXP != 0.  */
3875 low = high = build_int_cst (TREE_TYPE (exp), 0);
3879 code = TREE_CODE (exp);
3880 exp_type = TREE_TYPE (exp);
/* Pick up ARG0/ARG1 generically so the cases below can use them
   without re-deriving operand positions.  */
3882 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3884 if (TREE_CODE_LENGTH (code) > 0)
3885 arg0 = TREE_OPERAND (exp, 0);
3886 if (TREE_CODE_CLASS (code) == tcc_comparison
3887 || TREE_CODE_CLASS (code) == tcc_unary
3888 || TREE_CODE_CLASS (code) == tcc_binary)
3889 arg0_type = TREE_TYPE (arg0);
3890 if (TREE_CODE_CLASS (code) == tcc_binary
3891 || TREE_CODE_CLASS (code) == tcc_comparison
3892 || (TREE_CODE_CLASS (code) == tcc_expression
3893 && TREE_CODE_LENGTH (code) > 1))
3894 arg1 = TREE_OPERAND (exp, 1);
3899 case TRUTH_NOT_EXPR:
/* !E tests the complement of E's range.  */
3900 in_p = ! in_p, exp = arg0;
3903 case EQ_EXPR: case NE_EXPR:
3904 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3905 /* We can only do something if the range is testing for zero
3906 and if the second operand is an integer constant. Note that
3907 saying something is "in" the range we make is done by
3908 complementing IN_P since it will set in the initial case of
3909 being not equal to zero; "out" is leaving it alone. */
3910 if (low == 0 || high == 0
3911 || ! integer_zerop (low) || ! integer_zerop (high)
3912 || TREE_CODE (arg1) != INTEGER_CST)
3917 case NE_EXPR: /* - [c, c] */
3920 case EQ_EXPR: /* + [c, c] */
3921 in_p = ! in_p, low = high = arg1;
3923 case GT_EXPR: /* - [-, c] */
3924 low = 0, high = arg1;
3926 case GE_EXPR: /* + [c, -] */
3927 in_p = ! in_p, low = arg1, high = 0;
3929 case LT_EXPR: /* - [c, -] */
3930 low = arg1, high = 0;
3932 case LE_EXPR: /* + [-, c] */
3933 in_p = ! in_p, low = 0, high = arg1;
3939 /* If this is an unsigned comparison, we also know that EXP is
3940 greater than or equal to zero. We base the range tests we make
3941 on that fact, so we record it here so we can parse existing
3942 range tests. We test arg0_type since often the return type
3943 of, e.g. EQ_EXPR, is boolean. */
3944 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3946 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3948 build_int_cst (arg0_type, 0),
3952 in_p = n_in_p, low = n_low, high = n_high;
3954 /* If the high bound is missing, but we have a nonzero low
3955 bound, reverse the range so it goes from zero to the low bound
3957 if (high == 0 && low && ! integer_zerop (low))
3960 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3961 integer_one_node, 0);
3962 low = build_int_cst (arg0_type, 0);
3970 /* (-x) IN [a,b] -> x in [-b, -a] */
3971 n_low = range_binop (MINUS_EXPR, exp_type,
3972 build_int_cst (exp_type, 0),
3974 n_high = range_binop (MINUS_EXPR, exp_type,
3975 build_int_cst (exp_type, 0),
3977 low = n_low, high = n_high;
/* NOTE(review): this rewrite presumably belongs to the BIT_NOT_EXPR
   case, using the identity ~X == -X - 1 so the NEGATE_EXPR logic
   above can be reused -- confirm the elided case label.  */
3983 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3984 build_int_cst (exp_type, 1));
3987 case PLUS_EXPR: case MINUS_EXPR:
3988 if (TREE_CODE (arg1) != INTEGER_CST)
3991 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3992 move a constant to the other side. */
3993 if (!TYPE_UNSIGNED (arg0_type)
3994 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3997 /* If EXP is signed, any overflow in the computation is undefined,
3998 so we don't worry about it so long as our computations on
3999 the bounds don't overflow. For unsigned, overflow is defined
4000 and this is exactly the right thing. */
4001 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4002 arg0_type, low, 0, arg1, 0);
4003 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4004 arg0_type, high, 1, arg1, 0);
4005 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4006 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4009 /* Check for an unsigned range which has wrapped around the maximum
4010 value thus making n_high < n_low, and normalize it. */
4011 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4013 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4014 integer_one_node, 0);
4015 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4016 integer_one_node, 0);
4018 /* If the range is of the form +/- [ x+1, x ], we won't
4019 be able to normalize it. But then, it represents the
4020 whole range or the empty set, so make it
4022 if (tree_int_cst_equal (n_low, low)
4023 && tree_int_cst_equal (n_high, high))
4029 low = n_low, high = n_high;
4034 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
/* Cannot narrow the bounds into a wider type's view of EXP.  */
4035 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4038 if (! INTEGRAL_TYPE_P (arg0_type)
4039 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4040 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4043 n_low = low, n_high = high;
4046 n_low = fold_convert (arg0_type, n_low);
4049 n_high = fold_convert (arg0_type, n_high);
4052 /* If we're converting arg0 from an unsigned type, to exp,
4053 a signed type, we will be doing the comparison as unsigned.
4054 The tests above have already verified that LOW and HIGH
4057 So we have to ensure that we will handle large unsigned
4058 values the same way that the current signed bounds treat
4061 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4064 tree equiv_type = lang_hooks.types.type_for_mode
4065 (TYPE_MODE (arg0_type), 1);
4067 /* A range without an upper bound is, naturally, unbounded.
4068 Since convert would have cropped a very large value, use
4069 the max value for the destination type. */
4071 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4072 : TYPE_MAX_VALUE (arg0_type);
4074 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4075 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4076 fold_convert (arg0_type,
4078 build_int_cst (arg0_type, 1));
4080 /* If the low bound is specified, "and" the range with the
4081 range for which the original unsigned value will be
4085 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4086 1, n_low, n_high, 1,
4087 fold_convert (arg0_type,
4092 in_p = (n_in_p == in_p);
4096 /* Otherwise, "or" the range with the range of the input
4097 that will be interpreted as negative. */
4098 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4099 0, n_low, n_high, 1,
4100 fold_convert (arg0_type,
4105 in_p = (in_p != n_in_p);
4110 low = n_low, high = n_high;
4120 /* If EXP is a constant, we can evaluate whether this is true or false. */
4121 if (TREE_CODE (exp) == INTEGER_CST)
4123 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4125 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4131 *pin_p = in_p, *plow = low, *phigh = high;
4135 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4136 type, TYPE, return an expression to test if EXP is in (or out of, depending
4137 on IN_P) the range. Return 0 if the test couldn't be created. */
4140 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4142 tree etype = TREE_TYPE (exp);
4145 #ifdef HAVE_canonicalize_funcptr_for_compare
4146 /* Disable this optimization for function pointer expressions
4147 on targets that require function pointer canonicalization. */
4148 if (HAVE_canonicalize_funcptr_for_compare
4149 && TREE_CODE (etype) == POINTER_TYPE
4150 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* An "out of range" test is built as the inverted "in range" test.  */
4156 value = build_range_check (type, exp, 1, low, high);
4158 return invert_truthvalue (value);
/* No bounds at all: the range covers every value, so always true.  */
4163 if (low == 0 && high == 0)
4164 return build_int_cst (type, 1);
4167 return fold_build2 (LE_EXPR, type, exp,
4168 fold_convert (etype, high));
4171 return fold_build2 (GE_EXPR, type, exp,
4172 fold_convert (etype, low));
/* Degenerate one-element range [c, c] is a simple equality test.  */
4174 if (operand_equal_p (low, high, 0))
4175 return fold_build2 (EQ_EXPR, type, exp,
4176 fold_convert (etype, low));
4178 if (integer_zerop (low))
4180 if (! TYPE_UNSIGNED (etype))
/* [0, high] on a signed type: redo as an unsigned comparison so a
   single <= test covers both bounds.  */
4182 etype = lang_hooks.types.unsigned_type (etype);
4183 high = fold_convert (etype, high);
4184 exp = fold_convert (etype, exp);
4186 return build_range_check (type, exp, 1, 0, high);
4189 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4190 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4192 unsigned HOST_WIDE_INT lo;
4196 prec = TYPE_PRECISION (etype);
4197 if (prec <= HOST_BITS_PER_WIDE_INT)
/* HI/LO together encode the signed maximum for this precision,
   2**(prec-1) - 1, split across two host words.  */
4200 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4204 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4205 lo = (unsigned HOST_WIDE_INT) -1;
4208 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4210 if (TYPE_UNSIGNED (etype))
4212 etype = lang_hooks.types.signed_type (etype);
4213 exp = fold_convert (etype, exp);
4215 return fold_build2 (GT_EXPR, type, exp,
4216 build_int_cst (etype, 0));
4220 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4221 This requires wrap-around arithmetics for the type of the expression. */
4222 switch (TREE_CODE (etype))
4225 /* There is no requirement that LOW be within the range of ETYPE
4226 if the latter is a subtype. It must, however, be within the base
4227 type of ETYPE. So be sure we do the subtraction in that type. */
4228 if (TREE_TYPE (etype))
4229 etype = TREE_TYPE (etype);
4234 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4235 TYPE_UNSIGNED (etype));
4242 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4243 if (TREE_CODE (etype) == INTEGER_TYPE
4244 && !TYPE_OVERFLOW_WRAPS (etype))
4246 tree utype, minv, maxv;
4248 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4249 for the type in question, as we rely on this here. */
4250 utype = lang_hooks.types.unsigned_type (etype);
4251 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4252 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4253 integer_one_node, 1);
4254 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4256 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4263 high = fold_convert (etype, high);
4264 low = fold_convert (etype, low);
4265 exp = fold_convert (etype, exp);
4267 value = const_binop (MINUS_EXPR, high, low, 0);
/* Only rewrite when HIGH - LOW folded cleanly without overflow;
   otherwise the shifted range test would be wrong.  */
4269 if (value != 0 && !TREE_OVERFLOW (value))
4270 return build_range_check (type,
4271 fold_build2 (MINUS_EXPR, etype, exp, low),
4272 1, build_int_cst (etype, 0), value);
4277 /* Return the predecessor of VAL in its type, handling the infinite case. */
4280 range_predecessor (tree val)
4282 tree type = TREE_TYPE (val);
/* The type's minimum has no representable predecessor; the elided
   branch presumably returns 0 here, meaning "minus infinity" in the
   range representation -- confirm against the full source.  */
4284 if (INTEGRAL_TYPE_P (type)
4285 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4288 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4291 /* Return the successor of VAL in its type, handling the infinite case. */
4294 range_successor (tree val)
4296 tree type = TREE_TYPE (val);
/* The type's maximum has no representable successor; the elided branch
   presumably returns 0 here, meaning "plus infinity" in the range
   representation -- confirm against the full source.  */
4298 if (INTEGRAL_TYPE_P (type)
4299 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4302 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4305 /* Given two ranges, see if we can merge them into one. Return 1 if we
4306 can, 0 if we can't. Set the output range into the specified parameters. */
4309 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4310 tree high0, int in1_p, tree low1, tree high1)
/* A null bound stands for infinity, so equality of bounds must treat
   two null bounds as equal.  */
4318 int lowequal = ((low0 == 0 && low1 == 0)
4319 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4320 low0, 0, low1, 0)));
4321 int highequal = ((high0 == 0 && high1 == 0)
4322 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4323 high0, 1, high1, 1)));
4325 /* Make range 0 be the range that starts first, or ends last if they
4326 start at the same value. Swap them if it isn't. */
4327 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4330 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4331 high1, 1, high0, 1))))
4333 temp = in0_p, in0_p = in1_p, in1_p = temp;
4334 tem = low0, low0 = low1, low1 = tem;
4335 tem = high0, high0 = high1, high1 = tem;
4338 /* Now flag two cases, whether the ranges are disjoint or whether the
4339 second range is totally subsumed in the first. Note that the tests
4340 below are simplified by the ones above. */
4341 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4342 high0, 1, low1, 0));
4343 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4344 high1, 1, high0, 1));
4346 /* We now have four cases, depending on whether we are including or
4347 excluding the two ranges. */
4350 /* If they don't overlap, the result is false. If the second range
4351 is a subset it is the result. Otherwise, the range is from the start
4352 of the second to the end of the first. */
4354 in_p = 0, low = high = 0;
4356 in_p = 1, low = low1, high = high1;
4358 in_p = 1, low = low1, high = high0;
4361 else if (in0_p && ! in1_p)
4363 /* If they don't overlap, the result is the first range. If they are
4364 equal, the result is false. If the second range is a subset of the
4365 first, and the ranges begin at the same place, we go from just after
4366 the end of the second range to the end of the first. If the second
4367 range is not a subset of the first, or if it is a subset and both
4368 ranges end at the same place, the range starts at the start of the
4369 first range and ends just before the second range.
4370 Otherwise, we can't describe this as a single range. */
4372 in_p = 1, low = low0, high = high0;
4373 else if (lowequal && highequal)
4374 in_p = 0, low = high = 0;
4375 else if (subset && lowequal)
4377 low = range_successor (high1);
4381 else if (! subset || highequal)
4384 high = range_predecessor (low1);
4391 else if (! in0_p && in1_p)
4393 /* If they don't overlap, the result is the second range. If the second
4394 is a subset of the first, the result is false. Otherwise,
4395 the range starts just after the first range and ends at the
4396 end of the second. */
4398 in_p = 1, low = low1, high = high1;
4399 else if (subset || highequal)
4400 in_p = 0, low = high = 0;
4403 low = range_successor (high0);
4411 /* The case where we are excluding both ranges. Here the complex case
4412 is if they don't overlap. In that case, the only time we have a
4413 range is if they are adjacent. If the second is a subset of the
4414 first, the result is the first. Otherwise, the range to exclude
4415 starts at the beginning of the first range and ends at the end of the
4419 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4420 range_successor (high0),
4422 in_p = 0, low = low0, high = high1;
4425 /* Canonicalize - [min, x] into - [-, x]. */
4426 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4427 switch (TREE_CODE (TREE_TYPE (low0)))
/* A type narrower than its mode may have values below TYPE_MIN_VALUE
   at the mode level, so the canonicalization is unsafe then.  */
4430 if (TYPE_PRECISION (TREE_TYPE (low0))
4431 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4435 if (tree_int_cst_equal (low0,
4436 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4440 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4441 && integer_zerop (low0))
4448 /* Canonicalize - [x, max] into - [x, -]. */
4449 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4450 switch (TREE_CODE (TREE_TYPE (high1)))
4453 if (TYPE_PRECISION (TREE_TYPE (high1))
4454 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4458 if (tree_int_cst_equal (high1,
4459 TYPE_MAX_VALUE (TREE_TYPE (high1))))
/* For unsigned types, HIGH1 + 1 wrapping to zero identifies the
   type's maximum value.  */
4463 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4464 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4466 integer_one_node, 1)))
4473 /* The ranges might be also adjacent between the maximum and
4474 minimum values of the given type. For
4475 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4476 return + [x + 1, y - 1]. */
4477 if (low0 == 0 && high1 == 0)
4479 low = range_successor (high0);
4480 high = range_predecessor (low1);
4481 if (low == 0 || high == 0)
4491 in_p = 0, low = low0, high = high0;
4493 in_p = 0, low = low0, high = high1;
4496 *pin_p = in_p, *plow = low, *phigh = high;
4501 /* Subroutine of fold, looking inside expressions of the form
4502 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4503 of the COND_EXPR. This function is being used also to optimize
4504 A op B ? C : A, by reversing the comparison first.
4506 Return a folded expression whose code is not a COND_EXPR
4507 anymore, or NULL_TREE if no folding opportunity is found. */
4510 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4512 enum tree_code comp_code = TREE_CODE (arg0);
4513 tree arg00 = TREE_OPERAND (arg0, 0);
4514 tree arg01 = TREE_OPERAND (arg0, 1);
4515 tree arg1_type = TREE_TYPE (arg1);
4521 /* If we have A op 0 ? A : -A, consider applying the following
4524 A == 0? A : -A same as -A
4525 A != 0? A : -A same as A
4526 A >= 0? A : -A same as abs (A)
4527 A > 0? A : -A same as abs (A)
4528 A <= 0? A : -A same as -abs (A)
4529 A < 0? A : -A same as -abs (A)
4531 None of these transformations work for modes with signed
4532 zeros. If A is +/-0, the first two transformations will
4533 change the sign of the result (from +0 to -0, or vice
4534 versa). The last four will fix the sign of the result,
4535 even though the original expressions could be positive or
4536 negative, depending on the sign of A.
4538 Note that all these transformations are correct if A is
4539 NaN, since the two alternatives (A and -A) are also NaNs. */
4540 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4541 ? real_zerop (arg01)
4542 : integer_zerop (arg01))
4543 && ((TREE_CODE (arg2) == NEGATE_EXPR
4544 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4545 /* In the case that A is of the form X-Y, '-A' (arg2) may
4546 have already been folded to Y-X, check for that. */
4547 || (TREE_CODE (arg1) == MINUS_EXPR
4548 && TREE_CODE (arg2) == MINUS_EXPR
4549 && operand_equal_p (TREE_OPERAND (arg1, 0),
4550 TREE_OPERAND (arg2, 1), 0)
4551 && operand_equal_p (TREE_OPERAND (arg1, 1),
4552 TREE_OPERAND (arg2, 0), 0))))
/* NOTE(review): the switch on COMP_CODE is elided in this region; the
   returns below presumably implement the EQ/NE/GE,GT/LE,LT rows of the
   table above, in that order -- confirm against the full source.  */
4557 tem = fold_convert (arg1_type, arg1);
4558 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4561 return pedantic_non_lvalue (fold_convert (type, arg1));
4564 if (flag_trapping_math)
/* ABS_EXPR needs a signed operand; convert an unsigned A first.  */
4569 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4570 arg1 = fold_convert (lang_hooks.types.signed_type
4571 (TREE_TYPE (arg1)), arg1);
4572 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4573 return pedantic_non_lvalue (fold_convert (type, tem));
4576 if (flag_trapping_math)
4580 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4581 arg1 = fold_convert (lang_hooks.types.signed_type
4582 (TREE_TYPE (arg1)), arg1);
4583 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4584 return negate_expr (fold_convert (type, tem));
4586 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4590 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4591 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4592 both transformations are correct when A is NaN: A != 0
4593 is then true, and A == 0 is false. */
4595 if (integer_zerop (arg01) && integer_zerop (arg2))
4597 if (comp_code == NE_EXPR)
4598 return pedantic_non_lvalue (fold_convert (type, arg1));
4599 else if (comp_code == EQ_EXPR)
4600 return build_int_cst (type, 0);
4603 /* Try some transformations of A op B ? A : B.
4605 A == B? A : B same as B
4606 A != B? A : B same as A
4607 A >= B? A : B same as max (A, B)
4608 A > B? A : B same as max (B, A)
4609 A <= B? A : B same as min (A, B)
4610 A < B? A : B same as min (B, A)
4612 As above, these transformations don't work in the presence
4613 of signed zeros. For example, if A and B are zeros of
4614 opposite sign, the first two transformations will change
4615 the sign of the result. In the last four, the original
4616 expressions give different results for (A=+0, B=-0) and
4617 (A=-0, B=+0), but the transformed expressions do not.
4619 The first two transformations are correct if either A or B
4620 is a NaN. In the first transformation, the condition will
4621 be false, and B will indeed be chosen. In the case of the
4622 second transformation, the condition A != B will be true,
4623 and A will be chosen.
4625 The conversions to max() and min() are not correct if B is
4626 a number and A is not. The conditions in the original
4627 expressions will be false, so all four give B. The min()
4628 and max() versions would give a NaN instead. */
4629 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4630 /* Avoid these transformations if the COND_EXPR may be used
4631 as an lvalue in the C++ front-end. PR c++/19199. */
4633 || (strcmp (lang_hooks.name, "GNU C++") != 0
4634 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4635 || ! maybe_lvalue_p (arg1)
4636 || ! maybe_lvalue_p (arg2)))
4638 tree comp_op0 = arg00;
4639 tree comp_op1 = arg01;
4640 tree comp_type = TREE_TYPE (comp_op0);
4642 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4643 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4653 return pedantic_non_lvalue (fold_convert (type, arg2));
4655 return pedantic_non_lvalue (fold_convert (type, arg1));
4660 /* In C++ a ?: expression can be an lvalue, so put the
4661 operand which will be used if they are equal first
4662 so that we can convert this back to the
4663 corresponding COND_EXPR. */
/* MIN/MAX do not preserve NaN semantics, so only rewrite when the
   mode has no NaNs to honor.  */
4664 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4666 comp_op0 = fold_convert (comp_type, comp_op0);
4667 comp_op1 = fold_convert (comp_type, comp_op1);
4668 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4669 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4670 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4671 return pedantic_non_lvalue (fold_convert (type, tem));
4678 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4680 comp_op0 = fold_convert (comp_type, comp_op0);
4681 comp_op1 = fold_convert (comp_type, comp_op1);
4682 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4683 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4684 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4685 return pedantic_non_lvalue (fold_convert (type, tem));
4689 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4690 return pedantic_non_lvalue (fold_convert (type, arg2));
4693 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4694 return pedantic_non_lvalue (fold_convert (type, arg1));
4697 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4702 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4703 we might still be able to simplify this. For example,
4704 if C1 is one less or one more than C2, this might have started
4705 out as a MIN or MAX and been transformed by this function.
4706 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4708 if (INTEGRAL_TYPE_P (type)
4709 && TREE_CODE (arg01) == INTEGER_CST
4710 && TREE_CODE (arg2) == INTEGER_CST)
4714 /* We can replace A with C1 in this case. */
4715 arg1 = fold_convert (type, arg01);
4716 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4719 /* If C1 is C2 + 1, this is min(A, C2). */
/* The TYPE_MAX/MIN guards below ensure the C2 +/- 1 computation
   cannot wrap, which would invalidate the MIN/MAX rewrite.  */
4720 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4722 && operand_equal_p (arg01,
4723 const_binop (PLUS_EXPR, arg2,
4724 build_int_cst (type, 1), 0),
4726 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4731 /* If C1 is C2 - 1, this is min(A, C2). */
4732 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4734 && operand_equal_p (arg01,
4735 const_binop (MINUS_EXPR, arg2,
4736 build_int_cst (type, 1), 0),
4738 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4743 /* If C1 is C2 - 1, this is max(A, C2). */
4744 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4746 && operand_equal_p (arg01,
4747 const_binop (MINUS_EXPR, arg2,
4748 build_int_cst (type, 1), 0),
4750 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4755 /* If C1 is C2 + 1, this is max(A, C2). */
4756 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4758 && operand_equal_p (arg01,
4759 const_binop (PLUS_EXPR, arg2,
4760 build_int_cst (type, 1), 0),
4762 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
/* Nonzero when it is profitable to evaluate both sides of a
   short-circuit operation unconditionally (branches are expensive).  */
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4780 /* EXP is some logical combination of boolean tests. See if we can
4781 merge it into some range test. Return the new tree if so. */
4784 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4786 int or_op = (code == TRUTH_ORIF_EXPR
4787 || code == TRUTH_OR_EXPR);
4788 int in0_p, in1_p, in_p;
4789 tree low0, low1, low, high0, high1, high;
4790 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4791 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4794 /* If this is an OR operation, invert both sides; we will invert
4795 again at the end. */
/* De Morgan: A || B == !(!A && !B), so ORs are merged as inverted
   ANDs and the final result is inverted back below.  */
4797 in0_p = ! in0_p, in1_p = ! in1_p;
4799 /* If both expressions are the same, if we can merge the ranges, and we
4800 can build the range test, return it or it inverted. If one of the
4801 ranges is always true or always false, consider it to be the same
4802 expression as the other. */
4803 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4804 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4806 && 0 != (tem = (build_range_check (type,
4808 : rhs != 0 ? rhs : integer_zero_node,
4810 return or_op ? invert_truthvalue (tem) : tem;
4812 /* On machines where the branch cost is expensive, if this is a
4813 short-circuited branch and the underlying object on both sides
4814 is the same, make a non-short-circuit operation. */
4815 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4816 && lhs != 0 && rhs != 0
4817 && (code == TRUTH_ANDIF_EXPR
4818 || code == TRUTH_ORIF_EXPR)
4819 && operand_equal_p (lhs, rhs, 0))
4821 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4822 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4823 which cases we can't do this. */
4824 if (simple_operand_p (lhs))
4825 return build2 (code == TRUTH_ANDIF_EXPR
4826 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4829 else if (lang_hooks.decls.global_bindings_p () == 0
4830 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* SAVE_EXPR guarantees the shared operand is evaluated once even
   though both range checks reference it.  */
4832 tree common = save_expr (lhs);
4834 if (0 != (lhs = build_range_check (type, common,
4835 or_op ? ! in0_p : in0_p,
4837 && (0 != (rhs = build_range_check (type, common,
4838 or_op ? ! in1_p : in1_p,
4840 return build2 (code == TRUTH_ANDIF_EXPR
4841 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4849 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4850 bit value. Arrange things so the extra bits will be set to zero if and
4851 only if C is signed-extended to its full width. If MASK is nonzero,
4852 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4855 unextend (tree c, int p, int unsignedp, tree mask)
4857 tree type = TREE_TYPE (c);
4858 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Already full width, or unsigned (no sign bits to replicate):
   nothing to do.  */
4861 if (p == modesize || unsignedp)
4864 /* We work by getting just the sign bit into the low-order bit, then
4865 into the high-order bit, then sign-extend. We then XOR that value
4867 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4868 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4870 /* We must use a signed type in order to get an arithmetic right shift.
4871 However, we must also avoid introducing accidental overflows, so that
4872 a subsequent call to integer_zerop will work. Hence we must
4873 do the type conversion here. At this point, the constant is either
4874 zero or one, and the conversion to a signed type can never overflow.
4875 We could get an overflow if this conversion is done anywhere else. */
4876 if (TYPE_UNSIGNED (type))
4877 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
/* Move the isolated sign bit to the top, then arithmetic-shift it
   down so it fills all bits above position P-1.  */
4879 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4880 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4882 temp = const_binop (BIT_AND_EXPR, temp,
4883 fold_convert (TREE_TYPE (c), mask), 0);
4884 /* If necessary, convert the type back to match the type of C. */
4885 if (TYPE_UNSIGNED (type))
4886 temp = fold_convert (type, temp);
4888 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4891 /* Find ways of folding logical expressions of LHS and RHS:
4892 Try to merge two comparisons to the same innermost item.
4893 Look for range tests like "ch >= '0' && ch <= '9'".
4894 Look for combinations of simple terms on machines with expensive branches
4895 and evaluate the RHS unconditionally.
4897 For example, if we have p->a == 2 && p->b == 4 and we can make an
4898 object large enough to span both A and B, we can do this with a comparison
4899 against the object ANDed with the a mask.
4901 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4902 operations to do this with one comparison.
4904 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4905 function and the one above.
4907 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4908 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4910 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4913 We return the simplified tree or 0 if no optimization is possible. */
4916 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4918 /* If this is the "or" of two comparisons, we can do something if
4919 the comparisons are NE_EXPR. If this is the "and", we can do something
4920 if the comparisons are EQ_EXPR. I.e.,
4921 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4923 WANTED_CODE is this operation code. For single bit fields, we can
4924 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4925 comparison for one-bit fields. */
4927 enum tree_code wanted_code;
4928 enum tree_code lcode, rcode;
4929 tree ll_arg, lr_arg, rl_arg, rr_arg;
4930 tree ll_inner, lr_inner, rl_inner, rr_inner;
4931 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4932 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4933 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4934 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4935 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4936 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4937 enum machine_mode lnmode, rnmode;
4938 tree ll_mask, lr_mask, rl_mask, rr_mask;
4939 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4940 tree l_const, r_const;
4941 tree lntype, rntype, result;
4942 int first_bit, end_bit;
4944 tree orig_lhs = lhs, orig_rhs = rhs;
4945 enum tree_code orig_code = code;
4947 /* Start by getting the comparison codes. Fail if anything is volatile.
4948 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4949 it were surrounded with a NE_EXPR. */
4951 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4954 lcode = TREE_CODE (lhs);
4955 rcode = TREE_CODE (rhs);
4957 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4959 lhs = build2 (NE_EXPR, truth_type, lhs,
4960 build_int_cst (TREE_TYPE (lhs), 0));
4964 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4966 rhs = build2 (NE_EXPR, truth_type, rhs,
4967 build_int_cst (TREE_TYPE (rhs), 0));
4971 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4972 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4975 ll_arg = TREE_OPERAND (lhs, 0);
4976 lr_arg = TREE_OPERAND (lhs, 1);
4977 rl_arg = TREE_OPERAND (rhs, 0);
4978 rr_arg = TREE_OPERAND (rhs, 1);
4980 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4981 if (simple_operand_p (ll_arg)
4982 && simple_operand_p (lr_arg))
4985 if (operand_equal_p (ll_arg, rl_arg, 0)
4986 && operand_equal_p (lr_arg, rr_arg, 0))
4988 result = combine_comparisons (code, lcode, rcode,
4989 truth_type, ll_arg, lr_arg);
4993 else if (operand_equal_p (ll_arg, rr_arg, 0)
4994 && operand_equal_p (lr_arg, rl_arg, 0))
4996 result = combine_comparisons (code, lcode,
4997 swap_tree_comparison (rcode),
4998 truth_type, ll_arg, lr_arg);
5004 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5005 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5007 /* If the RHS can be evaluated unconditionally and its operands are
5008 simple, it wins to evaluate the RHS unconditionally on machines
5009 with expensive branches. In this case, this isn't a comparison
5010 that can be merged. Avoid doing this if the RHS is a floating-point
5011 comparison since those can trap. */
5013 if (BRANCH_COST >= 2
5014 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5015 && simple_operand_p (rl_arg)
5016 && simple_operand_p (rr_arg))
5018 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5019 if (code == TRUTH_OR_EXPR
5020 && lcode == NE_EXPR && integer_zerop (lr_arg)
5021 && rcode == NE_EXPR && integer_zerop (rr_arg)
5022 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5023 return build2 (NE_EXPR, truth_type,
5024 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5026 build_int_cst (TREE_TYPE (ll_arg), 0));
5028 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5029 if (code == TRUTH_AND_EXPR
5030 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5031 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5032 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5033 return build2 (EQ_EXPR, truth_type,
5034 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5036 build_int_cst (TREE_TYPE (ll_arg), 0));
5038 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5040 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5041 return build2 (code, truth_type, lhs, rhs);
5046 /* See if the comparisons can be merged. Then get all the parameters for
5049 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5050 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5054 ll_inner = decode_field_reference (ll_arg,
5055 &ll_bitsize, &ll_bitpos, &ll_mode,
5056 &ll_unsignedp, &volatilep, &ll_mask,
5058 lr_inner = decode_field_reference (lr_arg,
5059 &lr_bitsize, &lr_bitpos, &lr_mode,
5060 &lr_unsignedp, &volatilep, &lr_mask,
5062 rl_inner = decode_field_reference (rl_arg,
5063 &rl_bitsize, &rl_bitpos, &rl_mode,
5064 &rl_unsignedp, &volatilep, &rl_mask,
5066 rr_inner = decode_field_reference (rr_arg,
5067 &rr_bitsize, &rr_bitpos, &rr_mode,
5068 &rr_unsignedp, &volatilep, &rr_mask,
5071 /* It must be true that the inner operation on the lhs of each
5072 comparison must be the same if we are to be able to do anything.
5073 Then see if we have constants. If not, the same must be true for
5075 if (volatilep || ll_inner == 0 || rl_inner == 0
5076 || ! operand_equal_p (ll_inner, rl_inner, 0))
5079 if (TREE_CODE (lr_arg) == INTEGER_CST
5080 && TREE_CODE (rr_arg) == INTEGER_CST)
5081 l_const = lr_arg, r_const = rr_arg;
5082 else if (lr_inner == 0 || rr_inner == 0
5083 || ! operand_equal_p (lr_inner, rr_inner, 0))
5086 l_const = r_const = 0;
5088 /* If either comparison code is not correct for our logical operation,
5089 fail. However, we can convert a one-bit comparison against zero into
5090 the opposite comparison against that bit being set in the field. */
5092 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5093 if (lcode != wanted_code)
5095 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5097 /* Make the left operand unsigned, since we are only interested
5098 in the value of one bit. Otherwise we are doing the wrong
5107 /* This is analogous to the code for l_const above. */
5108 if (rcode != wanted_code)
5110 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5119 /* See if we can find a mode that contains both fields being compared on
5120 the left. If we can't, fail. Otherwise, update all constants and masks
5121 to be relative to a field of that size. */
5122 first_bit = MIN (ll_bitpos, rl_bitpos);
5123 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5124 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5125 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5127 if (lnmode == VOIDmode)
5130 lnbitsize = GET_MODE_BITSIZE (lnmode);
5131 lnbitpos = first_bit & ~ (lnbitsize - 1);
5132 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5133 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5135 if (BYTES_BIG_ENDIAN)
5137 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5138 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5141 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5142 size_int (xll_bitpos), 0);
5143 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5144 size_int (xrl_bitpos), 0);
5148 l_const = fold_convert (lntype, l_const);
5149 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5150 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5151 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5152 fold_build1 (BIT_NOT_EXPR,
5156 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5158 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5163 r_const = fold_convert (lntype, r_const);
5164 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5165 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5166 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5167 fold_build1 (BIT_NOT_EXPR,
5171 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5173 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5177 /* If the right sides are not constant, do the same for it. Also,
5178 disallow this optimization if a size or signedness mismatch occurs
5179 between the left and right sides. */
5182 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5183 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5184 /* Make sure the two fields on the right
5185 correspond to the left without being swapped. */
5186 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5189 first_bit = MIN (lr_bitpos, rr_bitpos);
5190 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5191 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5192 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5194 if (rnmode == VOIDmode)
5197 rnbitsize = GET_MODE_BITSIZE (rnmode);
5198 rnbitpos = first_bit & ~ (rnbitsize - 1);
5199 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5200 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5202 if (BYTES_BIG_ENDIAN)
5204 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5205 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5208 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5209 size_int (xlr_bitpos), 0);
5210 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5211 size_int (xrr_bitpos), 0);
5213 /* Make a mask that corresponds to both fields being compared.
5214 Do this for both items being compared. If the operands are the
5215 same size and the bits being compared are in the same position
5216 then we can do this by masking both and comparing the masked
5218 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5219 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5220 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5222 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5223 ll_unsignedp || rl_unsignedp);
5224 if (! all_ones_mask_p (ll_mask, lnbitsize))
5225 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5227 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5228 lr_unsignedp || rr_unsignedp);
5229 if (! all_ones_mask_p (lr_mask, rnbitsize))
5230 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5232 return build2 (wanted_code, truth_type, lhs, rhs);
5235 /* There is still another way we can do something: If both pairs of
5236 fields being compared are adjacent, we may be able to make a wider
5237 field containing them both.
5239 Note that we still must mask the lhs/rhs expressions. Furthermore,
5240 the mask must be shifted to account for the shift done by
5241 make_bit_field_ref. */
5242 if ((ll_bitsize + ll_bitpos == rl_bitpos
5243 && lr_bitsize + lr_bitpos == rr_bitpos)
5244 || (ll_bitpos == rl_bitpos + rl_bitsize
5245 && lr_bitpos == rr_bitpos + rr_bitsize))
5249 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5250 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5251 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5252 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5254 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5255 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5256 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5257 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5259 /* Convert to the smaller type before masking out unwanted bits. */
5261 if (lntype != rntype)
5263 if (lnbitsize > rnbitsize)
5265 lhs = fold_convert (rntype, lhs);
5266 ll_mask = fold_convert (rntype, ll_mask);
5269 else if (lnbitsize < rnbitsize)
5271 rhs = fold_convert (lntype, rhs);
5272 lr_mask = fold_convert (lntype, lr_mask);
5277 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5278 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5280 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5281 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5283 return build2 (wanted_code, truth_type, lhs, rhs);
5289 /* Handle the case of comparisons with constants. If there is something in
5290 common between the masks, those bits of the constants must be the same.
5291 If not, the condition is always false. Test for this to avoid generating
5292 incorrect code below. */
5293 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5294 if (! integer_zerop (result)
5295 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5296 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5298 if (wanted_code == NE_EXPR)
5300 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5301 return constant_boolean_node (true, truth_type);
5305 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5306 return constant_boolean_node (false, truth_type);
5310 /* Construct the expression we will return. First get the component
5311 reference we will make. Unless the mask is all ones the width of
5312 that field, perform the mask operation. Then compare with the
5314 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5315 ll_unsignedp || rl_unsignedp);
5317 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5318 if (! all_ones_mask_p (ll_mask, lnbitsize))
5319 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5321 return build2 (wanted_code, truth_type, result,
5322 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
/* Optimize a comparison CODE of a MIN_EXPR or MAX_EXPR against an
   integer constant.  Only EQ_EXPR and GT_EXPR are handled directly;
   the remaining comparison codes are reduced to those two by
   inversion or by an EQ||GT disjunction.
   NOTE(review): the declarations of "arg0", "minmax_const" and
   "inner", plus the switch header and case labels, fall on source
   lines absent from this excerpt.  */
5325 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5329 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5332 enum tree_code op_code;
5333 tree comp_const = op1;
5335 int consts_equal, consts_lt;
5338 STRIP_SIGN_NOPS (arg0);
5340 op_code = TREE_CODE (arg0);
5341 minmax_const = TREE_OPERAND (arg0, 1);
/* Precompute the two orderings of the MIN/MAX constant against the
   comparison constant; the case analysis below keys off these.  */
5342 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5343 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5344 inner = TREE_OPERAND (arg0, 0);
5346 /* If something does not permit us to optimize, return the original tree. */
5347 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5348 || TREE_CODE (comp_const) != INTEGER_CST
5349 || TREE_OVERFLOW (comp_const)
5350 || TREE_CODE (minmax_const) != INTEGER_CST
5351 || TREE_OVERFLOW (minmax_const))
5354 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5355 and GT_EXPR, doing the rest with recursive calls using logical
5359 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* Reduce NE/LT/LE to the inverted comparison and negate the result.  */
5361 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5364 return invert_truthvalue (tem);
/* GE is handled as (EQ || GT).  */
5370 fold_build2 (TRUTH_ORIF_EXPR, type,
5371 optimize_minmax_comparison
5372 (EQ_EXPR, type, arg0, comp_const),
5373 optimize_minmax_comparison
5374 (GT_EXPR, type, arg0, comp_const));
5377 if (op_code == MAX_EXPR && consts_equal)
5378 /* MAX (X, 0) == 0 -> X <= 0 */
5379 return fold_build2 (LE_EXPR, type, inner, comp_const);
5381 else if (op_code == MAX_EXPR && consts_lt)
5382 /* MAX (X, 0) == 5 -> X == 5 */
5383 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5385 else if (op_code == MAX_EXPR)
5386 /* MAX (X, 0) == -1 -> false */
5387 return omit_one_operand (type, integer_zero_node, inner);
5389 else if (consts_equal)
5390 /* MIN (X, 0) == 0 -> X >= 0 */
5391 return fold_build2 (GE_EXPR, type, inner, comp_const);
5394 /* MIN (X, 0) == 5 -> false */
5395 return omit_one_operand (type, integer_zero_node, inner);
5398 /* MIN (X, 0) == -1 -> X == -1 */
5399 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5402 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5403 /* MAX (X, 0) > 0 -> X > 0
5404 MAX (X, 0) > 5 -> X > 5 */
5405 return fold_build2 (GT_EXPR, type, inner, comp_const);
5407 else if (op_code == MAX_EXPR)
5408 /* MAX (X, 0) > -1 -> true */
5409 return omit_one_operand (type, integer_one_node, inner);
5411 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5412 /* MIN (X, 0) > 0 -> false
5413 MIN (X, 0) > 5 -> false */
5414 return omit_one_operand (type, integer_zero_node, inner);
5417 /* MIN (X, 0) > -1 -> X > -1 */
5418 return fold_build2 (GT_EXPR, type, inner, comp_const);
5425 /* T is an integer expression that is being multiplied, divided, or taken a
5426 modulus (CODE says which and what kind of divide or modulus) by a
5427 constant C. See if we can eliminate that operation by folding it with
5428 other operations already in T. WIDE_TYPE, if non-null, is a type that
5429 should be used for the computation if wider than our type.
5431 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5432 (X * 2) + (Y * 4). We must, however, be assured that either the original
5433 expression would not overflow or that overflow is undefined for the type
5434 in the language in question.
5436 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5437 the machine has a multiply-accumulate insn or that this is part of an
5438 addressing calculation.
5440 If we return a non-null expression, it is an equivalent form of the
5441 original computation, but need not be in the original type. */
/* Thin wrapper: guards the recursion depth, then delegates the real
   work to extract_muldiv_1.  NOTE(review): the depth bookkeeping and
   the final return are on source lines absent from this excerpt.  */
5444 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5446 /* To avoid exponential search depth, refuse to allow recursion past
5447 three levels. Beyond that (1) it's highly unlikely that we'll find
5448 something interesting and (2) we've probably processed it before
5449 when we built the inner expression. */
5458 ret = extract_muldiv_1 (t, c, code, wide_type);
/* Worker for extract_muldiv.  Dispatches on the tree code of T and
   recurses into sub-expressions, distributing the multiply / divide /
   modulus by C where that is known to be safe with respect to
   overflow and signedness.  NOTE(review): several lines (case labels,
   returns, braces) are absent from this excerpt.  */
5465 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5467 tree type = TREE_TYPE (t);
5468 enum tree_code tcode = TREE_CODE (t);
/* Compute in WIDE_TYPE when it is strictly wider than T's type.  */
5469 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5470 > GET_MODE_SIZE (TYPE_MODE (type)))
5471 ? wide_type : type);
5473 int same_p = tcode == code;
5474 tree op0 = NULL_TREE, op1 = NULL_TREE;
5476 /* Don't deal with constants of zero here; they confuse the code below. */
5477 if (integer_zerop (c))
5480 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5481 op0 = TREE_OPERAND (t, 0);
5483 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5484 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5486 /* Note that we need not handle conditional operations here since fold
5487 already handles those cases. So just do arithmetic here. */
5491 /* For a constant, we can always simplify if we are a multiply
5492 or (for divide and modulus) if it is a multiple of our constant. */
5493 if (code == MULT_EXPR
5494 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5495 return const_binop (code, fold_convert (ctype, t),
5496 fold_convert (ctype, c), 0);
5499 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5500 /* If op0 is an expression ... */
5501 if ((COMPARISON_CLASS_P (op0)
5502 || UNARY_CLASS_P (op0)
5503 || BINARY_CLASS_P (op0)
5504 || EXPRESSION_CLASS_P (op0))
5505 /* ... and is unsigned, and its type is smaller than ctype,
5506 then we cannot pass through as widening. */
5507 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5508 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5509 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5510 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5511 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5512 /* ... or this is a truncation (t is narrower than op0),
5513 then we cannot pass through this narrowing. */
5514 || (GET_MODE_SIZE (TYPE_MODE (type))
5515 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5516 /* ... or signedness changes for division or modulus,
5517 then we cannot pass through this conversion. */
5518 || (code != MULT_EXPR
5519 && (TYPE_UNSIGNED (ctype)
5520 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5523 /* Pass the constant down and see if we can make a simplification. If
5524 we can, replace this expression with the inner simplification for
5525 possible later conversion to our or some other type. */
5526 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5527 && TREE_CODE (t2) == INTEGER_CST
5528 && !TREE_OVERFLOW (t2)
5529 && (0 != (t1 = extract_muldiv (op0, t2, code,
5531 ? ctype : NULL_TREE))))
5536 /* If widening the type changes it from signed to unsigned, then we
5537 must avoid building ABS_EXPR itself as unsigned. */
5538 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5540 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5541 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5543 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5544 return fold_convert (ctype, t1);
5550 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5551 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5554 case MIN_EXPR: case MAX_EXPR:
5555 /* If widening the type changes the signedness, then we can't perform
5556 this optimization as that changes the result. */
5557 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5560 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5561 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5562 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* Dividing by a negative constant flips the ordering, so MIN and
   MAX must be exchanged.  */
5564 if (tree_int_cst_sgn (c) < 0)
5565 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5567 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5568 fold_convert (ctype, t2));
5572 case LSHIFT_EXPR: case RSHIFT_EXPR:
5573 /* If the second operand is constant, this is a multiplication
5574 or floor division, by a power of two, so we can treat it that
5575 way unless the multiplier or divisor overflows. Signed
5576 left-shift overflow is implementation-defined rather than
5577 undefined in C90, so do not convert signed left shift into
5579 if (TREE_CODE (op1) == INTEGER_CST
5580 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5581 /* const_binop may not detect overflow correctly,
5582 so check for it explicitly here. */
5583 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5584 && TREE_INT_CST_HIGH (op1) == 0
5585 && 0 != (t1 = fold_convert (ctype,
5586 const_binop (LSHIFT_EXPR,
5589 && !TREE_OVERFLOW (t1))
5590 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5591 ? MULT_EXPR : FLOOR_DIV_EXPR,
5592 ctype, fold_convert (ctype, op0), t1),
5593 c, code, wide_type);
5596 case PLUS_EXPR: case MINUS_EXPR:
5597 /* See if we can eliminate the operation on both sides. If we can, we
5598 can return a new PLUS or MINUS. If we can't, the only remaining
5599 cases where we can do anything are if the second operand is a
5601 t1 = extract_muldiv (op0, c, code, wide_type);
5602 t2 = extract_muldiv (op1, c, code, wide_type);
5603 if (t1 != 0 && t2 != 0
5604 && (code == MULT_EXPR
5605 /* If not multiplication, we can only do this if both operands
5606 are divisible by c. */
5607 || (multiple_of_p (ctype, op0, c)
5608 && multiple_of_p (ctype, op1, c))))
5609 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5610 fold_convert (ctype, t2));
5612 /* If this was a subtraction, negate OP1 and set it to be an addition.
5613 This simplifies the logic below. */
5614 if (tcode == MINUS_EXPR)
5615 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5617 if (TREE_CODE (op1) != INTEGER_CST)
5620 /* If either OP1 or C are negative, this optimization is not safe for
5621 some of the division and remainder types while for others we need
5622 to change the code. */
5623 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5625 if (code == CEIL_DIV_EXPR)
5626 code = FLOOR_DIV_EXPR;
5627 else if (code == FLOOR_DIV_EXPR)
5628 code = CEIL_DIV_EXPR;
5629 else if (code != MULT_EXPR
5630 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5634 /* If it's a multiply or a division/modulus operation of a multiple
5635 of our constant, do the operation and verify it doesn't overflow. */
5636 if (code == MULT_EXPR
5637 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5639 op1 = const_binop (code, fold_convert (ctype, op1),
5640 fold_convert (ctype, c), 0);
5641 /* We allow the constant to overflow with wrapping semantics. */
5643 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5649 /* If we have an unsigned type that is not a sizetype, we cannot widen
5650 the operation since it will change the result if the original
5651 computation overflowed. */
5652 if (TYPE_UNSIGNED (ctype)
5653 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5657 /* If we were able to eliminate our operation from the first side,
5658 apply our operation to the second side and reform the PLUS. */
5659 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5660 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5662 /* The last case is if we are a multiply. In that case, we can
5663 apply the distributive law to commute the multiply and addition
5664 if the multiplication of the constants doesn't overflow. */
5665 if (code == MULT_EXPR)
5666 return fold_build2 (tcode, ctype,
5667 fold_build2 (code, ctype,
5668 fold_convert (ctype, op0),
5669 fold_convert (ctype, c)),
5675 /* We have a special case here if we are doing something like
5676 (C * 8) % 4 since we know that's zero. */
5677 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5678 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5679 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5680 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5681 return omit_one_operand (type, integer_zero_node, op0);
5683 /* ... fall through ... */
5685 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5686 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5687 /* If we can extract our operation from the LHS, do so and return a
5688 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5689 do something only if the second operand is a constant. */
5691 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5692 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5693 fold_convert (ctype, op1));
5694 else if (tcode == MULT_EXPR && code == MULT_EXPR
5695 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5696 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5697 fold_convert (ctype, t1));
5698 else if (TREE_CODE (op1) != INTEGER_CST)
5701 /* If these are the same operation types, we can associate them
5702 assuming no overflow. */
5704 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5705 fold_convert (ctype, c), 0))
5706 && !TREE_OVERFLOW (t1))
5707 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5709 /* If these operations "cancel" each other, we have the main
5710 optimizations of this pass, which occur when either constant is a
5711 multiple of the other, in which case we replace this with either an
5712 operation or CODE or TCODE.
5714 If we have an unsigned type that is not a sizetype, we cannot do
5715 this since it will change the result if the original computation
5717 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5718 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5719 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5720 || (tcode == MULT_EXPR
5721 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5722 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5724 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5725 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5726 fold_convert (ctype,
5727 const_binop (TRUNC_DIV_EXPR,
5729 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5730 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5731 fold_convert (ctype,
5732 const_binop (TRUNC_DIV_EXPR,
5744 /* Return a node which has the indicated constant VALUE (either 0 or
5745 1), and is of the indicated TYPE. */
5748 constant_boolean_node (int value, tree type)
5750 if (type == integer_type_node)
5751 return value ? integer_one_node : integer_zero_node;
5752 else if (type == boolean_type_node)
5753 return value ? boolean_true_node : boolean_false_node;
5755 return build_int_cst (type, value);
5759 /* Return true if expr looks like an ARRAY_REF and set base and
5760 offset to the appropriate trees. If there is no offset,
5761 offset is set to NULL_TREE. Base will be canonicalized to
5762 something you can get the element type from using
5763 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5764 in bytes to the base. */
/* NOTE(review): the "return true/false" statements of this function
   fall on source lines absent from this excerpt.  */
5767 extract_array_ref (tree expr, tree *base, tree *offset)
5769 /* One canonical form is a PLUS_EXPR with the first
5770 argument being an ADDR_EXPR with a possible NOP_EXPR
5772 if (TREE_CODE (expr) == PLUS_EXPR)
5774 tree op0 = TREE_OPERAND (expr, 0);
5775 tree inner_base, dummy1;
5776 /* Strip NOP_EXPRs here because the C frontends and/or
5777 folders present us (int *)&x.a + 4B possibly. */
/* Recurse on the first addend; any offset it yields is summed
   with this PLUS_EXPR's second operand.  */
5779 if (extract_array_ref (op0, &inner_base, &dummy1))
5782 if (dummy1 == NULL_TREE)
5783 *offset = TREE_OPERAND (expr, 1);
5785 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5786 dummy1, TREE_OPERAND (expr, 1));
5790 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5791 which we transform into an ADDR_EXPR with appropriate
5792 offset. For other arguments to the ADDR_EXPR we assume
5793 zero offset and as such do not care about the ADDR_EXPR
5794 type and strip possible nops from it. */
5795 else if (TREE_CODE (expr) == ADDR_EXPR)
5797 tree op0 = TREE_OPERAND (expr, 0);
5798 if (TREE_CODE (op0) == ARRAY_REF)
5800 tree idx = TREE_OPERAND (op0, 1);
5801 *base = TREE_OPERAND (op0, 0);
/* Byte offset = index * element size.  */
5802 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5803 array_ref_element_size (op0));
5807 /* Handle array-to-pointer decay as &a. */
5808 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5809 *base = TREE_OPERAND (expr, 0);
5812 *offset = NULL_TREE;
5816 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5817 else if (SSA_VAR_P (expr)
5818 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5821 *offset = NULL_TREE;
5829 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5830 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5831 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5832 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5833 COND is the first argument to CODE; otherwise (as in the example
5834 given here), it is the second argument. TYPE is the type of the
5835 original expression. Return NULL_TREE if no simplification is
5839 fold_binary_op_with_conditional_arg (enum tree_code code,
5840 tree type, tree op0, tree op1,
5841 tree cond, tree arg, int cond_first_p)
/* Pick the types of the conditional operand and of ARG according to
   which side of CODE the conditional sits on.  */
5843 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5844 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5845 tree test, true_value, false_value;
5846 tree lhs = NULL_TREE;
5847 tree rhs = NULL_TREE;
5849 /* This transformation is only worthwhile if we don't have to wrap
5850 arg in a SAVE_EXPR, and the operation can be simplified on at least
5851 one of the branches once it's pushed inside the COND_EXPR. */
5852 if (!TREE_CONSTANT (arg))
5855 if (TREE_CODE (cond) == COND_EXPR)
5857 test = TREE_OPERAND (cond, 0);
5858 true_value = TREE_OPERAND (cond, 1);
5859 false_value = TREE_OPERAND (cond, 2);
5860 /* If this operand throws an expression, then it does not make
5861 sense to try to perform a logical or arithmetic operation
5863 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5865 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* COND is a bare comparison (the `(x < y)' form): treat it as a
   conditional selecting between constant true and false.  */
5870 tree testtype = TREE_TYPE (cond);
5872 true_value = constant_boolean_node (true, testtype);
5873 false_value = constant_boolean_node (false, testtype);
5876 arg = fold_convert (arg_type, arg);
/* Fold CODE with ARG into each arm, honouring operand order.  */
5879 true_value = fold_convert (cond_type, true_value);
5881 lhs = fold_build2 (code, type, true_value, arg);
5883 lhs = fold_build2 (code, type, arg, true_value);
5887 false_value = fold_convert (cond_type, false_value);
5889 rhs = fold_build2 (code, type, false_value, arg);
5891 rhs = fold_build2 (code, type, arg, false_value);
5894 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5895 return fold_convert (type, test);
5899 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5901 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5902 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5903 ADDEND is the same as X.
5905 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5906 and finite. The problematic cases are when X is zero, and its mode
5907 has signed zeros. In the case of rounding towards -infinity,
5908 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5909 modes, X + 0 is not the same as X because -0 + 0 is 0. */
/* NOTE(review): the early "return false" lines of this function are
   absent from this excerpt.  */
5912 fold_real_zero_addition_p (tree type, tree addend, int negate)
/* Only a +0.0 or -0.0 addend is of interest.  */
5914 if (!real_zerop (addend))
5917 /* Don't allow the fold with -fsignaling-nans. */
5918 if (HONOR_SNANS (TYPE_MODE (type)))
5921 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5922 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5925 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5926 if (TREE_CODE (addend) == REAL_CST
5927 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5930 /* The mode has signed zeros, and we have to honor their sign.
5931 In this situation, there is only one case we can return true for.
5932 X - 0 is the same as X unless rounding towards -infinity is
5934 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
/* NOTE(review): lines elided by extraction (declarations of c/c2,
   braces, the dconst0-vs-c2 arguments inside the TRUTH_ANDIF_EXPR
   builds, and the final return path are missing).  Code kept
   byte-identical.  */
5937 /* Subroutine of fold() that checks comparisons of built-in math
5938 functions against real constants.
5940 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5941 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5942 is the type of the result and ARG0 and ARG1 are the operands of the
5943 comparison. ARG1 must be a TREE_REAL_CST.
5945 The function returns the constant folded tree if a simplification
5946 can be made, and NULL_TREE otherwise. */
5949 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5950 tree type, tree arg0, tree arg1)
/* Only sqrt-family builtins are simplified here.  */
5954 if (BUILTIN_SQRT_P (fcode))
/* ARG is sqrt's argument; ARG0 is the call itself.  */
5956 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5957 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5959 c = TREE_REAL_CST (arg1);
/* Case 1: comparing sqrt(x) against a negative constant.  */
5960 if (REAL_VALUE_NEGATIVE (c))
5962 /* sqrt(x) < y is always false, if y is negative. */
5963 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5964 return omit_one_operand (type, integer_zero_node, arg);
5966 /* sqrt(x) > y is always true, if y is negative and we
5967 don't care about NaNs, i.e. negative values of x. */
5968 if (code == NE_EXPR || !HONOR_NANS (mode))
5969 return omit_one_operand (type, integer_one_node, arg);
5971 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5972 return fold_build2 (GE_EXPR, type, arg,
5973 build_real (TREE_TYPE (arg), dconst0));
/* Case 2: sqrt(x) >/>= c with c non-negative: compare x with c*c,
   watching for overflow of the square.  */
5975 else if (code == GT_EXPR || code == GE_EXPR)
5979 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5980 real_convert (&c2, mode, &c2);
5982 if (REAL_VALUE_ISINF (c2))
5984 /* sqrt(x) > y is x == +Inf, when y is very large. */
5985 if (HONOR_INFINITIES (mode))
5986 return fold_build2 (EQ_EXPR, type, arg,
5987 build_real (TREE_TYPE (arg), c2))
5989 /* sqrt(x) > y is always false, when y is very large
5990 and we don't care about infinities. */
5991 return omit_one_operand (type, integer_zero_node, arg);
5994 /* sqrt(x) > c is the same as x > c*c. */
5995 return fold_build2 (code, type, arg,
5996 build_real (TREE_TYPE (arg), c2));
/* Case 3: sqrt(x) </<= c with c non-negative.  */
5998 else if (code == LT_EXPR || code == LE_EXPR)
6002 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6003 real_convert (&c2, mode, &c2);
6005 if (REAL_VALUE_ISINF (c2))
6007 /* sqrt(x) < y is always true, when y is a very large
6008 value and we don't care about NaNs or Infinities. */
6009 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6010 return omit_one_operand (type, integer_one_node, arg);
6012 /* sqrt(x) < y is x != +Inf when y is very large and we
6013 don't care about NaNs. */
6014 if (! HONOR_NANS (mode))
6015 return fold_build2 (NE_EXPR, type, arg,
6016 build_real (TREE_TYPE (arg), c2));
6018 /* sqrt(x) < y is x >= 0 when y is very large and we
6019 don't care about Infinities. */
6020 if (! HONOR_INFINITIES (mode))
6021 return fold_build2 (GE_EXPR, type, arg,
6022 build_real (TREE_TYPE (arg), dconst0));
6024 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
/* save_expr is only legal when not at global scope and ARG
   contains no PLACEHOLDER_EXPR.  */
6025 if (lang_hooks.decls.global_bindings_p () != 0
6026 || CONTAINS_PLACEHOLDER_P (arg))
6029 arg = save_expr (arg);
6030 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6031 fold_build2 (GE_EXPR, type, arg,
6032 build_real (TREE_TYPE (arg),
6034 fold_build2 (NE_EXPR, type, arg,
6035 build_real (TREE_TYPE (arg),
6039 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6040 if (! HONOR_NANS (mode))
6041 return fold_build2 (code, type, arg,
6042 build_real (TREE_TYPE (arg), c2));
6044 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6045 if (lang_hooks.decls.global_bindings_p () == 0
6046 && ! CONTAINS_PLACEHOLDER_P (arg))
6048 arg = save_expr (arg);
6049 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6050 fold_build2 (GE_EXPR, type, arg,
6051 build_real (TREE_TYPE (arg),
6053 fold_build2 (code, type, arg,
6054 build_real (TREE_TYPE (arg),
/* NOTE(review): the `switch (code)` statement and its case labels are
   elided by the extraction -- each commented group below is one case
   arm of a switch on the comparison code.  Code kept byte-identical.  */
6063 /* Subroutine of fold() that optimizes comparisons against Infinities,
6064 either +Inf or -Inf.
6066 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6067 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6068 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6070 The function returns the constant folded tree if a simplification
6071 can be made, and NULL_TREE otherwise. */
6074 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6076 enum machine_mode mode;
6077 REAL_VALUE_TYPE max;
6081 mode = TYPE_MODE (TREE_TYPE (arg0));
6083 /* For negative infinity swap the sense of the comparison. */
6084 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6086 code = swap_tree_comparison (code);
/* (case GT_EXPR, per the comment below -- label elided.)  */
6091 /* x > +Inf is always false, if with ignore sNANs. */
6092 if (HONOR_SNANS (mode))
6094 return omit_one_operand (type, integer_zero_node, arg0);
/* (case LE_EXPR -- label elided.)  */
6097 /* x <= +Inf is always true, if we don't case about NaNs. */
6098 if (! HONOR_NANS (mode))
6099 return omit_one_operand (type, integer_one_node, arg0);
6101 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6102 if (lang_hooks.decls.global_bindings_p () == 0
6103 && ! CONTAINS_PLACEHOLDER_P (arg0))
6105 arg0 = save_expr (arg0);
6106 return fold_build2 (EQ_EXPR, type, arg0, arg0);
/* (case EQ_EXPR / GE_EXPR -- labels elided.)  */
6112 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6113 real_maxval (&max, neg, mode);
6114 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6115 arg0, build_real (TREE_TYPE (arg0), max));
/* (case LT_EXPR -- label elided.)  */
6118 /* x < +Inf is always equal to x <= DBL_MAX. */
6119 real_maxval (&max, neg, mode);
6120 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6121 arg0, build_real (TREE_TYPE (arg0), max));
/* (case NE_EXPR -- label elided.)  */
6124 /* x != +Inf is always equal to !(x > DBL_MAX). */
6125 real_maxval (&max, neg, mode);
6126 if (! HONOR_NANS (mode))
6127 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6128 arg0, build_real (TREE_TYPE (arg0), max));
/* With NaNs, negate the opposite comparison instead.  */
6130 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6131 arg0, build_real (TREE_TYPE (arg0), max));
6132 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
/* NOTE(review): switch labels, braces and several declarations are
   elided by the extraction; the header comment's "ARG1 must be a
   TREE_REAL_CST" looks like a copy-paste slip (ARG1 is clearly an
   INTEGER_CST here) -- flagged, not changed.  Code kept
   byte-identical.  */
6141 /* Subroutine of fold() that optimizes comparisons of a division by
6142 a nonzero integer constant against an integer constant, i.e.
6145 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6146 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6147 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6149 The function returns the constant folded tree if a simplification
6150 can be made, and NULL_TREE otherwise. */
6153 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6155 tree prod, tmp, hi, lo;
6156 tree arg00 = TREE_OPERAND (arg0, 0);
6157 tree arg01 = TREE_OPERAND (arg0, 1);
6158 unsigned HOST_WIDE_INT lpart;
6159 HOST_WIDE_INT hpart;
6160 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6164 /* We have to do this the hard way to detect unsigned overflow.
6165 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0). */
6166 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6167 TREE_INT_CST_HIGH (arg01),
6168 TREE_INT_CST_LOW (arg1),
6169 TREE_INT_CST_HIGH (arg1),
6170 &lpart, &hpart, unsigned_p);
6171 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6173 neg_overflow = false;
/* Unsigned divisor branch (condition elided): range is
   [prod, prod + (arg01 - 1)].  */
6177 tmp = int_const_binop (MINUS_EXPR, arg01,
6178 build_int_cst (TREE_TYPE (arg01), 1), 0);
6181 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6182 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6183 TREE_INT_CST_HIGH (prod),
6184 TREE_INT_CST_LOW (tmp),
6185 TREE_INT_CST_HIGH (tmp),
6186 &lpart, &hpart, unsigned_p);
6187 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6188 -1, overflow | TREE_OVERFLOW (prod));
/* Signed, positive divisor: the [lo, hi] bounds depend on the
   sign of arg1 (switch cases elided below).  */
6190 else if (tree_int_cst_sgn (arg01) >= 0)
6192 tmp = int_const_binop (MINUS_EXPR, arg01,
6193 build_int_cst (TREE_TYPE (arg01), 1), 0);
6194 switch (tree_int_cst_sgn (arg1))
6197 neg_overflow = true;
6198 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6203 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6208 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Signed, negative divisor (enclosing else elided).  */
6218 /* A negative divisor reverses the relational operators. */
6219 code = swap_tree_comparison (code);
6221 tmp = int_const_binop (PLUS_EXPR, arg01,
6222 build_int_cst (TREE_TYPE (arg01), 1), 0);
6223 switch (tree_int_cst_sgn (arg1))
6226 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6231 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6236 neg_overflow = true;
6237 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Final switch on CODE (labels elided): turn the division compare
   into a range check on arg00 against [lo, hi], using the overflow
   flags to collapse to constants where the range is empty/full.  */
/* (case EQ_EXPR)  */
6249 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6250 return omit_one_operand (type, integer_zero_node, arg00);
6251 if (TREE_OVERFLOW (hi))
6252 return fold_build2 (GE_EXPR, type, arg00, lo);
6253 if (TREE_OVERFLOW (lo))
6254 return fold_build2 (LE_EXPR, type, arg00, hi);
6255 return build_range_check (type, arg00, 1, lo, hi);
/* (case NE_EXPR)  */
6258 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6259 return omit_one_operand (type, integer_one_node, arg00);
6260 if (TREE_OVERFLOW (hi))
6261 return fold_build2 (LT_EXPR, type, arg00, lo);
6262 if (TREE_OVERFLOW (lo))
6263 return fold_build2 (GT_EXPR, type, arg00, hi);
6264 return build_range_check (type, arg00, 0, lo, hi);
/* (case LT_EXPR)  */
6267 if (TREE_OVERFLOW (lo))
6269 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6270 return omit_one_operand (type, tmp, arg00);
6272 return fold_build2 (LT_EXPR, type, arg00, lo);
/* (case LE_EXPR)  */
6275 if (TREE_OVERFLOW (hi))
6277 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6278 return omit_one_operand (type, tmp, arg00);
6280 return fold_build2 (LE_EXPR, type, arg00, hi);
/* (case GT_EXPR)  */
6283 if (TREE_OVERFLOW (hi))
6285 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6286 return omit_one_operand (type, tmp, arg00);
6288 return fold_build2 (GT_EXPR, type, arg00, hi);
/* (case GE_EXPR)  */
6291 if (TREE_OVERFLOW (lo))
6293 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6294 return omit_one_operand (type, tmp, arg00);
6296 return fold_build2 (GE_EXPR, type, arg00, lo);
/* NOTE(review): lines elided (result-type parameter declaration,
   braces, and the fall-through `return NULL_TREE` are missing).
   Code kept byte-identical.  */
6306 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6307 equality/inequality test, then return a simplified form of the test
6308 using a sign testing. Otherwise return NULL. TYPE is the desired
6312 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6315 /* If this is testing a single bit, we can optimize the test. */
6316 if ((code == NE_EXPR || code == EQ_EXPR)
6317 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6318 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6320 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6321 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6322 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6324 if (arg00 != NULL_TREE
6325 /* This is only a win if casting to a signed type is cheap,
6326 i.e. when arg00's type is not a partial mode. */
6327 && TYPE_PRECISION (TREE_TYPE (arg00))
6328 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
/* Compare the value reinterpreted as signed against zero:
   (A & signbit) != 0  -->  (signed) A < 0.  */
6330 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6331 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6332 result_type, fold_convert (stype, arg00),
6333 build_int_cst (stype, 0));
/* NOTE(review): lines elided (result_type parameter, some declarations,
   the #else arm of the LOAD_EXTEND_OP conditional, the early return of
   TEM, and the final return).  Code kept byte-identical.  */
6340 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6341 equality/inequality test, then return a simplified form of
6342 the test using shifts and logical operations. Otherwise return
6343 NULL. TYPE is the desired result type. */
6346 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6349 /* If this is testing a single bit, we can optimize the test. */
6350 if ((code == NE_EXPR || code == EQ_EXPR)
6351 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6352 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6354 tree inner = TREE_OPERAND (arg0, 0);
6355 tree type = TREE_TYPE (arg0);
6356 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6357 enum machine_mode operand_mode = TYPE_MODE (type);
6359 tree signed_type, unsigned_type, intermediate_type;
6362 /* First, see if we can fold the single bit test into a sign-bit
6364 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6369 /* Otherwise we have (A & C) != 0 where C is a single bit,
6370 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6371 Similarly for (A & C) == 0. */
6373 /* If INNER is a right shift of a constant and it plus BITNUM does
6374 not overflow, adjust BITNUM and INNER. */
6375 if (TREE_CODE (inner) == RSHIFT_EXPR
6376 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6377 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6378 && bitnum < TYPE_PRECISION (type)
6379 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6380 bitnum - TYPE_PRECISION (type)))
/* Fold the outer shift into BITNUM and test the bit of the
   shift's operand directly.  */
6382 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6383 inner = TREE_OPERAND (inner, 0);
6386 /* If we are going to be able to omit the AND below, we must do our
6387 operations as unsigned. If we must use the AND, we have a choice.
6388 Normally unsigned is faster, but for some machines signed is. */
6389 #ifdef LOAD_EXTEND_OP
6390 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6391 && !flag_syntax_only) ? 0 : 1;
/* (#else arm elided: presumably ops_unsigned = 1 -- TODO confirm.)  */
6396 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6397 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6398 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6399 inner = fold_convert (intermediate_type, inner);
/* Shift the tested bit down to position 0.  */
6402 inner = build2 (RSHIFT_EXPR, intermediate_type,
6403 inner, size_int (bitnum));
6405 one = build_int_cst (intermediate_type, 1);
/* For == 0 the result is the inverted bit, hence the XOR.  */
6407 if (code == EQ_EXPR)
6408 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6410 /* Put the AND last so it can combine with more things. */
6411 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6413 /* Make sure to return the proper type. */
6414 inner = fold_convert (result_type, inner);
/* NOTE(review): the early-return bodies after the two `if`s are elided
   (presumably both `return true;`).  Code kept byte-identical.  */
6421 /* Check whether we are allowed to reorder operands arg0 and arg1,
6422 such that the evaluation of arg1 occurs before arg0. */
6425 reorder_operands_p (tree arg0, tree arg1)
/* Without -fevaluation-order (elided branch), or when either operand
   is constant, reordering is trivially safe.  */
6427 if (! flag_evaluation_order)
6429 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
/* Otherwise both operands must be free of side effects.  */
6431 return ! TREE_SIDE_EFFECTS (arg0)
6432 && ! TREE_SIDE_EFFECTS (arg1);
/* NOTE(review): the return statements after each constant test are
   elided (the canonical order puts constants second, so the pattern is
   `return 0;` when arg1 is the constant and `return 1;` when arg0 is
   -- TODO confirm against the full source).  Code kept byte-identical.  */
6435 /* Test whether it is preferable two swap two operands, ARG0 and
6436 ARG1, for example because ARG0 is an integer constant and ARG1
6437 isn't. If REORDER is true, only recommend swapping if we can
6438 evaluate the operands in reverse order. */
6441 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
/* Look through sign-preserving conversions before classifying.  */
6443 STRIP_SIGN_NOPS (arg0);
6444 STRIP_SIGN_NOPS (arg1);
6446 if (TREE_CODE (arg1) == INTEGER_CST)
6448 if (TREE_CODE (arg0) == INTEGER_CST)
6451 if (TREE_CODE (arg1) == REAL_CST)
6453 if (TREE_CODE (arg0) == REAL_CST)
6456 if (TREE_CODE (arg1) == COMPLEX_CST)
6458 if (TREE_CODE (arg0) == COMPLEX_CST)
6461 if (TREE_CONSTANT (arg1))
6463 if (TREE_CONSTANT (arg0))
/* Under -fevaluation-order, refuse to swap side-effecting operands
   when REORDER requires preserving evaluation order.  */
6469 if (reorder && flag_evaluation_order
6470 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6478 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6479 for commutative and comparison operators. Ensuring a canonical
6480 form allows the optimizers to find additional redundancies without
6481 having to explicitly check for both orderings. */
6482 if (TREE_CODE (arg0) == SSA_NAME
6483 && TREE_CODE (arg1) == SSA_NAME
6484 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
/* NOTE(review): extraction elided declarations (arg1_unw, min, max,
   above, below), several `return NULL_TREE`s, and the switch-on-CODE
   labels around the final omit_one_operand calls.  Code kept
   byte-identical.  */
6490 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6491 ARG0 is extended to a wider type. */
6494 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6496 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6498 tree shorter_type, outer_type;
/* Nothing to do unless get_unwidened stripped a widening.  */
6502 if (arg0_unw == arg0)
6504 shorter_type = TREE_TYPE (arg0_unw);
6506 #ifdef HAVE_canonicalize_funcptr_for_compare
6507 /* Disable this optimization if we're casting a function pointer
6508 type on targets that require function pointer canonicalization. */
6509 if (HAVE_canonicalize_funcptr_for_compare
6510 && TREE_CODE (shorter_type) == POINTER_TYPE
6511 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6515 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6518 arg1_unw = get_unwidened (arg1, shorter_type);
6520 /* If possible, express the comparison in the shorter mode. */
6521 if ((code == EQ_EXPR || code == NE_EXPR
6522 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6523 && (TREE_TYPE (arg1_unw) == shorter_type
6524 || (TREE_CODE (arg1_unw) == INTEGER_CST
6525 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6526 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6527 && int_fits_type_p (arg1_unw, shorter_type))))
6528 return fold_build2 (code, type, arg0_unw,
6529 fold_convert (shorter_type, arg1_unw));
6531 if (TREE_CODE (arg1_unw) != INTEGER_CST
6532 || TREE_CODE (shorter_type) != INTEGER_TYPE
6533 || !int_fits_type_p (arg1_unw, shorter_type))
6536 /* If we are comparing with the integer that does not fit into the range
6537 of the shorter type, the result is known. */
6538 outer_type = TREE_TYPE (arg1_unw);
6539 min = lower_bound_in_type (outer_type, shorter_type);
6540 max = upper_bound_in_type (outer_type, shorter_type);
/* above/below: is the constant above MAX or below MIN of the
   shorter type's range?  */
6542 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6544 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
/* Switch on CODE (labels elided): out-of-range constants make the
   comparison a known true/false, keeping arg0 for side effects.  */
6551 return omit_one_operand (type, integer_zero_node, arg0);
6556 return omit_one_operand (type, integer_one_node, arg0);
6562 return omit_one_operand (type, integer_one_node, arg0);
6564 return omit_one_operand (type, integer_zero_node, arg0);
6569 return omit_one_operand (type, integer_zero_node, arg0);
6571 return omit_one_operand (type, integer_one_node, arg0);
/* NOTE(review): several `return NULL_TREE`s, a declaration, and part
   of the condition at 6616 are elided by the extraction.  Code kept
   byte-identical.  */
6580 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6581 ARG0 just the signedness is changed. */
6584 fold_sign_changed_comparison (enum tree_code code, tree type,
6585 tree arg0, tree arg1)
6588 tree inner_type, outer_type;
/* Only applies when arg0 is a conversion.  */
6590 if (TREE_CODE (arg0) != NOP_EXPR
6591 && TREE_CODE (arg0) != CONVERT_EXPR)
6594 outer_type = TREE_TYPE (arg0);
6595 arg0_inner = TREE_OPERAND (arg0, 0);
6596 inner_type = TREE_TYPE (arg0_inner);
6598 #ifdef HAVE_canonicalize_funcptr_for_compare
6599 /* Disable this optimization if we're casting a function pointer
6600 type on targets that require function pointer canonicalization. */
6601 if (HAVE_canonicalize_funcptr_for_compare
6602 && TREE_CODE (inner_type) == POINTER_TYPE
6603 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
/* The conversion must preserve precision: only the sign changes.  */
6607 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6610 if (TREE_CODE (arg1) != INTEGER_CST
6611 && !((TREE_CODE (arg1) == NOP_EXPR
6612 || TREE_CODE (arg1) == CONVERT_EXPR)
6613 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
/* (condition continues on an elided line).  */
6616 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
/* Re-express the constant in the inner type, preserving any
   overflow flag, then compare in the inner type.  */
6621 if (TREE_CODE (arg1) == INTEGER_CST)
6622 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6623 TREE_INT_CST_HIGH (arg1), 0,
6624 TREE_OVERFLOW (arg1));
6626 arg1 = fold_convert (inner_type, arg1);
6628 return fold_build2 (code, type, arg0_inner, arg1);
/* NOTE(review): many lines elided (declarations of itype/mdim/pos/ret,
   the delta/s assignments inside the canonicalization branches, loop
   braces and several failure returns).  Code kept byte-identical.  */
6631 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6632 step of the array. Reconstructs s and delta in the case of s * delta
6633 being an integer constant (and thus already folded).
6634 ADDR is the address. MULT is the multiplicative expression.
6635 If the function succeeds, the new address expression is returned. Otherwise
6636 NULL_TREE is returned. */
6639 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6641 tree s, delta, step;
6642 tree ref = TREE_OPERAND (addr, 0), pref;
6647 /* Canonicalize op1 into a possibly non-constant delta
6648 and an INTEGER_CST s. */
6649 if (TREE_CODE (op1) == MULT_EXPR)
6651 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6656 if (TREE_CODE (arg0) == INTEGER_CST)
6661 else if (TREE_CODE (arg1) == INTEGER_CST)
6669 else if (TREE_CODE (op1) == INTEGER_CST)
6676 /* Simulate we are delta * 1. */
6678 s = integer_one_node;
/* Walk down the reference chain looking for an ARRAY_REF whose
   element size matches S (or divides DELTA).  */
6681 for (;; ref = TREE_OPERAND (ref, 0))
6683 if (TREE_CODE (ref) == ARRAY_REF)
6685 /* Remember if this was a multi-dimensional array. */
6686 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6689 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6693 step = array_ref_element_size (ref);
6694 if (TREE_CODE (step) != INTEGER_CST)
6699 if (! tree_int_cst_equal (step, s))
6704 /* Try if delta is a multiple of step. */
6705 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6711 /* Only fold here if we can verify we do not overflow one
6712 dimension of a multi-dimensional array. */
6717 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6718 || !INTEGRAL_TYPE_P (itype)
6719 || !TYPE_MAX_VALUE (itype)
6720 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
/* Check the adjusted index stays within the domain's max.  */
6723 tmp = fold_binary (code, itype,
6724 fold_convert (itype,
6725 TREE_OPERAND (ref, 1)),
6726 fold_convert (itype, delta));
6728 || TREE_CODE (tmp) != INTEGER_CST
6729 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6738 if (!handled_component_p (ref))
6742 /* We found the suitable array reference. So copy everything up to it,
6743 and replace the index. */
6745 pref = TREE_OPERAND (addr, 0);
6746 ret = copy_node (pref);
/* Copy each component node down to the ARRAY_REF (loop header
   elided) so the original tree is not mutated.  */
6751 pref = TREE_OPERAND (pref, 0);
6752 TREE_OPERAND (pos, 0) = copy_node (pref);
6753 pos = TREE_OPERAND (pos, 0);
/* Rewrite the index: idx CODE delta.  */
6756 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6757 fold_convert (itype,
6758 TREE_OPERAND (pos, 1)),
6759 fold_convert (itype, delta));
6761 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
/* NOTE(review): the `return NULL_TREE` fall-through bodies after each
   failed pattern match are elided.  Code kept byte-identical.  */
6765 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6766 means A >= Y && A != MAX, but in this case we know that
6767 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6770 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6772 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from the bound, whichever side it is on.  */
6774 if (TREE_CODE (bound) == LT_EXPR)
6775 a = TREE_OPERAND (bound, 0);
6776 else if (TREE_CODE (bound) == GT_EXPR)
6777 a = TREE_OPERAND (bound, 1);
6781 typea = TREE_TYPE (a);
6782 if (!INTEGRAL_TYPE_P (typea)
6783 && !POINTER_TYPE_P (typea))
/* Extract A1 (expected to be A + 1) and Y from the inequality.  */
6786 if (TREE_CODE (ineq) == LT_EXPR)
6788 a1 = TREE_OPERAND (ineq, 1);
6789 y = TREE_OPERAND (ineq, 0);
6791 else if (TREE_CODE (ineq) == GT_EXPR)
6793 a1 = TREE_OPERAND (ineq, 0);
6794 y = TREE_OPERAND (ineq, 1);
6799 if (TREE_TYPE (a1) != typea)
/* Verify a1 - a folds to exactly 1; only then is the rewrite valid.  */
6802 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6803 if (!integer_onep (diff))
6806 return fold_build2 (GE_EXPR, type, a, y);
/* NOTE(review): several lines elided (the non-MULT_EXPR else-branches
   assigning arg00/arg10, the power-of-two factoring arithmetic around
   6879-6887, and the final NULL return).  Code kept byte-identical.  */
6809 /* Fold a sum or difference of at least one multiplication.
6810 Returns the folded tree or NULL if no simplification could be made. */
6813 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6815 tree arg00, arg01, arg10, arg11;
6816 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6818 /* (A * C) +- (B * C) -> (A+-B) * C.
6819 (A * C) +- A -> A * (C+-1).
6820 We are most concerned about the case where C is a constant,
6821 but other combinations show up during loop reduction. Since
6822 it is not difficult, try all four possibilities. */
6824 if (TREE_CODE (arg0) == MULT_EXPR)
6826 arg00 = TREE_OPERAND (arg0, 0);
6827 arg01 = TREE_OPERAND (arg0, 1);
/* Not a multiply: treat arg0 as arg0 * 1 (else-branch elided).  */
6832 arg01 = build_one_cst (type);
6834 if (TREE_CODE (arg1) == MULT_EXPR)
6836 arg10 = TREE_OPERAND (arg1, 0);
6837 arg11 = TREE_OPERAND (arg1, 1);
6842 arg11 = build_one_cst (type);
/* Find the shared factor SAME among the four candidate pairings.  */
6846 if (operand_equal_p (arg01, arg11, 0))
6847 same = arg01, alt0 = arg00, alt1 = arg10;
6848 else if (operand_equal_p (arg00, arg10, 0))
6849 same = arg00, alt0 = arg01, alt1 = arg11;
6850 else if (operand_equal_p (arg00, arg11, 0))
6851 same = arg00, alt0 = arg01, alt1 = arg10;
6852 else if (operand_equal_p (arg01, arg10, 0))
6853 same = arg01, alt0 = arg00, alt1 = arg11;
6855 /* No identical multiplicands; see if we can find a common
6856 power-of-two factor in non-power-of-two multiplies. This
6857 can help in multi-dimensional array access. */
6858 else if (host_integerp (arg01, 0)
6859 && host_integerp (arg11, 0))
6861 HOST_WIDE_INT int01, int11, tmp;
6864 int01 = TREE_INT_CST_LOW (arg01);
6865 int11 = TREE_INT_CST_LOW (arg11);
6867 /* Move min of absolute values to int11. */
6868 if ((int01 >= 0 ? int01 : -int01)
6869 < (int11 >= 0 ? int11 : -int11))
6871 tmp = int01, int01 = int11, int11 = tmp;
6872 alt0 = arg00, arg00 = arg10, arg10 = alt0;
/* Factor out INT11 when it is a power of two dividing INT01.  */
6879 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6881 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6882 build_int_cst (TREE_TYPE (arg00),
6887 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
/* (A +- B) * SAME.  */
6892 return fold_build2 (MULT_EXPR, type,
6893 fold_build2 (code, type,
6894 fold_convert (type, alt0),
6895 fold_convert (type, alt1)),
6896 fold_convert (type, same));
/* NOTE(review): braces, the failure `return 0`, and the final
   `return total_bytes` are elided by the extraction.  Code kept
   byte-identical.  */
6901 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6902 specified by EXPR into the buffer PTR of length LEN bytes.
6903 Return the number of bytes placed in the buffer, or zero
6907 native_encode_int (tree expr, unsigned char *ptr, int len)
6909 tree type = TREE_TYPE (expr);
6910 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6911 int byte, offset, word, words;
6912 unsigned char value;
/* Fail if the buffer is too small.  */
6914 if (total_bytes > len)
6916 words = total_bytes / UNITS_PER_WORD;
6918 for (byte = 0; byte < total_bytes; byte++)
/* Pull byte BYTE out of the two-HOST_WIDE_INT representation.  */
6920 int bitpos = byte * BITS_PER_UNIT;
6921 if (bitpos < HOST_BITS_PER_WIDE_INT)
6922 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6924 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6925 >> (bitpos - HOST_BITS_PER_WIDE_INT));
/* Map the logical byte index to the target's byte/word order.  */
6927 if (total_bytes > UNITS_PER_WORD)
6929 word = byte / UNITS_PER_WORD;
6930 if (WORDS_BIG_ENDIAN)
6931 word = (words - 1) - word;
6932 offset = word * UNITS_PER_WORD;
6933 if (BYTES_BIG_ENDIAN)
6934 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6936 offset += byte % UNITS_PER_WORD;
6939 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6940 ptr[offset] = value;
/* NOTE(review): the declaration of the `long tmp[...]` buffer, braces,
   and the return statements are elided.  Code kept byte-identical.  */
6946 /* Subroutine of native_encode_expr. Encode the REAL_CST
6947 specified by EXPR into the buffer PTR of length LEN bytes.
6948 Return the number of bytes placed in the buffer, or zero
6952 native_encode_real (tree expr, unsigned char *ptr, int len)
6954 tree type = TREE_TYPE (expr);
6955 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6956 int byte, offset, word, words;
6957 unsigned char value;
6959 /* There are always 32 bits in each long, no matter the size of
6960 the hosts long. We handle floating point representations with
6964 if (total_bytes > len)
6966 words = total_bytes / UNITS_PER_WORD;
/* Let real.c produce the target representation into TMP.  */
6968 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6970 for (byte = 0; byte < total_bytes; byte++)
6972 int bitpos = byte * BITS_PER_UNIT;
/* Each TMP element carries 32 meaningful bits.  */
6973 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
/* Note FLOAT_WORDS_BIG_ENDIAN (not WORDS_BIG_ENDIAN) governs
   word order for floats.  */
6975 if (total_bytes > UNITS_PER_WORD)
6977 word = byte / UNITS_PER_WORD;
6978 if (FLOAT_WORDS_BIG_ENDIAN)
6979 word = (words - 1) - word;
6980 offset = word * UNITS_PER_WORD;
6981 if (BYTES_BIG_ENDIAN)
6982 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6984 offset += byte % UNITS_PER_WORD;
6987 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6988 ptr[offset] = value;
/* NOTE(review): declarations (part, rsize, isize), the zero-size
   failure checks, and braces are elided.  Code kept byte-identical.  */
6993 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6994 specified by EXPR into the buffer PTR of length LEN bytes.
6995 Return the number of bytes placed in the buffer, or zero
6999 native_encode_complex (tree expr, unsigned char *ptr, int len)
/* Encode the real part first, then the imaginary part after it.  */
7004 part = TREE_REALPART (expr);
7005 rsize = native_encode_expr (part, ptr, len);
7008 part = TREE_IMAGPART (expr);
7009 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7012 return rsize + isize;
/* NOTE(review): the per-element branch structure (what happens when
   ELEMENTS runs out -- the memset pads trailing elements with zeros),
   offset bookkeeping lines and the final return are elided.  Code kept
   byte-identical.  */
7016 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7017 specified by EXPR into the buffer PTR of length LEN bytes.
7018 Return the number of bytes placed in the buffer, or zero
7022 native_encode_vector (tree expr, unsigned char *ptr, int len)
7024 int i, size, offset, count;
7025 tree itype, elem, elements;
7028 elements = TREE_VECTOR_CST_ELTS (expr);
7029 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7030 itype = TREE_TYPE (TREE_TYPE (expr));
7031 size = GET_MODE_SIZE (TYPE_MODE (itype));
7032 for (i = 0; i < count; i++)
/* Consume the next element from the TREE_LIST chain.  */
7036 elem = TREE_VALUE (elements);
7037 elements = TREE_CHAIN (elements);
/* Each element must encode to exactly SIZE bytes.  */
7044 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
/* Missing trailing elements are implicitly zero.  */
7049 if (offset + size > len)
7051 memset (ptr+offset, 0, size);
/* NOTE(review): the `case` labels (INTEGER_CST, REAL_CST, COMPLEX_CST,
   VECTOR_CST per the dispatch targets) and the default `return 0` are
   elided.  Code kept byte-identical.  */
7059 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7060 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7061 buffer PTR of length LEN bytes. Return the number of bytes
7062 placed in the buffer, or zero upon failure. */
7065 native_encode_expr (tree expr, unsigned char *ptr, int len)
7067 switch (TREE_CODE (expr))
7070 return native_encode_int (expr, ptr, len);
7073 return native_encode_real (expr, ptr, len);
7076 return native_encode_complex (expr, ptr, len);
7079 return native_encode_vector (expr, ptr, len);
/* NOTE(review): failure `return NULL_TREE`s and braces are elided.
   Code kept byte-identical.  This is the decoding inverse of
   native_encode_int: same byte/word-order mapping, accumulating into
   the LO/HI pair.  */
7087 /* Subroutine of native_interpret_expr. Interpret the contents of
7088 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7089 If the buffer cannot be interpreted, return NULL_TREE. */
7092 native_interpret_int (tree type, unsigned char *ptr, int len)
7094 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7095 int byte, offset, word, words;
7096 unsigned char value;
7097 unsigned int HOST_WIDE_INT lo = 0;
7098 HOST_WIDE_INT hi = 0;
/* Reject buffers too small or types wider than 2 HOST_WIDE_INTs.  */
7100 if (total_bytes > len)
7102 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7104 words = total_bytes / UNITS_PER_WORD;
7106 for (byte = 0; byte < total_bytes; byte++)
7108 int bitpos = byte * BITS_PER_UNIT;
/* Same target-order offset computation as native_encode_int.  */
7109 if (total_bytes > UNITS_PER_WORD)
7111 word = byte / UNITS_PER_WORD;
7112 if (WORDS_BIG_ENDIAN)
7113 word = (words - 1) - word;
7114 offset = word * UNITS_PER_WORD;
7115 if (BYTES_BIG_ENDIAN)
7116 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7118 offset += byte % UNITS_PER_WORD;
7121 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7122 value = ptr[offset];
/* Merge the byte into the low or high half as appropriate.  */
7124 if (bitpos < HOST_BITS_PER_WIDE_INT)
7125 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7127 hi |= (unsigned HOST_WIDE_INT) value
7128 << (bitpos - HOST_BITS_PER_WIDE_INT);
7131 return build_int_cst_wide_type (type, lo, hi);
/* NOTE(review): declarations (`long tmp[...]`, REAL_VALUE_TYPE r),
   the failure return and braces are elided.  Code kept byte-identical.
   Decoding inverse of native_encode_real.  */
7135 /* Subroutine of native_interpret_expr. Interpret the contents of
7136 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7137 If the buffer cannot be interpreted, return NULL_TREE. */
7140 native_interpret_real (tree type, unsigned char *ptr, int len)
7142 enum machine_mode mode = TYPE_MODE (type);
7143 int total_bytes = GET_MODE_SIZE (mode);
7144 int byte, offset, word, words;
7145 unsigned char value;
7146 /* There are always 32 bits in each long, no matter the size of
7147 the hosts long. We handle floating point representations with
7152 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
/* 24 bytes = 192 bits, the largest supported FP representation.  */
7153 if (total_bytes > len || total_bytes > 24)
7155 words = total_bytes / UNITS_PER_WORD;
7157 memset (tmp, 0, sizeof (tmp));
7158 for (byte = 0; byte < total_bytes; byte++)
7160 int bitpos = byte * BITS_PER_UNIT;
/* Same FLOAT_WORDS_BIG_ENDIAN-aware offset as the encoder.  */
7161 if (total_bytes > UNITS_PER_WORD)
7163 word = byte / UNITS_PER_WORD;
7164 if (FLOAT_WORDS_BIG_ENDIAN)
7165 word = (words - 1) - word;
7166 offset = word * UNITS_PER_WORD;
7167 if (BYTES_BIG_ENDIAN)
7168 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7170 offset += byte % UNITS_PER_WORD;
7173 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7174 value = ptr[offset];
7176 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
/* Let real.c rebuild the REAL_VALUE_TYPE from the target image.  */
7179 real_from_target (&r, tmp, mode);
7180 return build_real (type, r);
7184 /* Subroutine of native_interpret_expr. Interpret the contents of
7185 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7186 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): intermediate lines (return type, braces, the NULL_TREE
   checks after each part) are elided in this view.  Visible logic: split
   the buffer into two element-sized halves, decode each with
   native_interpret_expr, and combine them into a COMPLEX_CST.  */
7189 native_interpret_complex (tree type, unsigned char *ptr, int len)
7191 tree etype, rpart, ipart;
7194 etype = TREE_TYPE (type);
/* SIZE is the byte width of one complex component (the element type).  */
7195 size = GET_MODE_SIZE (TYPE_MODE (etype));
/* Real part occupies the first SIZE bytes, imaginary part the next.  */
7198 rpart = native_interpret_expr (etype, ptr, size);
7201 ipart = native_interpret_expr (etype, ptr+size, size);
7204 return build_complex (type, rpart, ipart);
7208 /* Subroutine of native_interpret_expr. Interpret the contents of
7209 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7210 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): some lines (return type, braces, the per-element NULL
   check) are elided in this view.  Visible logic: decode COUNT elements
   of SIZE bytes each from PTR and chain them into a TREE_LIST for
   build_vector.  */
7213 native_interpret_vector (tree type, unsigned char *ptr, int len)
7215 tree etype, elem, elements;
7218 etype = TREE_TYPE (type);
7219 size = GET_MODE_SIZE (TYPE_MODE (etype));
7220 count = TYPE_VECTOR_SUBPARTS (type);
/* Fail if the buffer cannot hold all vector elements.  */
7221 if (size * count > len)
7224 elements = NULL_TREE;
/* Iterate backwards so tree_cons builds the list in forward order.  */
7225 for (i = count - 1; i >= 0; i--)
7227 elem = native_interpret_expr (etype, ptr+(i*size), size);
7230 elements = tree_cons (NULL_TREE, elem, elements);
7232 return build_vector (type, elements);
7236 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7237 the buffer PTR of length LEN as a constant of type TYPE. For
7238 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7239 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7240 return NULL_TREE. */
/* NOTE(review): the case labels and the default (returning NULL_TREE)
   are elided in this view; only the per-kind dispatch calls remain.  */
7243 native_interpret_expr (tree type, unsigned char *ptr, int len)
7245 switch (TREE_CODE (type))
7250 return native_interpret_int (type, ptr, len);
7253 return native_interpret_real (type, ptr, len);
7256 return native_interpret_complex (type, ptr, len);
7259 return native_interpret_vector (type, ptr, len);
7267 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7268 TYPE at compile-time. If we're unable to perform the conversion
7269 return NULL_TREE. */
/* Strategy: serialize EXPR into a byte buffer in target representation
   (native_encode_expr), then reinterpret those bytes as a constant of
   TYPE (native_interpret_expr).  NOTE(review): the early "return
   NULL_TREE" bodies after the checks are elided in this view.  */
7272 fold_view_convert_expr (tree type, tree expr)
7274 /* We support up to 512-bit values (for V8DFmode). */
7275 unsigned char buffer[64];
7278 /* Check that the host and target are sane. */
/* Byte-level reinterpretation only works when host chars and target
   units are both 8 bits wide.  */
7279 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7282 len = native_encode_expr (expr, buffer, sizeof (buffer));
7286 return native_interpret_expr (type, buffer, len);
7290 /* Fold a unary expression of code CODE and type TYPE with operand
7291 OP0. Return the folded expression if folding is successful.
7292 Otherwise, return NULL_TREE. */
/* NOTE(review): this view of fold_unary is heavily elided — many case
   labels, braces, early returns, and variable declarations are missing.
   The comments added below annotate only the visible logic; cross-check
   details against the full source before relying on them.  */
7295 fold_unary (enum tree_code code, tree type, tree op0)
7299 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* fold_unary only handles genuine single-operand expression codes.  */
7301 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7302 && TREE_CODE_LENGTH (code) == 1);
7307 if (code == NOP_EXPR || code == CONVERT_EXPR
7308 || code == FLOAT_EXPR || code == ABS_EXPR)
7310 /* Don't use STRIP_NOPS, because signedness of argument type
7312 STRIP_SIGN_NOPS (arg0);
7316 /* Strip any conversions that don't change the mode. This
7317 is safe for every expression, except for a comparison
7318 expression because its signedness is derived from its
7321 Note that this is done as an internal manipulation within
7322 the constant folder, in order to find the simplest
7323 representation of the arguments so that their form can be
7324 studied. In any cases, the appropriate type conversions
7325 should be put back in the tree that will get out of the
/* Distribute any unary operator over COMPOUND_EXPR and COND_EXPR so the
   operator applies to the value-producing sub-expressions.  */
7331 if (TREE_CODE_CLASS (code) == tcc_unary)
7333 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7334 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7335 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7336 else if (TREE_CODE (arg0) == COND_EXPR)
7338 tree arg01 = TREE_OPERAND (arg0, 1);
7339 tree arg02 = TREE_OPERAND (arg0, 2);
/* Apply CODE to each non-void arm of the conditional.  */
7340 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7341 arg01 = fold_build1 (code, type, arg01);
7342 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7343 arg02 = fold_build1 (code, type, arg02);
7344 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7347 /* If this was a conversion, and all we did was to move into
7348 inside the COND_EXPR, bring it back out. But leave it if
7349 it is a conversion from integer to integer and the
7350 result precision is no wider than a word since such a
7351 conversion is cheap and may be optimized away by combine,
7352 while it couldn't if it were outside the COND_EXPR. Then return
7353 so we don't get into an infinite recursion loop taking the
7354 conversion out and then back in. */
7356 if ((code == NOP_EXPR || code == CONVERT_EXPR
7357 || code == NON_LVALUE_EXPR)
7358 && TREE_CODE (tem) == COND_EXPR
7359 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7360 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7361 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7362 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7363 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7364 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7365 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7367 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7368 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7369 || flag_syntax_only))
7370 tem = build1 (code, type,
7372 TREE_TYPE (TREE_OPERAND
7373 (TREE_OPERAND (tem, 1), 0)),
7374 TREE_OPERAND (tem, 0),
7375 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7376 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
/* A comparison operand: either retype it directly (boolean result) or
   expand it into an explicit COND_EXPR of folded one/zero.  */
7379 else if (COMPARISON_CLASS_P (arg0))
7381 if (TREE_CODE (type) == BOOLEAN_TYPE)
7383 arg0 = copy_node (arg0);
7384 TREE_TYPE (arg0) = type;
7387 else if (TREE_CODE (type) != INTEGER_TYPE)
7388 return fold_build3 (COND_EXPR, type, arg0,
7389 fold_build1 (code, type,
7391 fold_build1 (code, type,
7392 integer_zero_node));
/* Conversion cases (NOP/CONVERT/FIX_TRUNC; earlier labels elided).  */
7401 case FIX_TRUNC_EXPR:
/* Conversion to the same type is a no-op (return elided in view).  */
7402 if (TREE_TYPE (op0) == type)
7405 /* If we have (type) (a CMP b) and type is an integral type, return
7406 new expression involving the new type. */
7407 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7408 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7409 TREE_OPERAND (op0, 1));
7411 /* Handle cases of two conversions in a row. */
7412 if (TREE_CODE (op0) == NOP_EXPR
7413 || TREE_CODE (op0) == CONVERT_EXPR)
/* Classify the three types involved: "inside" (innermost operand),
   "inter" (intermediate conversion), and "final" (TYPE).  */
7415 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7416 tree inter_type = TREE_TYPE (op0);
7417 int inside_int = INTEGRAL_TYPE_P (inside_type);
7418 int inside_ptr = POINTER_TYPE_P (inside_type);
7419 int inside_float = FLOAT_TYPE_P (inside_type);
7420 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7421 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7422 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7423 int inter_int = INTEGRAL_TYPE_P (inter_type);
7424 int inter_ptr = POINTER_TYPE_P (inter_type);
7425 int inter_float = FLOAT_TYPE_P (inter_type);
7426 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7427 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7428 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7429 int final_int = INTEGRAL_TYPE_P (type);
7430 int final_ptr = POINTER_TYPE_P (type);
7431 int final_float = FLOAT_TYPE_P (type);
7432 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7433 unsigned int final_prec = TYPE_PRECISION (type);
7434 int final_unsignedp = TYPE_UNSIGNED (type);
7436 /* In addition to the cases of two conversions in a row
7437 handled below, if we are converting something to its own
7438 type via an object of identical or wider precision, neither
7439 conversion is needed. */
7440 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7441 && (((inter_int || inter_ptr) && final_int)
7442 || (inter_float && final_float))
7443 && inter_prec >= final_prec)
7444 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7446 /* Likewise, if the intermediate and final types are either both
7447 float or both integer, we don't need the middle conversion if
7448 it is wider than the final type and doesn't change the signedness
7449 (for integers). Avoid this if the final type is a pointer
7450 since then we sometimes need the inner conversion. Likewise if
7451 the outer has a precision not equal to the size of its mode. */
7452 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7453 || (inter_float && inside_float)
7454 || (inter_vec && inside_vec))
7455 && inter_prec >= inside_prec
7456 && (inter_float || inter_vec
7457 || inter_unsignedp == inside_unsignedp)
7458 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7459 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7461 && (! final_vec || inter_prec == inside_prec))
7462 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7464 /* If we have a sign-extension of a zero-extended value, we can
7465 replace that by a single zero-extension. */
7466 if (inside_int && inter_int && final_int
7467 && inside_prec < inter_prec && inter_prec < final_prec
7468 && inside_unsignedp && !inter_unsignedp)
7469 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7471 /* Two conversions in a row are not needed unless:
7472 - some conversion is floating-point (overstrict for now), or
7473 - some conversion is a vector (overstrict for now), or
7474 - the intermediate type is narrower than both initial and
7476 - the intermediate type and innermost type differ in signedness,
7477 and the outermost type is wider than the intermediate, or
7478 - the initial type is a pointer type and the precisions of the
7479 intermediate and final types differ, or
7480 - the final type is a pointer type and the precisions of the
7481 initial and intermediate types differ.
7482 - the final type is a pointer type and the initial type not
7483 - the initial type is a pointer to an array and the final type
7485 if (! inside_float && ! inter_float && ! final_float
7486 && ! inside_vec && ! inter_vec && ! final_vec
7487 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7488 && ! (inside_int && inter_int
7489 && inter_unsignedp != inside_unsignedp
7490 && inter_prec < final_prec)
7491 && ((inter_unsignedp && inter_prec > inside_prec)
7492 == (final_unsignedp && final_prec > inter_prec))
7493 && ! (inside_ptr && inter_prec != final_prec)
7494 && ! (final_ptr && inside_prec != inter_prec)
7495 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7496 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7497 && final_ptr == inside_ptr
7499 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7500 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7501 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7504 /* Handle (T *)&A.B.C for A being of type T and B and C
7505 living at offset zero. This occurs frequently in
7506 C++ upcasting and then accessing the base. */
7507 if (TREE_CODE (op0) == ADDR_EXPR
7508 && POINTER_TYPE_P (type)
7509 && handled_component_p (TREE_OPERAND (op0, 0)))
7511 HOST_WIDE_INT bitsize, bitpos;
7513 enum machine_mode mode;
7514 int unsignedp, volatilep;
7515 tree base = TREE_OPERAND (op0, 0);
7516 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7517 &mode, &unsignedp, &volatilep, false);
7518 /* If the reference was to a (constant) zero offset, we can use
7519 the address of the base if it has the same base type
7520 as the result type. */
7521 if (! offset && bitpos == 0
7522 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7523 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7524 return fold_convert (type, build_fold_addr_expr (base));
/* (T)(x = const): hoist the conversion out of the assignment, except
   when assigning to a bitfield (where truncation semantics differ).  */
7527 if ((TREE_CODE (op0) == MODIFY_EXPR
7528 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7529 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7530 /* Detect assigning a bitfield. */
7531 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7533 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7535 /* Don't leave an assignment inside a conversion
7536 unless assigning a bitfield. */
7537 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7538 /* First do the assignment, then return converted constant. */
7539 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7540 TREE_NO_WARNING (tem) = 1;
7541 TREE_USED (tem) = 1;
7545 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7546 constants (if x has signed type, the sign bit cannot be set
7547 in c). This folds extension into the BIT_AND_EXPR. */
7548 if (INTEGRAL_TYPE_P (type)
7549 && TREE_CODE (type) != BOOLEAN_TYPE
7550 && TREE_CODE (op0) == BIT_AND_EXPR
7551 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
7554 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
/* Safe unconditionally if the AND is unsigned or we are narrowing.  */
7557 if (TYPE_UNSIGNED (TREE_TYPE (and))
7558 || (TYPE_PRECISION (type)
7559 <= TYPE_PRECISION (TREE_TYPE (and))))
/* Otherwise only safe when the mask's sign bit is clear, so sign
   extension of the masked value cannot introduce high bits.  */
7561 else if (TYPE_PRECISION (TREE_TYPE (and1))
7562 <= HOST_BITS_PER_WIDE_INT
7563 && host_integerp (and1, 1))
7565 unsigned HOST_WIDE_INT cst;
7567 cst = tree_low_cst (and1, 1);
7568 cst &= (HOST_WIDE_INT) -1
7569 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7570 change = (cst == 0);
7571 #ifdef LOAD_EXTEND_OP
/* On targets where loads extend, doing the AND in an unsigned type can
   match the load's extension and save an instruction.  */
7573 && !flag_syntax_only
7574 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7577 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7578 and0 = fold_convert (uns, and0);
7579 and1 = fold_convert (uns, and1);
7585 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7586 TREE_INT_CST_HIGH (and1), 0,
7587 TREE_OVERFLOW (and1));
7588 return fold_build2 (BIT_AND_EXPR, type,
7589 fold_convert (type, and0), tem);
7593 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7594 T2 being pointers to types of the same size. */
7595 if (POINTER_TYPE_P (type)
7596 && BINARY_CLASS_P (arg0)
7597 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7598 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7600 tree arg00 = TREE_OPERAND (arg0, 0);
7602 tree t1 = TREE_TYPE (arg00);
7603 tree tt0 = TREE_TYPE (t0);
7604 tree tt1 = TREE_TYPE (t1);
/* Compare the sizes of the pointed-to types; equal sizes make the
   intermediate cast removable.  */
7605 tree s0 = TYPE_SIZE (tt0);
7606 tree s1 = TYPE_SIZE (tt1);
7608 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7609 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7610 TREE_OPERAND (arg0, 1));
7613 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7614 of the same precision, and X is a integer type not narrower than
7615 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7616 if (INTEGRAL_TYPE_P (type)
7617 && TREE_CODE (op0) == BIT_NOT_EXPR
7618 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7619 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7620 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7621 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7623 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7624 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7625 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7626 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
/* Fall back to constant-folding the conversion itself.  */
7629 tem = fold_convert_const (code, type, arg0);
7630 return tem ? tem : NULL_TREE;
7632 case VIEW_CONVERT_EXPR:
/* Same-type view-conversion is a no-op (return elided in view).  */
7633 if (TREE_TYPE (op0) == type)
/* Nested view-conversions collapse to a single one.  */
7635 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7636 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7637 return fold_view_convert_expr (type, op0);
/* NEGATE_EXPR (case label elided): delegate to fold_negate_expr.  */
7640 tem = fold_negate_expr (arg0);
7642 return fold_convert (type, tem);
/* ABS_EXPR (case label elided).  */
7646 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7647 return fold_abs_const (arg0, type);
7648 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7649 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7650 /* Convert fabs((double)float) into (double)fabsf(float). */
7651 else if (TREE_CODE (arg0) == NOP_EXPR
7652 && TREE_CODE (type) == REAL_TYPE)
7654 tree targ0 = strip_float_extensions (arg0);
7656 return fold_convert (type, fold_build1 (ABS_EXPR,
7660 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7661 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7664 /* Strip sign ops from argument. */
7665 if (TREE_CODE (type) == REAL_TYPE)
7667 tem = fold_strip_sign_ops (arg0);
7669 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
/* CONJ_EXPR (case label elided): conjugate distributes over the parts.  */
7674 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7675 return fold_convert (type, arg0);
7676 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7678 tree itype = TREE_TYPE (type);
7679 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7680 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7681 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7683 if (TREE_CODE (arg0) == COMPLEX_CST)
7685 tree itype = TREE_TYPE (type);
7686 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7687 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7688 return build_complex (type, rpart, negate_expr (ipart));
/* conj(conj(x)) == x.  */
7690 if (TREE_CODE (arg0) == CONJ_EXPR)
7691 return fold_convert (type, TREE_OPERAND (arg0, 0));
/* BIT_NOT_EXPR (case label elided).  */
7695 if (TREE_CODE (arg0) == INTEGER_CST)
7696 return fold_not_const (arg0, type);
7697 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7698 return TREE_OPERAND (arg0, 0);
7699 /* Convert ~ (-A) to A - 1. */
7700 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7701 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7702 build_int_cst (type, 1));
7703 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7704 else if (INTEGRAL_TYPE_P (type)
7705 && ((TREE_CODE (arg0) == MINUS_EXPR
7706 && integer_onep (TREE_OPERAND (arg0, 1)))
7707 || (TREE_CODE (arg0) == PLUS_EXPR
7708 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7709 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7710 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7711 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7712 && (tem = fold_unary (BIT_NOT_EXPR, type,
7714 TREE_OPERAND (arg0, 0)))))
7715 return fold_build2 (BIT_XOR_EXPR, type, tem,
7716 fold_convert (type, TREE_OPERAND (arg0, 1)));
7717 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7718 && (tem = fold_unary (BIT_NOT_EXPR, type,
7720 TREE_OPERAND (arg0, 1)))))
7721 return fold_build2 (BIT_XOR_EXPR, type,
7722 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7726 case TRUTH_NOT_EXPR:
7727 /* The argument to invert_truthvalue must have Boolean type. */
7728 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7729 arg0 = fold_convert (boolean_type_node, arg0);
7731 /* Note that the operand of this must be an int
7732 and its values must be 0 or 1.
7733 ("true" is a fixed value perhaps depending on the language,
7734 but we don't handle values other than 1 correctly yet.) */
7735 tem = fold_truth_not_expr (arg0);
7738 return fold_convert (type, tem);
/* REALPART_EXPR (case label elided).  */
7741 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7742 return fold_convert (type, arg0);
7743 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7744 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7745 TREE_OPERAND (arg0, 1));
7746 if (TREE_CODE (arg0) == COMPLEX_CST)
7747 return fold_convert (type, TREE_REALPART (arg0));
/* Re(a +- b) = Re(a) +- Re(b).  */
7748 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7750 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7751 tem = fold_build2 (TREE_CODE (arg0), itype,
7752 fold_build1 (REALPART_EXPR, itype,
7753 TREE_OPERAND (arg0, 0)),
7754 fold_build1 (REALPART_EXPR, itype,
7755 TREE_OPERAND (arg0, 1)));
7756 return fold_convert (type, tem);
/* Re(conj(x)) = Re(x).  */
7758 if (TREE_CODE (arg0) == CONJ_EXPR)
7760 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7761 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7762 return fold_convert (type, tem);
/* Re(cexpi(x)) = cos(x).  */
7764 if (TREE_CODE (arg0) == CALL_EXPR)
7766 tree fn = get_callee_fndecl (arg0);
7767 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7768 switch (DECL_FUNCTION_CODE (fn))
7770 CASE_FLT_FN (BUILT_IN_CEXPI):
7771 fn = mathfn_built_in (type, BUILT_IN_COS);
7772 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
/* IMAGPART_EXPR (case label elided); mirrors the REALPART logic.  */
7780 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7781 return fold_convert (type, integer_zero_node);
7782 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7783 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7784 TREE_OPERAND (arg0, 0));
7785 if (TREE_CODE (arg0) == COMPLEX_CST)
7786 return fold_convert (type, TREE_IMAGPART (arg0));
/* Im(a +- b) = Im(a) +- Im(b).  */
7787 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7789 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7790 tem = fold_build2 (TREE_CODE (arg0), itype,
7791 fold_build1 (IMAGPART_EXPR, itype,
7792 TREE_OPERAND (arg0, 0)),
7793 fold_build1 (IMAGPART_EXPR, itype,
7794 TREE_OPERAND (arg0, 1)));
7795 return fold_convert (type, tem);
/* Im(conj(x)) = -Im(x).  */
7797 if (TREE_CODE (arg0) == CONJ_EXPR)
7799 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7800 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7801 return fold_convert (type, negate_expr (tem));
/* Im(cexpi(x)) = sin(x).  */
7803 if (TREE_CODE (arg0) == CALL_EXPR)
7805 tree fn = get_callee_fndecl (arg0);
7806 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7807 switch (DECL_FUNCTION_CODE (fn))
7809 CASE_FLT_FN (BUILT_IN_CEXPI):
7810 fn = mathfn_built_in (type, BUILT_IN_SIN);
7811 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7820 } /* switch (code) */
7823 /* Fold a binary expression of code CODE and type TYPE with operands
7824 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7825 Return the folded expression if folding is successful. Otherwise,
7826 return NULL_TREE. */
/* NOTE(review): the "else return NULL_TREE" arm and the final return are
   elided in this view.  COMPL_CODE is the dual of CODE; the identities
   below are written for MIN/MAX but apply symmetrically to MAX/MIN.  */
7829 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7831 enum tree_code compl_code;
7833 if (code == MIN_EXPR)
7834 compl_code = MAX_EXPR;
7835 else if (code == MAX_EXPR)
7836 compl_code = MIN_EXPR;
7840 /* MIN (MAX (a, b), b) == b. */
7841 if (TREE_CODE (op0) == compl_code
7842 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7843 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0))
7845 /* MIN (MAX (b, a), b) == b. */
7846 if (TREE_CODE (op0) == compl_code
7847 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7848 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7849 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7851 /* MIN (a, MAX (a, b)) == a. */
7852 if (TREE_CODE (op1) == compl_code
7853 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7854 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7855 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7857 /* MIN (a, MAX (b, a)) == a. */
7858 if (TREE_CODE (op1) == compl_code
7859 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7860 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7861 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7866 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7867 by changing CODE to reduce the magnitude of constants involved in
7868 ARG0 of the comparison.
7869 Returns a canonicalized comparison tree if a simplification was
7870 possible, otherwise returns NULL_TREE. */
/* NOTE(review): several lines (early returns, the code-adjustment
   assignments after each comment, the swap flag handling) are elided in
   this view; each "CST <= arg1 -> CST-1 < arg1" style comment precedes an
   elided statement that updates CODE accordingly.  */
7873 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7874 tree arg0, tree arg1)
7876 enum tree_code code0 = TREE_CODE (arg0);
7877 tree t, cst0 = NULL_TREE;
7881 /* Match A +- CST code arg1 and CST code arg1. */
7882 if (!(((code0 == MINUS_EXPR
7883 || code0 == PLUS_EXPR)
7884 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7885 || code0 == INTEGER_CST))
7888 /* Identify the constant in arg0 and its sign. */
7889 if (code0 == INTEGER_CST)
7892 cst0 = TREE_OPERAND (arg0, 1);
7893 sgn0 = tree_int_cst_sgn (cst0);
7895 /* Overflowed constants and zero will cause problems. */
7896 if (integer_zerop (cst0)
7897 || TREE_OVERFLOW (cst0))
7900 /* See if we can reduce the magnitude of the constant in
7901 arg0 by changing the comparison code. */
7902 if (code0 == INTEGER_CST)
7904 /* CST <= arg1 -> CST-1 < arg1. */
7905 if (code == LE_EXPR && sgn0 == 1)
7907 /* -CST < arg1 -> -CST-1 <= arg1. */
7908 else if (code == LT_EXPR && sgn0 == -1)
7910 /* CST > arg1 -> CST-1 >= arg1. */
7911 else if (code == GT_EXPR && sgn0 == 1)
7913 /* -CST >= arg1 -> -CST-1 > arg1. */
7914 else if (code == GE_EXPR && sgn0 == -1)
7918 /* arg1 code' CST' might be more canonical. */
7923 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7925 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7927 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7928 else if (code == GT_EXPR
7929 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7931 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7932 else if (code == LE_EXPR
7933 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7935 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7936 else if (code == GE_EXPR
7937 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7943 /* Now build the constant reduced in magnitude. */
7944 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7945 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7946 if (code0 != INTEGER_CST)
7947 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7949 /* If swapping might yield to a more canonical form, do so. */
7951 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7953 return fold_build2 (code, type, t, arg1);
7956 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7957 overflow further. Try to decrease the magnitude of constants involved
7958 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7959 and put sole constants at the second argument position.
7960 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
/* NOTE(review): the early "return NULL_TREE" bodies and the check of T
   after the first canonicalization attempt are elided in this view.
   The transformation is only legal when signed overflow is undefined
   for the operand type, hence the TYPE_OVERFLOW_UNDEFINED gate.  */
7963 maybe_canonicalize_comparison (enum tree_code code, tree type,
7964 tree arg0, tree arg1)
7968 /* In principle pointers also have undefined overflow behavior,
7969 but that causes problems elsewhere. */
7970 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
7971 || POINTER_TYPE_P (TREE_TYPE (arg0)))
7974 /* Try canonicalization by simplifying arg0. */
7975 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7979 /* Try canonicalization by simplifying arg1 using the swapped
/* Swap the comparison so the same helper can work on ARG1.  */
7981 code = swap_tree_comparison (code);
7982 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7985 /* Subroutine of fold_binary. This routine performs all of the
7986 transformations that are common to the equality/inequality
7987 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7988 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7989 fold_binary should call fold_binary. Fold a comparison with
7990 tree code CODE and type TYPE with operands OP0 and OP1. Return
7991 the folded comparison or NULL_TREE. */
7994 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7996 tree arg0, arg1, tem;
8001 STRIP_SIGN_NOPS (arg0);
8002 STRIP_SIGN_NOPS (arg1);
8004 tem = fold_relational_const (code, type, arg0, arg1);
8005 if (tem != NULL_TREE)
8008 /* If one arg is a real or integer constant, put it last. */
8009 if (tree_swap_operands_p (arg0, arg1, true))
8010 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8012 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8013 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8014 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8015 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8016 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8017 && (TREE_CODE (arg1) == INTEGER_CST
8018 && !TREE_OVERFLOW (arg1)))
8020 tree const1 = TREE_OPERAND (arg0, 1);
8022 tree variable = TREE_OPERAND (arg0, 0);
8025 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8027 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8028 TREE_TYPE (arg1), const2, const1);
8029 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8030 && (TREE_CODE (lhs) != INTEGER_CST
8031 || !TREE_OVERFLOW (lhs)))
8032 return fold_build2 (code, type, variable, lhs);
8035 /* For comparisons of pointers we can decompose it to a compile time
8036 comparison of the base objects and the offsets into the object.
8037 This requires at least one operand being an ADDR_EXPR to do more
8038 than the operand_equal_p test below. */
8039 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8040 && (TREE_CODE (arg0) == ADDR_EXPR
8041 || TREE_CODE (arg1) == ADDR_EXPR))
8043 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8044 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8045 enum machine_mode mode;
8046 int volatilep, unsignedp;
8047 bool indirect_base0 = false;
8049 /* Get base and offset for the access. Strip ADDR_EXPR for
8050 get_inner_reference, but put it back by stripping INDIRECT_REF
8051 off the base object if possible. */
8053 if (TREE_CODE (arg0) == ADDR_EXPR)
8055 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8056 &bitsize, &bitpos0, &offset0, &mode,
8057 &unsignedp, &volatilep, false);
8058 if (TREE_CODE (base0) == INDIRECT_REF)
8059 base0 = TREE_OPERAND (base0, 0);
8061 indirect_base0 = true;
8065 if (TREE_CODE (arg1) == ADDR_EXPR)
8067 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8068 &bitsize, &bitpos1, &offset1, &mode,
8069 &unsignedp, &volatilep, false);
8070 /* We have to make sure to have an indirect/non-indirect base1
8071 just the same as we did for base0. */
8072 if (TREE_CODE (base1) == INDIRECT_REF
8074 base1 = TREE_OPERAND (base1, 0);
8075 else if (!indirect_base0)
8078 else if (indirect_base0)
8081 /* If we have equivalent bases we might be able to simplify. */
8083 && operand_equal_p (base0, base1, 0))
8085 /* We can fold this expression to a constant if the non-constant
8086 offset parts are equal. */
8087 if (offset0 == offset1
8088 || (offset0 && offset1
8089 && operand_equal_p (offset0, offset1, 0)))
8094 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8096 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8098 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8100 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8102 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8104 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8108 /* We can simplify the comparison to a comparison of the variable
8109 offset parts if the constant offset parts are equal.
8110 Be careful to use signed size type here because otherwise we
8111 mess with array offsets in the wrong way. This is possible
8112 because pointer arithmetic is restricted to retain within an
8113 object and overflow on pointer differences is undefined as of
8114 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8115 else if (bitpos0 == bitpos1)
8117 tree signed_size_type_node;
8118 signed_size_type_node = signed_type_for (size_type_node);
8120 /* By converting to signed size type we cover middle-end pointer
8121 arithmetic which operates on unsigned pointer types of size
8122 type size and ARRAY_REF offsets which are properly sign or
8123 zero extended from their type in case it is narrower than
8125 if (offset0 == NULL_TREE)
8126 offset0 = build_int_cst (signed_size_type_node, 0);
8128 offset0 = fold_convert (signed_size_type_node, offset0);
8129 if (offset1 == NULL_TREE)
8130 offset1 = build_int_cst (signed_size_type_node, 0);
8132 offset1 = fold_convert (signed_size_type_node, offset1);
8134 return fold_build2 (code, type, offset0, offset1);
8139 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8140 same object, then we can fold this to a comparison of the two offsets in
8141 signed size type. This is possible because pointer arithmetic is
8142 restricted to retain within an object and overflow on pointer differences
8143 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8145 We check flag_wrapv directly because pointers types are unsigned,
8146 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8147 normally what we want to avoid certain odd overflow cases, but
8149 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8151 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8153 tree base0, offset0, base1, offset1;
8155 if (extract_array_ref (arg0, &base0, &offset0)
8156 && extract_array_ref (arg1, &base1, &offset1)
8157 && operand_equal_p (base0, base1, 0))
8159 tree signed_size_type_node;
8160 signed_size_type_node = signed_type_for (size_type_node);
8162 /* By converting to signed size type we cover middle-end pointer
8163 arithmetic which operates on unsigned pointer types of size
8164 type size and ARRAY_REF offsets which are properly sign or
8165 zero extended from their type in case it is narrower than
8167 if (offset0 == NULL_TREE)
8168 offset0 = build_int_cst (signed_size_type_node, 0);
8170 offset0 = fold_convert (signed_size_type_node, offset0);
8171 if (offset1 == NULL_TREE)
8172 offset1 = build_int_cst (signed_size_type_node, 0);
8174 offset1 = fold_convert (signed_size_type_node, offset1);
8176 return fold_build2 (code, type, offset0, offset1);
8180 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8181 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8182 the resulting offset is smaller in absolute value than the
8184 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8185 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8186 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8187 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8188 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8189 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8190 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8192 tree const1 = TREE_OPERAND (arg0, 1);
8193 tree const2 = TREE_OPERAND (arg1, 1);
8194 tree variable1 = TREE_OPERAND (arg0, 0);
8195 tree variable2 = TREE_OPERAND (arg1, 0);
8198 /* Put the constant on the side where it doesn't overflow and is
8199 of lower absolute value than before. */
8200 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8201 ? MINUS_EXPR : PLUS_EXPR,
8203 if (!TREE_OVERFLOW (cst)
8204 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8205 return fold_build2 (code, type,
8207 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8210 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8211 ? MINUS_EXPR : PLUS_EXPR,
8213 if (!TREE_OVERFLOW (cst)
8214 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8215 return fold_build2 (code, type,
8216 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8221 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8222 signed arithmetic case. That form is created by the compiler
8223 often enough for folding it to be of value. One example is in
8224 computing loop trip counts after Operator Strength Reduction. */
8225 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8226 && TREE_CODE (arg0) == MULT_EXPR
8227 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8228 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8229 && integer_zerop (arg1))
8231 tree const1 = TREE_OPERAND (arg0, 1);
8232 tree const2 = arg1; /* zero */
8233 tree variable1 = TREE_OPERAND (arg0, 0);
8234 enum tree_code cmp_code = code;
8236 gcc_assert (!integer_zerop (const1));
8238 /* If const1 is negative we swap the sense of the comparison. */
8239 if (tree_int_cst_sgn (const1) < 0)
8240 cmp_code = swap_tree_comparison (cmp_code);
8242 return fold_build2 (cmp_code, type, variable1, const2);
8245 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8249 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8251 tree targ0 = strip_float_extensions (arg0);
8252 tree targ1 = strip_float_extensions (arg1);
8253 tree newtype = TREE_TYPE (targ0);
8255 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8256 newtype = TREE_TYPE (targ1);
8258 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8259 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8260 return fold_build2 (code, type, fold_convert (newtype, targ0),
8261 fold_convert (newtype, targ1));
8263 /* (-a) CMP (-b) -> b CMP a */
8264 if (TREE_CODE (arg0) == NEGATE_EXPR
8265 && TREE_CODE (arg1) == NEGATE_EXPR)
8266 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8267 TREE_OPERAND (arg0, 0));
8269 if (TREE_CODE (arg1) == REAL_CST)
8271 REAL_VALUE_TYPE cst;
8272 cst = TREE_REAL_CST (arg1);
8274 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8275 if (TREE_CODE (arg0) == NEGATE_EXPR)
8276 return fold_build2 (swap_tree_comparison (code), type,
8277 TREE_OPERAND (arg0, 0),
8278 build_real (TREE_TYPE (arg1),
8279 REAL_VALUE_NEGATE (cst)));
8281 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8282 /* a CMP (-0) -> a CMP 0 */
8283 if (REAL_VALUE_MINUS_ZERO (cst))
8284 return fold_build2 (code, type, arg0,
8285 build_real (TREE_TYPE (arg1), dconst0));
8287 /* x != NaN is always true, other ops are always false. */
8288 if (REAL_VALUE_ISNAN (cst)
8289 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8291 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8292 return omit_one_operand (type, tem, arg0);
8295 /* Fold comparisons against infinity. */
8296 if (REAL_VALUE_ISINF (cst))
8298 tem = fold_inf_compare (code, type, arg0, arg1);
8299 if (tem != NULL_TREE)
8304 /* If this is a comparison of a real constant with a PLUS_EXPR
8305 or a MINUS_EXPR of a real constant, we can convert it into a
8306 comparison with a revised real constant as long as no overflow
8307 occurs when unsafe_math_optimizations are enabled. */
8308 if (flag_unsafe_math_optimizations
8309 && TREE_CODE (arg1) == REAL_CST
8310 && (TREE_CODE (arg0) == PLUS_EXPR
8311 || TREE_CODE (arg0) == MINUS_EXPR)
8312 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8313 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8314 ? MINUS_EXPR : PLUS_EXPR,
8315 arg1, TREE_OPERAND (arg0, 1), 0))
8316 && !TREE_OVERFLOW (tem))
8317 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8319 /* Likewise, we can simplify a comparison of a real constant with
8320 a MINUS_EXPR whose first operand is also a real constant, i.e.
8321 (c1 - x) < c2 becomes x > c1-c2. */
8322 if (flag_unsafe_math_optimizations
8323 && TREE_CODE (arg1) == REAL_CST
8324 && TREE_CODE (arg0) == MINUS_EXPR
8325 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8326 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8328 && !TREE_OVERFLOW (tem))
8329 return fold_build2 (swap_tree_comparison (code), type,
8330 TREE_OPERAND (arg0, 1), tem);
8332 /* Fold comparisons against built-in math functions. */
8333 if (TREE_CODE (arg1) == REAL_CST
8334 && flag_unsafe_math_optimizations
8335 && ! flag_errno_math)
8337 enum built_in_function fcode = builtin_mathfn_code (arg0);
8339 if (fcode != END_BUILTINS)
8341 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8342 if (tem != NULL_TREE)
8348 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8349 if (TREE_CONSTANT (arg1)
8350 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8351 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8352 /* This optimization is invalid for ordered comparisons
8353 if CONST+INCR overflows or if foo+incr might overflow.
8354 This optimization is invalid for floating point due to rounding.
8355 For pointer types we assume overflow doesn't happen. */
8356 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8357 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8358 && (code == EQ_EXPR || code == NE_EXPR))))
8360 tree varop, newconst;
8362 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8364 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8365 arg1, TREE_OPERAND (arg0, 1));
8366 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8367 TREE_OPERAND (arg0, 0),
8368 TREE_OPERAND (arg0, 1));
8372 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8373 arg1, TREE_OPERAND (arg0, 1));
8374 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8375 TREE_OPERAND (arg0, 0),
8376 TREE_OPERAND (arg0, 1));
8380 /* If VAROP is a reference to a bitfield, we must mask
8381 the constant by the width of the field. */
8382 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8383 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8384 && host_integerp (DECL_SIZE (TREE_OPERAND
8385 (TREE_OPERAND (varop, 0), 1)), 1))
8387 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8388 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8389 tree folded_compare, shift;
8391 /* First check whether the comparison would come out
8392 always the same. If we don't do that we would
8393 change the meaning with the masking. */
8394 folded_compare = fold_build2 (code, type,
8395 TREE_OPERAND (varop, 0), arg1);
8396 if (TREE_CODE (folded_compare) == INTEGER_CST)
8397 return omit_one_operand (type, folded_compare, varop);
8399 shift = build_int_cst (NULL_TREE,
8400 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8401 shift = fold_convert (TREE_TYPE (varop), shift);
8402 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8404 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8408 return fold_build2 (code, type, varop, newconst);
8411 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8412 && (TREE_CODE (arg0) == NOP_EXPR
8413 || TREE_CODE (arg0) == CONVERT_EXPR))
8415 /* If we are widening one operand of an integer comparison,
8416 see if the other operand is similarly being widened. Perhaps we
8417 can do the comparison in the narrower type. */
8418 tem = fold_widened_comparison (code, type, arg0, arg1);
8422 /* Or if we are changing signedness. */
8423 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8428 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8429 constant, we can simplify it. */
8430 if (TREE_CODE (arg1) == INTEGER_CST
8431 && (TREE_CODE (arg0) == MIN_EXPR
8432 || TREE_CODE (arg0) == MAX_EXPR)
8433 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8435 tem = optimize_minmax_comparison (code, type, op0, op1);
8440 /* Simplify comparison of something with itself. (For IEEE
8441 floating-point, we can only do some of these simplifications.) */
8442 if (operand_equal_p (arg0, arg1, 0))
8447 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8448 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8449 return constant_boolean_node (1, type);
8454 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8455 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8456 return constant_boolean_node (1, type);
8457 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8460 /* For NE, we can only do this simplification if integer
8461 or we don't honor IEEE floating point NaNs. */
8462 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8463 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8465 /* ... fall through ... */
8468 return constant_boolean_node (0, type);
8474 /* If we are comparing an expression that just has comparisons
8475 of two integer values, arithmetic expressions of those comparisons,
8476 and constants, we can simplify it. There are only three cases
8477 to check: the two values can either be equal, the first can be
8478 greater, or the second can be greater. Fold the expression for
8479 those three values. Since each value must be 0 or 1, we have
8480 eight possibilities, each of which corresponds to the constant 0
8481 or 1 or one of the six possible comparisons.
8483 This handles common cases like (a > b) == 0 but also handles
8484 expressions like ((x > y) - (y > x)) > 0, which supposedly
8485 occur in macroized code. */
8487 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8489 tree cval1 = 0, cval2 = 0;
8492 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8493 /* Don't handle degenerate cases here; they should already
8494 have been handled anyway. */
8495 && cval1 != 0 && cval2 != 0
8496 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8497 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8498 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8499 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8500 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8501 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8502 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8504 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8505 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8507 /* We can't just pass T to eval_subst in case cval1 or cval2
8508 was the same as ARG1. */
8511 = fold_build2 (code, type,
8512 eval_subst (arg0, cval1, maxval,
8516 = fold_build2 (code, type,
8517 eval_subst (arg0, cval1, maxval,
8521 = fold_build2 (code, type,
8522 eval_subst (arg0, cval1, minval,
8526 /* All three of these results should be 0 or 1. Confirm they are.
8527 Then use those values to select the proper code to use. */
8529 if (TREE_CODE (high_result) == INTEGER_CST
8530 && TREE_CODE (equal_result) == INTEGER_CST
8531 && TREE_CODE (low_result) == INTEGER_CST)
8533 /* Make a 3-bit mask with the high-order bit being the
8534 value for `>', the next for '=', and the low for '<'. */
8535 switch ((integer_onep (high_result) * 4)
8536 + (integer_onep (equal_result) * 2)
8537 + integer_onep (low_result))
8541 return omit_one_operand (type, integer_zero_node, arg0);
8562 return omit_one_operand (type, integer_one_node, arg0);
8566 return save_expr (build2 (code, type, cval1, cval2));
8567 return fold_build2 (code, type, cval1, cval2);
8572 /* Fold a comparison of the address of COMPONENT_REFs with the same
8573 type and component to a comparison of the address of the base
8574 object. In short, &x->a OP &y->a to x OP y and
8575 &x->a OP &y.a to x OP &y */
8576 if (TREE_CODE (arg0) == ADDR_EXPR
8577 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8578 && TREE_CODE (arg1) == ADDR_EXPR
8579 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8581 tree cref0 = TREE_OPERAND (arg0, 0);
8582 tree cref1 = TREE_OPERAND (arg1, 0);
8583 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8585 tree op0 = TREE_OPERAND (cref0, 0);
8586 tree op1 = TREE_OPERAND (cref1, 0);
8587 return fold_build2 (code, type,
8588 build_fold_addr_expr (op0),
8589 build_fold_addr_expr (op1));
8593 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8594 into a single range test. */
8595 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8596 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8597 && TREE_CODE (arg1) == INTEGER_CST
8598 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8599 && !integer_zerop (TREE_OPERAND (arg0, 1))
8600 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8601 && !TREE_OVERFLOW (arg1))
8603 tem = fold_div_compare (code, type, arg0, arg1);
8604 if (tem != NULL_TREE)
8608 /* Fold ~X op ~Y as Y op X. */
8609 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8610 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8611 return fold_build2 (code, type,
8612 TREE_OPERAND (arg1, 0),
8613 TREE_OPERAND (arg0, 0));
8615 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8616 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8617 && TREE_CODE (arg1) == INTEGER_CST)
8618 return fold_build2 (swap_tree_comparison (code), type,
8619 TREE_OPERAND (arg0, 0),
8620 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8626 /* Subroutine of fold_binary. Optimize complex multiplications of the
8627 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8628 argument EXPR represents the expression "z" of type TYPE. */
8631 fold_mult_zconjz (tree type, tree expr)
8633 tree itype = TREE_TYPE (type);
8634 tree rpart, ipart, tem;
8636 if (TREE_CODE (expr) == COMPLEX_EXPR)
8638 rpart = TREE_OPERAND (expr, 0);
8639 ipart = TREE_OPERAND (expr, 1);
8641 else if (TREE_CODE (expr) == COMPLEX_CST)
8643 rpart = TREE_REALPART (expr);
8644 ipart = TREE_IMAGPART (expr);
8648 expr = save_expr (expr);
8649 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8650 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8653 rpart = save_expr (rpart);
8654 ipart = save_expr (ipart);
8655 tem = fold_build2 (PLUS_EXPR, itype,
8656 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8657 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8658 return fold_build2 (COMPLEX_EXPR, type, tem,
8659 fold_convert (itype, integer_zero_node));
8663 /* Fold a binary expression of code CODE and type TYPE with operands
8664 OP0 and OP1. Return the folded expression if folding is
8665 successful. Otherwise, return NULL_TREE. */
8668 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8670 enum tree_code_class kind = TREE_CODE_CLASS (code);
8671 tree arg0, arg1, tem;
8672 tree t1 = NULL_TREE;
8674 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8675 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8676 && TREE_CODE_LENGTH (code) == 2
8678 && op1 != NULL_TREE);
8683 /* Strip any conversions that don't change the mode. This is
8684 safe for every expression, except for a comparison expression
8685 because its signedness is derived from its operands. So, in
8686 the latter case, only strip conversions that don't change the
8689 Note that this is done as an internal manipulation within the
8690 constant folder, in order to find the simplest representation
8691 of the arguments so that their form can be studied. In any
8692 cases, the appropriate type conversions should be put back in
8693 the tree that will get out of the constant folder. */
8695 if (kind == tcc_comparison)
8697 STRIP_SIGN_NOPS (arg0);
8698 STRIP_SIGN_NOPS (arg1);
8706 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8707 constant but we can't do arithmetic on them. */
8708 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8709 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8710 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8711 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8713 if (kind == tcc_binary)
8714 tem = const_binop (code, arg0, arg1, 0);
8715 else if (kind == tcc_comparison)
8716 tem = fold_relational_const (code, type, arg0, arg1);
8720 if (tem != NULL_TREE)
8722 if (TREE_TYPE (tem) != type)
8723 tem = fold_convert (type, tem);
8728 /* If this is a commutative operation, and ARG0 is a constant, move it
8729 to ARG1 to reduce the number of tests below. */
8730 if (commutative_tree_code (code)
8731 && tree_swap_operands_p (arg0, arg1, true))
8732 return fold_build2 (code, type, op1, op0);
8734 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8736 First check for cases where an arithmetic operation is applied to a
8737 compound, conditional, or comparison operation. Push the arithmetic
8738 operation inside the compound or conditional to see if any folding
8739 can then be done. Convert comparison to conditional for this purpose.
8740 The also optimizes non-constant cases that used to be done in
8743 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8744 one of the operands is a comparison and the other is a comparison, a
8745 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8746 code below would make the expression more complex. Change it to a
8747 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8748 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8750 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8751 || code == EQ_EXPR || code == NE_EXPR)
8752 && ((truth_value_p (TREE_CODE (arg0))
8753 && (truth_value_p (TREE_CODE (arg1))
8754 || (TREE_CODE (arg1) == BIT_AND_EXPR
8755 && integer_onep (TREE_OPERAND (arg1, 1)))))
8756 || (truth_value_p (TREE_CODE (arg1))
8757 && (truth_value_p (TREE_CODE (arg0))
8758 || (TREE_CODE (arg0) == BIT_AND_EXPR
8759 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8761 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8762 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8765 fold_convert (boolean_type_node, arg0),
8766 fold_convert (boolean_type_node, arg1));
8768 if (code == EQ_EXPR)
8769 tem = invert_truthvalue (tem);
8771 return fold_convert (type, tem);
8774 if (TREE_CODE_CLASS (code) == tcc_binary
8775 || TREE_CODE_CLASS (code) == tcc_comparison)
8777 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8778 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8779 fold_build2 (code, type,
8780 TREE_OPERAND (arg0, 1), op1));
8781 if (TREE_CODE (arg1) == COMPOUND_EXPR
8782 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8783 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8784 fold_build2 (code, type,
8785 op0, TREE_OPERAND (arg1, 1)));
8787 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8789 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8791 /*cond_first_p=*/1);
8792 if (tem != NULL_TREE)
8796 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8798 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8800 /*cond_first_p=*/0);
8801 if (tem != NULL_TREE)
8809 /* A + (-B) -> A - B */
8810 if (TREE_CODE (arg1) == NEGATE_EXPR)
8811 return fold_build2 (MINUS_EXPR, type,
8812 fold_convert (type, arg0),
8813 fold_convert (type, TREE_OPERAND (arg1, 0)));
8814 /* (-A) + B -> B - A */
8815 if (TREE_CODE (arg0) == NEGATE_EXPR
8816 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8817 return fold_build2 (MINUS_EXPR, type,
8818 fold_convert (type, arg1),
8819 fold_convert (type, TREE_OPERAND (arg0, 0)));
8820 /* Convert ~A + 1 to -A. */
8821 if (INTEGRAL_TYPE_P (type)
8822 && TREE_CODE (arg0) == BIT_NOT_EXPR
8823 && integer_onep (arg1))
8824 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8826 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8828 if ((TREE_CODE (arg0) == MULT_EXPR
8829 || TREE_CODE (arg1) == MULT_EXPR)
8830 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8832 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8837 if (! FLOAT_TYPE_P (type))
8839 if (integer_zerop (arg1))
8840 return non_lvalue (fold_convert (type, arg0));
8843 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8844 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8845 && !TYPE_OVERFLOW_TRAPS (type))
8847 t1 = build_int_cst_type (type, -1);
8848 return omit_one_operand (type, t1, arg1);
8852 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8853 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8854 && !TYPE_OVERFLOW_TRAPS (type))
8856 t1 = build_int_cst_type (type, -1);
8857 return omit_one_operand (type, t1, arg0);
8860 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8861 with a constant, and the two constants have no bits in common,
8862 we should treat this as a BIT_IOR_EXPR since this may produce more
8864 if (TREE_CODE (arg0) == BIT_AND_EXPR
8865 && TREE_CODE (arg1) == BIT_AND_EXPR
8866 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8867 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8868 && integer_zerop (const_binop (BIT_AND_EXPR,
8869 TREE_OPERAND (arg0, 1),
8870 TREE_OPERAND (arg1, 1), 0)))
8872 code = BIT_IOR_EXPR;
8876 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8877 (plus (plus (mult) (mult)) (foo)) so that we can
8878 take advantage of the factoring cases below. */
8879 if (((TREE_CODE (arg0) == PLUS_EXPR
8880 || TREE_CODE (arg0) == MINUS_EXPR)
8881 && TREE_CODE (arg1) == MULT_EXPR)
8882 || ((TREE_CODE (arg1) == PLUS_EXPR
8883 || TREE_CODE (arg1) == MINUS_EXPR)
8884 && TREE_CODE (arg0) == MULT_EXPR))
8886 tree parg0, parg1, parg, marg;
8887 enum tree_code pcode;
8889 if (TREE_CODE (arg1) == MULT_EXPR)
8890 parg = arg0, marg = arg1;
8892 parg = arg1, marg = arg0;
8893 pcode = TREE_CODE (parg);
8894 parg0 = TREE_OPERAND (parg, 0);
8895 parg1 = TREE_OPERAND (parg, 1);
8899 if (TREE_CODE (parg0) == MULT_EXPR
8900 && TREE_CODE (parg1) != MULT_EXPR)
8901 return fold_build2 (pcode, type,
8902 fold_build2 (PLUS_EXPR, type,
8903 fold_convert (type, parg0),
8904 fold_convert (type, marg)),
8905 fold_convert (type, parg1));
8906 if (TREE_CODE (parg0) != MULT_EXPR
8907 && TREE_CODE (parg1) == MULT_EXPR)
8908 return fold_build2 (PLUS_EXPR, type,
8909 fold_convert (type, parg0),
8910 fold_build2 (pcode, type,
8911 fold_convert (type, marg),
8916 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8917 of the array. Loop optimizer sometimes produce this type of
8919 if (TREE_CODE (arg0) == ADDR_EXPR)
8921 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8923 return fold_convert (type, tem);
8925 else if (TREE_CODE (arg1) == ADDR_EXPR)
8927 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8929 return fold_convert (type, tem);
8934 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8935 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8936 return non_lvalue (fold_convert (type, arg0));
8938 /* Likewise if the operands are reversed. */
8939 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8940 return non_lvalue (fold_convert (type, arg1));
8942 /* Convert X + -C into X - C. */
8943 if (TREE_CODE (arg1) == REAL_CST
8944 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8946 tem = fold_negate_const (arg1, type);
8947 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8948 return fold_build2 (MINUS_EXPR, type,
8949 fold_convert (type, arg0),
8950 fold_convert (type, tem));
8953 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
8954 to __complex__ ( x, y ). This is not the same for SNaNs or
8955 if signed zeros are involved. */
8956 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8957 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8958 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
8960 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
8961 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
8962 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
8963 bool arg0rz = false, arg0iz = false;
8964 if ((arg0r && (arg0rz = real_zerop (arg0r)))
8965 || (arg0i && (arg0iz = real_zerop (arg0i))))
8967 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
8968 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
8969 if (arg0rz && arg1i && real_zerop (arg1i))
8971 tree rp = arg1r ? arg1r
8972 : build1 (REALPART_EXPR, rtype, arg1);
8973 tree ip = arg0i ? arg0i
8974 : build1 (IMAGPART_EXPR, rtype, arg0);
8975 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8977 else if (arg0iz && arg1r && real_zerop (arg1r))
8979 tree rp = arg0r ? arg0r
8980 : build1 (REALPART_EXPR, rtype, arg0);
8981 tree ip = arg1i ? arg1i
8982 : build1 (IMAGPART_EXPR, rtype, arg1);
8983 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8988 if (flag_unsafe_math_optimizations
8989 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8990 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8991 && (tem = distribute_real_division (code, type, arg0, arg1)))
8994 /* Convert x+x into x*2.0. */
8995 if (operand_equal_p (arg0, arg1, 0)
8996 && SCALAR_FLOAT_TYPE_P (type))
8997 return fold_build2 (MULT_EXPR, type, arg0,
8998 build_real (type, dconst2));
9000 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9001 if (flag_unsafe_math_optimizations
9002 && TREE_CODE (arg1) == PLUS_EXPR
9003 && TREE_CODE (arg0) != MULT_EXPR)
9005 tree tree10 = TREE_OPERAND (arg1, 0);
9006 tree tree11 = TREE_OPERAND (arg1, 1);
9007 if (TREE_CODE (tree11) == MULT_EXPR
9008 && TREE_CODE (tree10) == MULT_EXPR)
9011 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9012 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9015 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
9016 if (flag_unsafe_math_optimizations
9017 && TREE_CODE (arg0) == PLUS_EXPR
9018 && TREE_CODE (arg1) != MULT_EXPR)
9020 tree tree00 = TREE_OPERAND (arg0, 0);
9021 tree tree01 = TREE_OPERAND (arg0, 1);
9022 if (TREE_CODE (tree01) == MULT_EXPR
9023 && TREE_CODE (tree00) == MULT_EXPR)
9026 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9027 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9033 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9034 is a rotate of A by C1 bits. */
9035 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9036 is a rotate of A by B bits. */
9038 enum tree_code code0, code1;
9039 code0 = TREE_CODE (arg0);
9040 code1 = TREE_CODE (arg1);
9041 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9042 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9043 && operand_equal_p (TREE_OPERAND (arg0, 0),
9044 TREE_OPERAND (arg1, 0), 0)
9045 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9047 tree tree01, tree11;
9048 enum tree_code code01, code11;
9050 tree01 = TREE_OPERAND (arg0, 1);
9051 tree11 = TREE_OPERAND (arg1, 1);
9052 STRIP_NOPS (tree01);
9053 STRIP_NOPS (tree11);
9054 code01 = TREE_CODE (tree01);
9055 code11 = TREE_CODE (tree11);
9056 if (code01 == INTEGER_CST
9057 && code11 == INTEGER_CST
9058 && TREE_INT_CST_HIGH (tree01) == 0
9059 && TREE_INT_CST_HIGH (tree11) == 0
9060 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9061 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9062 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9063 code0 == LSHIFT_EXPR ? tree01 : tree11);
9064 else if (code11 == MINUS_EXPR)
9066 tree tree110, tree111;
9067 tree110 = TREE_OPERAND (tree11, 0);
9068 tree111 = TREE_OPERAND (tree11, 1);
9069 STRIP_NOPS (tree110);
9070 STRIP_NOPS (tree111);
9071 if (TREE_CODE (tree110) == INTEGER_CST
9072 && 0 == compare_tree_int (tree110,
9074 (TREE_TYPE (TREE_OPERAND
9076 && operand_equal_p (tree01, tree111, 0))
9077 return build2 ((code0 == LSHIFT_EXPR
9080 type, TREE_OPERAND (arg0, 0), tree01);
9082 else if (code01 == MINUS_EXPR)
9084 tree tree010, tree011;
9085 tree010 = TREE_OPERAND (tree01, 0);
9086 tree011 = TREE_OPERAND (tree01, 1);
9087 STRIP_NOPS (tree010);
9088 STRIP_NOPS (tree011);
9089 if (TREE_CODE (tree010) == INTEGER_CST
9090 && 0 == compare_tree_int (tree010,
9092 (TREE_TYPE (TREE_OPERAND
9094 && operand_equal_p (tree11, tree011, 0))
9095 return build2 ((code0 != LSHIFT_EXPR
9098 type, TREE_OPERAND (arg0, 0), tree11);
9104 /* In most languages, can't associate operations on floats through
9105 parentheses. Rather than remember where the parentheses were, we
9106 don't associate floats at all, unless the user has specified
9107 -funsafe-math-optimizations. */
9109 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9111 tree var0, con0, lit0, minus_lit0;
9112 tree var1, con1, lit1, minus_lit1;
9114 /* Split both trees into variables, constants, and literals. Then
9115 associate each group together, the constants with literals,
9116 then the result with variables. This increases the chances of
9117 literals being recombined later and of generating relocatable
9118 expressions for the sum of a constant and literal. */
9119 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9120 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9121 code == MINUS_EXPR);
9123 /* Only do something if we found more than two objects. Otherwise,
9124 nothing has changed and we risk infinite recursion. */
9125 if (2 < ((var0 != 0) + (var1 != 0)
9126 + (con0 != 0) + (con1 != 0)
9127 + (lit0 != 0) + (lit1 != 0)
9128 + (minus_lit0 != 0) + (minus_lit1 != 0)))
9130 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9131 if (code == MINUS_EXPR)
9134 var0 = associate_trees (var0, var1, code, type);
9135 con0 = associate_trees (con0, con1, code, type);
9136 lit0 = associate_trees (lit0, lit1, code, type);
9137 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9139 /* Preserve the MINUS_EXPR if the negative part of the literal is
9140 greater than the positive part. Otherwise, the multiplicative
9141 folding code (i.e extract_muldiv) may be fooled in case
9142 unsigned constants are subtracted, like in the following
9143 example: ((X*2 + 4) - 8U)/2. */
9144 if (minus_lit0 && lit0)
9146 if (TREE_CODE (lit0) == INTEGER_CST
9147 && TREE_CODE (minus_lit0) == INTEGER_CST
9148 && tree_int_cst_lt (lit0, minus_lit0))
9150 minus_lit0 = associate_trees (minus_lit0, lit0,
9156 lit0 = associate_trees (lit0, minus_lit0,
9164 return fold_convert (type,
9165 associate_trees (var0, minus_lit0,
9169 con0 = associate_trees (con0, minus_lit0,
9171 return fold_convert (type,
9172 associate_trees (var0, con0,
9177 con0 = associate_trees (con0, lit0, code, type);
9178 return fold_convert (type, associate_trees (var0, con0,
9186 /* A - (-B) -> A + B */
9187 if (TREE_CODE (arg1) == NEGATE_EXPR)
9188 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9189 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9190 if (TREE_CODE (arg0) == NEGATE_EXPR
9191 && (FLOAT_TYPE_P (type)
9192 || INTEGRAL_TYPE_P (type))
9193 && negate_expr_p (arg1)
9194 && reorder_operands_p (arg0, arg1))
9195 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9196 TREE_OPERAND (arg0, 0));
9197 /* Convert -A - 1 to ~A. */
9198 if (INTEGRAL_TYPE_P (type)
9199 && TREE_CODE (arg0) == NEGATE_EXPR
9200 && integer_onep (arg1)
9201 && !TYPE_OVERFLOW_TRAPS (type))
9202 return fold_build1 (BIT_NOT_EXPR, type,
9203 fold_convert (type, TREE_OPERAND (arg0, 0)));
9205 /* Convert -1 - A to ~A. */
9206 if (INTEGRAL_TYPE_P (type)
9207 && integer_all_onesp (arg0))
9208 return fold_build1 (BIT_NOT_EXPR, type, op1);
9210 if (! FLOAT_TYPE_P (type))
9212 if (integer_zerop (arg0))
9213 return negate_expr (fold_convert (type, arg1));
9214 if (integer_zerop (arg1))
9215 return non_lvalue (fold_convert (type, arg0));
9217 /* Fold A - (A & B) into ~B & A. */
9218 if (!TREE_SIDE_EFFECTS (arg0)
9219 && TREE_CODE (arg1) == BIT_AND_EXPR)
9221 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9222 return fold_build2 (BIT_AND_EXPR, type,
9223 fold_build1 (BIT_NOT_EXPR, type,
9224 TREE_OPERAND (arg1, 0)),
9226 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9227 return fold_build2 (BIT_AND_EXPR, type,
9228 fold_build1 (BIT_NOT_EXPR, type,
9229 TREE_OPERAND (arg1, 1)),
9233 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9234 any power of 2 minus 1. */
9235 if (TREE_CODE (arg0) == BIT_AND_EXPR
9236 && TREE_CODE (arg1) == BIT_AND_EXPR
9237 && operand_equal_p (TREE_OPERAND (arg0, 0),
9238 TREE_OPERAND (arg1, 0), 0))
9240 tree mask0 = TREE_OPERAND (arg0, 1);
9241 tree mask1 = TREE_OPERAND (arg1, 1);
9242 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9244 if (operand_equal_p (tem, mask1, 0))
9246 tem = fold_build2 (BIT_XOR_EXPR, type,
9247 TREE_OPERAND (arg0, 0), mask1);
9248 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9253 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9254 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9255 return non_lvalue (fold_convert (type, arg0));
9257 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9258 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9259 (-ARG1 + ARG0) reduces to -ARG1. */
9260 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9261 return negate_expr (fold_convert (type, arg1));
9263 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9264 __complex__ ( x, -y ). This is not the same for SNaNs or if
9265 signed zeros are involved. */
9266 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9267 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9268 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9270 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9271 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9272 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9273 bool arg0rz = false, arg0iz = false;
9274 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9275 || (arg0i && (arg0iz = real_zerop (arg0i))))
9277 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9278 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9279 if (arg0rz && arg1i && real_zerop (arg1i))
9281 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9283 : build1 (REALPART_EXPR, rtype, arg1));
9284 tree ip = arg0i ? arg0i
9285 : build1 (IMAGPART_EXPR, rtype, arg0);
9286 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9288 else if (arg0iz && arg1r && real_zerop (arg1r))
9290 tree rp = arg0r ? arg0r
9291 : build1 (REALPART_EXPR, rtype, arg0);
9292 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9294 : build1 (IMAGPART_EXPR, rtype, arg1));
9295 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9300 /* Fold &x - &x. This can happen from &x.foo - &x.
9301 This is unsafe for certain floats even in non-IEEE formats.
9302 In IEEE, it is unsafe because it does wrong for NaNs.
9303 Also note that operand_equal_p is always false if an operand
9306 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9307 && operand_equal_p (arg0, arg1, 0))
9308 return fold_convert (type, integer_zero_node);
9310 /* A - B -> A + (-B) if B is easily negatable. */
9311 if (negate_expr_p (arg1)
9312 && ((FLOAT_TYPE_P (type)
9313 /* Avoid this transformation if B is a positive REAL_CST. */
9314 && (TREE_CODE (arg1) != REAL_CST
9315 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9316 || INTEGRAL_TYPE_P (type)))
9317 return fold_build2 (PLUS_EXPR, type,
9318 fold_convert (type, arg0),
9319 fold_convert (type, negate_expr (arg1)));
9321 /* Try folding difference of addresses. */
9325 if ((TREE_CODE (arg0) == ADDR_EXPR
9326 || TREE_CODE (arg1) == ADDR_EXPR)
9327 && ptr_difference_const (arg0, arg1, &diff))
9328 return build_int_cst_type (type, diff);
9331 /* Fold &a[i] - &a[j] to i-j. */
9332 if (TREE_CODE (arg0) == ADDR_EXPR
9333 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9334 && TREE_CODE (arg1) == ADDR_EXPR
9335 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9337 tree aref0 = TREE_OPERAND (arg0, 0);
9338 tree aref1 = TREE_OPERAND (arg1, 0);
9339 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9340 TREE_OPERAND (aref1, 0), 0))
9342 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9343 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9344 tree esz = array_ref_element_size (aref0);
9345 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9346 return fold_build2 (MULT_EXPR, type, diff,
9347 fold_convert (type, esz));
9352 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9353 of the array. Loop optimizer sometimes produce this type of
9355 if (TREE_CODE (arg0) == ADDR_EXPR)
9357 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9359 return fold_convert (type, tem);
9362 if (flag_unsafe_math_optimizations
9363 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9364 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9365 && (tem = distribute_real_division (code, type, arg0, arg1)))
9368 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9370 if ((TREE_CODE (arg0) == MULT_EXPR
9371 || TREE_CODE (arg1) == MULT_EXPR)
9372 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9374 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9382 /* (-A) * (-B) -> A * B */
9383 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9384 return fold_build2 (MULT_EXPR, type,
9385 fold_convert (type, TREE_OPERAND (arg0, 0)),
9386 fold_convert (type, negate_expr (arg1)));
9387 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9388 return fold_build2 (MULT_EXPR, type,
9389 fold_convert (type, negate_expr (arg0)),
9390 fold_convert (type, TREE_OPERAND (arg1, 0)));
9392 if (! FLOAT_TYPE_P (type))
9394 if (integer_zerop (arg1))
9395 return omit_one_operand (type, arg1, arg0);
9396 if (integer_onep (arg1))
9397 return non_lvalue (fold_convert (type, arg0));
9398 /* Transform x * -1 into -x. */
9399 if (integer_all_onesp (arg1))
9400 return fold_convert (type, negate_expr (arg0));
9401 /* Transform x * -C into -x * C if x is easily negatable. */
9402 if (TREE_CODE (arg1) == INTEGER_CST
9403 && tree_int_cst_sgn (arg1) == -1
9404 && negate_expr_p (arg0)
9405 && (tem = negate_expr (arg1)) != arg1
9406 && !TREE_OVERFLOW (tem))
9407 return fold_build2 (MULT_EXPR, type,
9408 negate_expr (arg0), tem);
9410 /* (a * (1 << b)) is (a << b) */
9411 if (TREE_CODE (arg1) == LSHIFT_EXPR
9412 && integer_onep (TREE_OPERAND (arg1, 0)))
9413 return fold_build2 (LSHIFT_EXPR, type, arg0,
9414 TREE_OPERAND (arg1, 1));
9415 if (TREE_CODE (arg0) == LSHIFT_EXPR
9416 && integer_onep (TREE_OPERAND (arg0, 0)))
9417 return fold_build2 (LSHIFT_EXPR, type, arg1,
9418 TREE_OPERAND (arg0, 1));
9420 if (TREE_CODE (arg1) == INTEGER_CST
9421 && 0 != (tem = extract_muldiv (op0,
9422 fold_convert (type, arg1),
9424 return fold_convert (type, tem);
9426 /* Optimize z * conj(z) for integer complex numbers. */
9427 if (TREE_CODE (arg0) == CONJ_EXPR
9428 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9429 return fold_mult_zconjz (type, arg1);
9430 if (TREE_CODE (arg1) == CONJ_EXPR
9431 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9432 return fold_mult_zconjz (type, arg0);
9436 /* Maybe fold x * 0 to 0. The expressions aren't the same
9437 when x is NaN, since x * 0 is also NaN. Nor are they the
9438 same in modes with signed zeros, since multiplying a
9439 negative value by 0 gives -0, not +0. */
9440 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9441 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9442 && real_zerop (arg1))
9443 return omit_one_operand (type, arg1, arg0);
9444 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9445 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9446 && real_onep (arg1))
9447 return non_lvalue (fold_convert (type, arg0));
9449 /* Transform x * -1.0 into -x. */
9450 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9451 && real_minus_onep (arg1))
9452 return fold_convert (type, negate_expr (arg0));
9454 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9455 if (flag_unsafe_math_optimizations
9456 && TREE_CODE (arg0) == RDIV_EXPR
9457 && TREE_CODE (arg1) == REAL_CST
9458 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9460 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9463 return fold_build2 (RDIV_EXPR, type, tem,
9464 TREE_OPERAND (arg0, 1));
9467 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9468 if (operand_equal_p (arg0, arg1, 0))
9470 tree tem = fold_strip_sign_ops (arg0);
9471 if (tem != NULL_TREE)
9473 tem = fold_convert (type, tem);
9474 return fold_build2 (MULT_EXPR, type, tem, tem);
9478 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9479 This is not the same for NaNs or if signed zeros are
9481 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9482 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9483 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9484 && TREE_CODE (arg1) == COMPLEX_CST
9485 && real_zerop (TREE_REALPART (arg1)))
9487 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9488 if (real_onep (TREE_IMAGPART (arg1)))
9489 return fold_build2 (COMPLEX_EXPR, type,
9490 negate_expr (fold_build1 (IMAGPART_EXPR,
9492 fold_build1 (REALPART_EXPR, rtype, arg0));
9493 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9494 return fold_build2 (COMPLEX_EXPR, type,
9495 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9496 negate_expr (fold_build1 (REALPART_EXPR,
9500 /* Optimize z * conj(z) for floating point complex numbers.
9501 Guarded by flag_unsafe_math_optimizations as non-finite
9502 imaginary components don't produce scalar results. */
9503 if (flag_unsafe_math_optimizations
9504 && TREE_CODE (arg0) == CONJ_EXPR
9505 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9506 return fold_mult_zconjz (type, arg1);
9507 if (flag_unsafe_math_optimizations
9508 && TREE_CODE (arg1) == CONJ_EXPR
9509 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9510 return fold_mult_zconjz (type, arg0);
9512 if (flag_unsafe_math_optimizations)
9514 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9515 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9517 /* Optimizations of root(...)*root(...). */
9518 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9520 tree rootfn, arg, arglist;
9521 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9522 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9524 /* Optimize sqrt(x)*sqrt(x) as x. */
9525 if (BUILTIN_SQRT_P (fcode0)
9526 && operand_equal_p (arg00, arg10, 0)
9527 && ! HONOR_SNANS (TYPE_MODE (type)))
9530 /* Optimize root(x)*root(y) as root(x*y). */
9531 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9532 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9533 arglist = build_tree_list (NULL_TREE, arg);
9534 return build_function_call_expr (rootfn, arglist);
9537 /* Optimize expN(x)*expN(y) as expN(x+y). */
9538 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9540 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9541 tree arg = fold_build2 (PLUS_EXPR, type,
9542 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9543 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9544 tree arglist = build_tree_list (NULL_TREE, arg);
9545 return build_function_call_expr (expfn, arglist);
9548 /* Optimizations of pow(...)*pow(...). */
9549 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9550 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9551 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9553 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9554 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9556 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9557 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9560 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9561 if (operand_equal_p (arg01, arg11, 0))
9563 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9564 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9565 tree arglist = tree_cons (NULL_TREE, arg,
9566 build_tree_list (NULL_TREE,
9568 return build_function_call_expr (powfn, arglist);
9571 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9572 if (operand_equal_p (arg00, arg10, 0))
9574 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9575 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9576 tree arglist = tree_cons (NULL_TREE, arg00,
9577 build_tree_list (NULL_TREE,
9579 return build_function_call_expr (powfn, arglist);
9583 /* Optimize tan(x)*cos(x) as sin(x). */
9584 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9585 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9586 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9587 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9588 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9589 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9590 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9591 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9593 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9595 if (sinfn != NULL_TREE)
9596 return build_function_call_expr (sinfn,
9597 TREE_OPERAND (arg0, 1));
9600 /* Optimize x*pow(x,c) as pow(x,c+1). */
9601 if (fcode1 == BUILT_IN_POW
9602 || fcode1 == BUILT_IN_POWF
9603 || fcode1 == BUILT_IN_POWL)
9605 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9606 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9608 if (TREE_CODE (arg11) == REAL_CST
9609 && !TREE_OVERFLOW (arg11)
9610 && operand_equal_p (arg0, arg10, 0))
9612 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9616 c = TREE_REAL_CST (arg11);
9617 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9618 arg = build_real (type, c);
9619 arglist = build_tree_list (NULL_TREE, arg);
9620 arglist = tree_cons (NULL_TREE, arg0, arglist);
9621 return build_function_call_expr (powfn, arglist);
9625 /* Optimize pow(x,c)*x as pow(x,c+1). */
9626 if (fcode0 == BUILT_IN_POW
9627 || fcode0 == BUILT_IN_POWF
9628 || fcode0 == BUILT_IN_POWL)
9630 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9631 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9633 if (TREE_CODE (arg01) == REAL_CST
9634 && !TREE_OVERFLOW (arg01)
9635 && operand_equal_p (arg1, arg00, 0))
9637 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9641 c = TREE_REAL_CST (arg01);
9642 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9643 arg = build_real (type, c);
9644 arglist = build_tree_list (NULL_TREE, arg);
9645 arglist = tree_cons (NULL_TREE, arg1, arglist);
9646 return build_function_call_expr (powfn, arglist);
9650 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9652 && operand_equal_p (arg0, arg1, 0))
9654 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9658 tree arg = build_real (type, dconst2);
9659 tree arglist = build_tree_list (NULL_TREE, arg);
9660 arglist = tree_cons (NULL_TREE, arg0, arglist);
9661 return build_function_call_expr (powfn, arglist);
9670 if (integer_all_onesp (arg1))
9671 return omit_one_operand (type, arg1, arg0);
9672 if (integer_zerop (arg1))
9673 return non_lvalue (fold_convert (type, arg0));
9674 if (operand_equal_p (arg0, arg1, 0))
9675 return non_lvalue (fold_convert (type, arg0));
9678 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9679 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9681 t1 = build_int_cst_type (type, -1);
9682 return omit_one_operand (type, t1, arg1);
9686 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9687 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9689 t1 = build_int_cst_type (type, -1);
9690 return omit_one_operand (type, t1, arg0);
9693 /* Canonicalize (X & C1) | C2. */
9694 if (TREE_CODE (arg0) == BIT_AND_EXPR
9695 && TREE_CODE (arg1) == INTEGER_CST
9696 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9698 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9699 int width = TYPE_PRECISION (type);
9700 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9701 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9702 hi2 = TREE_INT_CST_HIGH (arg1);
9703 lo2 = TREE_INT_CST_LOW (arg1);
9705 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9706 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9707 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9709 if (width > HOST_BITS_PER_WIDE_INT)
9711 mhi = (unsigned HOST_WIDE_INT) -1
9712 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9718 mlo = (unsigned HOST_WIDE_INT) -1
9719 >> (HOST_BITS_PER_WIDE_INT - width);
9722 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9723 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9724 return fold_build2 (BIT_IOR_EXPR, type,
9725 TREE_OPERAND (arg0, 0), arg1);
9727 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9730 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9731 return fold_build2 (BIT_IOR_EXPR, type,
9732 fold_build2 (BIT_AND_EXPR, type,
9733 TREE_OPERAND (arg0, 0),
9734 build_int_cst_wide (type,
9740 /* (X & Y) | Y is (X, Y). */
9741 if (TREE_CODE (arg0) == BIT_AND_EXPR
9742 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9743 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9744 /* (X & Y) | X is (Y, X). */
9745 if (TREE_CODE (arg0) == BIT_AND_EXPR
9746 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9747 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9748 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9749 /* X | (X & Y) is (Y, X). */
9750 if (TREE_CODE (arg1) == BIT_AND_EXPR
9751 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9752 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9753 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9754 /* X | (Y & X) is (Y, X). */
9755 if (TREE_CODE (arg1) == BIT_AND_EXPR
9756 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9757 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9758 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9760 t1 = distribute_bit_expr (code, type, arg0, arg1);
9761 if (t1 != NULL_TREE)
9764 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9766 This results in more efficient code for machines without a NAND
9767 instruction. Combine will canonicalize to the first form
9768 which will allow use of NAND instructions provided by the
9769 backend if they exist. */
9770 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9771 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9773 return fold_build1 (BIT_NOT_EXPR, type,
9774 build2 (BIT_AND_EXPR, type,
9775 TREE_OPERAND (arg0, 0),
9776 TREE_OPERAND (arg1, 0)));
9779 /* See if this can be simplified into a rotate first. If that
9780 is unsuccessful continue in the association code. */
9784 if (integer_zerop (arg1))
9785 return non_lvalue (fold_convert (type, arg0));
9786 if (integer_all_onesp (arg1))
9787 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9788 if (operand_equal_p (arg0, arg1, 0))
9789 return omit_one_operand (type, integer_zero_node, arg0);
9792 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9793 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9795 t1 = build_int_cst_type (type, -1);
9796 return omit_one_operand (type, t1, arg1);
9800 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9801 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9803 t1 = build_int_cst_type (type, -1);
9804 return omit_one_operand (type, t1, arg0);
9807 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9808 with a constant, and the two constants have no bits in common,
9809 we should treat this as a BIT_IOR_EXPR since this may produce more
9811 if (TREE_CODE (arg0) == BIT_AND_EXPR
9812 && TREE_CODE (arg1) == BIT_AND_EXPR
9813 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9814 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9815 && integer_zerop (const_binop (BIT_AND_EXPR,
9816 TREE_OPERAND (arg0, 1),
9817 TREE_OPERAND (arg1, 1), 0)))
9819 code = BIT_IOR_EXPR;
9823 /* (X | Y) ^ X -> Y & ~ X*/
9824 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9825 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9827 tree t2 = TREE_OPERAND (arg0, 1);
9828 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9830 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9831 fold_convert (type, t1));
9835 /* (Y | X) ^ X -> Y & ~ X*/
9836 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9837 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9839 tree t2 = TREE_OPERAND (arg0, 0);
9840 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9842 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9843 fold_convert (type, t1));
9847 /* X ^ (X | Y) -> Y & ~ X*/
9848 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9849 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9851 tree t2 = TREE_OPERAND (arg1, 1);
9852 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9854 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9855 fold_convert (type, t1));
9859 /* X ^ (Y | X) -> Y & ~ X*/
9860 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9861 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9863 tree t2 = TREE_OPERAND (arg1, 0);
9864 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9866 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9867 fold_convert (type, t1));
9871 /* Convert ~X ^ ~Y to X ^ Y. */
9872 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9873 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9874 return fold_build2 (code, type,
9875 fold_convert (type, TREE_OPERAND (arg0, 0)),
9876 fold_convert (type, TREE_OPERAND (arg1, 0)));
9878 /* Convert ~X ^ C to X ^ ~C. */
9879 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9880 && TREE_CODE (arg1) == INTEGER_CST)
9881 return fold_build2 (code, type,
9882 fold_convert (type, TREE_OPERAND (arg0, 0)),
9883 fold_build1 (BIT_NOT_EXPR, type, arg1));
9885 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9886 if (TREE_CODE (arg0) == BIT_AND_EXPR
9887 && integer_onep (TREE_OPERAND (arg0, 1))
9888 && integer_onep (arg1))
9889 return fold_build2 (EQ_EXPR, type, arg0,
9890 build_int_cst (TREE_TYPE (arg0), 0));
9892 /* Fold (X & Y) ^ Y as ~X & Y. */
9893 if (TREE_CODE (arg0) == BIT_AND_EXPR
9894 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9896 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9897 return fold_build2 (BIT_AND_EXPR, type,
9898 fold_build1 (BIT_NOT_EXPR, type, tem),
9899 fold_convert (type, arg1));
9901 /* Fold (X & Y) ^ X as ~Y & X. */
9902 if (TREE_CODE (arg0) == BIT_AND_EXPR
9903 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9904 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9906 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9907 return fold_build2 (BIT_AND_EXPR, type,
9908 fold_build1 (BIT_NOT_EXPR, type, tem),
9909 fold_convert (type, arg1));
9911 /* Fold X ^ (X & Y) as X & ~Y. */
9912 if (TREE_CODE (arg1) == BIT_AND_EXPR
9913 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9915 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9916 return fold_build2 (BIT_AND_EXPR, type,
9917 fold_convert (type, arg0),
9918 fold_build1 (BIT_NOT_EXPR, type, tem));
9920 /* Fold X ^ (Y & X) as ~Y & X. */
9921 if (TREE_CODE (arg1) == BIT_AND_EXPR
9922 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9923 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9925 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9926 return fold_build2 (BIT_AND_EXPR, type,
9927 fold_build1 (BIT_NOT_EXPR, type, tem),
9928 fold_convert (type, arg0));
9931 /* See if this can be simplified into a rotate first. If that
9932 is unsuccessful continue in the association code. */
9936 if (integer_all_onesp (arg1))
9937 return non_lvalue (fold_convert (type, arg0));
9938 if (integer_zerop (arg1))
9939 return omit_one_operand (type, arg1, arg0);
9940 if (operand_equal_p (arg0, arg1, 0))
9941 return non_lvalue (fold_convert (type, arg0));
9943 /* ~X & X is always zero. */
9944 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9945 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9946 return omit_one_operand (type, integer_zero_node, arg1);
9948 /* X & ~X is always zero. */
9949 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9950 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9951 return omit_one_operand (type, integer_zero_node, arg0);
9953 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9954 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9955 && TREE_CODE (arg1) == INTEGER_CST
9956 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9957 return fold_build2 (BIT_IOR_EXPR, type,
9958 fold_build2 (BIT_AND_EXPR, type,
9959 TREE_OPERAND (arg0, 0), arg1),
9960 fold_build2 (BIT_AND_EXPR, type,
9961 TREE_OPERAND (arg0, 1), arg1));
9963 /* (X | Y) & Y is (X, Y). */
9964 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9965 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9966 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9967 /* (X | Y) & X is (Y, X). */
9968 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9969 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9970 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9971 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9972 /* X & (X | Y) is (Y, X). */
9973 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9974 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9975 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9976 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9977 /* X & (Y | X) is (Y, X). */
9978 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9979 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9980 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9981 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9983 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9984 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9985 && integer_onep (TREE_OPERAND (arg0, 1))
9986 && integer_onep (arg1))
9988 tem = TREE_OPERAND (arg0, 0);
9989 return fold_build2 (EQ_EXPR, type,
9990 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9991 build_int_cst (TREE_TYPE (tem), 1)),
9992 build_int_cst (TREE_TYPE (tem), 0));
9994 /* Fold ~X & 1 as (X & 1) == 0. */
9995 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9996 && integer_onep (arg1))
9998 tem = TREE_OPERAND (arg0, 0);
9999 return fold_build2 (EQ_EXPR, type,
10000 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10001 build_int_cst (TREE_TYPE (tem), 1)),
10002 build_int_cst (TREE_TYPE (tem), 0));
10005 /* Fold (X ^ Y) & Y as ~X & Y. */
10006 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10007 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10009 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10010 return fold_build2 (BIT_AND_EXPR, type,
10011 fold_build1 (BIT_NOT_EXPR, type, tem),
10012 fold_convert (type, arg1));
10014 /* Fold (X ^ Y) & X as ~Y & X. */
10015 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10016 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10017 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10019 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10020 return fold_build2 (BIT_AND_EXPR, type,
10021 fold_build1 (BIT_NOT_EXPR, type, tem),
10022 fold_convert (type, arg1));
10024 /* Fold X & (X ^ Y) as X & ~Y. */
10025 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10026 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10028 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10029 return fold_build2 (BIT_AND_EXPR, type,
10030 fold_convert (type, arg0),
10031 fold_build1 (BIT_NOT_EXPR, type, tem));
10033 /* Fold X & (Y ^ X) as ~Y & X. */
10034 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10035 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10036 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10038 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10039 return fold_build2 (BIT_AND_EXPR, type,
10040 fold_build1 (BIT_NOT_EXPR, type, tem),
10041 fold_convert (type, arg0));
10044 t1 = distribute_bit_expr (code, type, arg0, arg1);
10045 if (t1 != NULL_TREE)
10047 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10048 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10049 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10052 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10054 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10055 && (~TREE_INT_CST_LOW (arg1)
10056 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10057 return fold_convert (type, TREE_OPERAND (arg0, 0));
10060 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10062 This results in more efficient code for machines without a NOR
10063 instruction. Combine will canonicalize to the first form
10064 which will allow use of NOR instructions provided by the
10065 backend if they exist. */
10066 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10067 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10069 return fold_build1 (BIT_NOT_EXPR, type,
10070 build2 (BIT_IOR_EXPR, type,
10071 TREE_OPERAND (arg0, 0),
10072 TREE_OPERAND (arg1, 0)));
10078 /* Don't touch a floating-point divide by zero unless the mode
10079 of the constant can represent infinity. */
10080 if (TREE_CODE (arg1) == REAL_CST
10081 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10082 && real_zerop (arg1))
10085 /* Optimize A / A to 1.0 if we don't care about
10086 NaNs or Infinities. Skip the transformation
10087 for non-real operands. */
10088 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10089 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10090 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10091 && operand_equal_p (arg0, arg1, 0))
10093 tree r = build_real (TREE_TYPE (arg0), dconst1);
10095 return omit_two_operands (type, r, arg0, arg1);
10098 /* The complex version of the above A / A optimization. */
10099 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10100 && operand_equal_p (arg0, arg1, 0))
10102 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10103 if (! HONOR_NANS (TYPE_MODE (elem_type))
10104 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10106 tree r = build_real (elem_type, dconst1);
10107 /* omit_two_operands will call fold_convert for us. */
10108 return omit_two_operands (type, r, arg0, arg1);
10112 /* (-A) / (-B) -> A / B */
10113 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10114 return fold_build2 (RDIV_EXPR, type,
10115 TREE_OPERAND (arg0, 0),
10116 negate_expr (arg1));
10117 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10118 return fold_build2 (RDIV_EXPR, type,
10119 negate_expr (arg0),
10120 TREE_OPERAND (arg1, 0));
10122 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10123 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10124 && real_onep (arg1))
10125 return non_lvalue (fold_convert (type, arg0));
10127 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10128 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10129 && real_minus_onep (arg1))
10130 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10132 /* If ARG1 is a constant, we can convert this to a multiply by the
10133 reciprocal. This does not have the same rounding properties,
10134 so only do this if -funsafe-math-optimizations. We can actually
10135 always safely do it if ARG1 is a power of two, but it's hard to
10136 tell if it is or not in a portable manner. */
10137 if (TREE_CODE (arg1) == REAL_CST)
10139 if (flag_unsafe_math_optimizations
10140 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10142 return fold_build2 (MULT_EXPR, type, arg0, tem);
10143 /* Find the reciprocal if optimizing and the result is exact. */
10147 r = TREE_REAL_CST (arg1);
10148 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10150 tem = build_real (type, r);
10151 return fold_build2 (MULT_EXPR, type,
10152 fold_convert (type, arg0), tem);
10156 /* Convert A/B/C to A/(B*C). */
10157 if (flag_unsafe_math_optimizations
10158 && TREE_CODE (arg0) == RDIV_EXPR)
10159 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10160 fold_build2 (MULT_EXPR, type,
10161 TREE_OPERAND (arg0, 1), arg1));
10163 /* Convert A/(B/C) to (A/B)*C. */
10164 if (flag_unsafe_math_optimizations
10165 && TREE_CODE (arg1) == RDIV_EXPR)
10166 return fold_build2 (MULT_EXPR, type,
10167 fold_build2 (RDIV_EXPR, type, arg0,
10168 TREE_OPERAND (arg1, 0)),
10169 TREE_OPERAND (arg1, 1));
10171 /* Convert C1/(X*C2) into (C1/C2)/X. */
10172 if (flag_unsafe_math_optimizations
10173 && TREE_CODE (arg1) == MULT_EXPR
10174 && TREE_CODE (arg0) == REAL_CST
10175 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10177 tree tem = const_binop (RDIV_EXPR, arg0,
10178 TREE_OPERAND (arg1, 1), 0);
10180 return fold_build2 (RDIV_EXPR, type, tem,
10181 TREE_OPERAND (arg1, 0));
10184 if (flag_unsafe_math_optimizations)
10186 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10187 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10189 /* Optimize sin(x)/cos(x) as tan(x). */
10190 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10191 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10192 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10193 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10194 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10196 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10198 if (tanfn != NULL_TREE)
10199 return build_function_call_expr (tanfn,
10200 TREE_OPERAND (arg0, 1));
10203 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10204 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10205 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10206 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10207 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10208 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10210 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10212 if (tanfn != NULL_TREE)
10214 tree tmp = TREE_OPERAND (arg0, 1);
10215 tmp = build_function_call_expr (tanfn, tmp);
10216 return fold_build2 (RDIV_EXPR, type,
10217 build_real (type, dconst1), tmp);
10221 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10222 NaNs or Infinities. */
10223 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10224 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10225 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10227 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10228 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10230 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10231 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10232 && operand_equal_p (arg00, arg01, 0))
10234 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10236 if (cosfn != NULL_TREE)
10237 return build_function_call_expr (cosfn,
10238 TREE_OPERAND (arg0, 1));
10242 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10243 NaNs or Infinities. */
10244 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10245 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10246 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10248 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10249 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10251 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10252 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10253 && operand_equal_p (arg00, arg01, 0))
10255 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10257 if (cosfn != NULL_TREE)
10259 tree tmp = TREE_OPERAND (arg0, 1);
10260 tmp = build_function_call_expr (cosfn, tmp);
10261 return fold_build2 (RDIV_EXPR, type,
10262 build_real (type, dconst1),
10268 /* Optimize pow(x,c)/x as pow(x,c-1). */
10269 if (fcode0 == BUILT_IN_POW
10270 || fcode0 == BUILT_IN_POWF
10271 || fcode0 == BUILT_IN_POWL)
10273 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10274 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
10275 if (TREE_CODE (arg01) == REAL_CST
10276 && !TREE_OVERFLOW (arg01)
10277 && operand_equal_p (arg1, arg00, 0))
10279 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10283 c = TREE_REAL_CST (arg01);
10284 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10285 arg = build_real (type, c);
10286 arglist = build_tree_list (NULL_TREE, arg);
10287 arglist = tree_cons (NULL_TREE, arg1, arglist);
10288 return build_function_call_expr (powfn, arglist);
10292 /* Optimize x/expN(y) into x*expN(-y). */
10293 if (BUILTIN_EXPONENT_P (fcode1))
10295 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10296 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10297 tree arglist = build_tree_list (NULL_TREE,
10298 fold_convert (type, arg));
10299 arg1 = build_function_call_expr (expfn, arglist);
10300 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10303 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10304 if (fcode1 == BUILT_IN_POW
10305 || fcode1 == BUILT_IN_POWF
10306 || fcode1 == BUILT_IN_POWL)
10308 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10309 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10310 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10311 tree neg11 = fold_convert (type, negate_expr (arg11));
10312 tree arglist = tree_cons (NULL_TREE, arg10,
10313 build_tree_list (NULL_TREE, neg11));
10314 arg1 = build_function_call_expr (powfn, arglist);
10315 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10320 case TRUNC_DIV_EXPR:
10321 case FLOOR_DIV_EXPR:
10322 /* Simplify A / (B << N) where A and B are positive and B is
10323 a power of 2, to A >> (N + log2(B)). */
10324 if (TREE_CODE (arg1) == LSHIFT_EXPR
10325 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10327 tree sval = TREE_OPERAND (arg1, 0);
10328 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10330 tree sh_cnt = TREE_OPERAND (arg1, 1);
10331 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10333 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10334 sh_cnt, build_int_cst (NULL_TREE, pow2));
10335 return fold_build2 (RSHIFT_EXPR, type,
10336 fold_convert (type, arg0), sh_cnt);
10341 case ROUND_DIV_EXPR:
10342 case CEIL_DIV_EXPR:
10343 case EXACT_DIV_EXPR:
10344 if (integer_onep (arg1))
10345 return non_lvalue (fold_convert (type, arg0));
10346 if (integer_zerop (arg1))
10348 /* X / -1 is -X. */
10349 if (!TYPE_UNSIGNED (type)
10350 && TREE_CODE (arg1) == INTEGER_CST
10351 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10352 && TREE_INT_CST_HIGH (arg1) == -1)
10353 return fold_convert (type, negate_expr (arg0));
10355 /* Convert -A / -B to A / B when the type is signed and overflow is
10357 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10358 && TREE_CODE (arg0) == NEGATE_EXPR
10359 && negate_expr_p (arg1))
10360 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10361 negate_expr (arg1));
10362 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10363 && TREE_CODE (arg1) == NEGATE_EXPR
10364 && negate_expr_p (arg0))
10365 return fold_build2 (code, type, negate_expr (arg0),
10366 TREE_OPERAND (arg1, 0));
10368 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10369 operation, EXACT_DIV_EXPR.
10371 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10372 At one time others generated faster code, it's not clear if they do
10373 after the last round to changes to the DIV code in expmed.c. */
10374 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10375 && multiple_of_p (type, arg0, arg1))
10376 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10378 if (TREE_CODE (arg1) == INTEGER_CST
10379 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10380 return fold_convert (type, tem);
10384 case CEIL_MOD_EXPR:
10385 case FLOOR_MOD_EXPR:
10386 case ROUND_MOD_EXPR:
10387 case TRUNC_MOD_EXPR:
10388 /* X % 1 is always zero, but be sure to preserve any side
10390 if (integer_onep (arg1))
10391 return omit_one_operand (type, integer_zero_node, arg0);
10393 /* X % 0, return X % 0 unchanged so that we can get the
10394 proper warnings and errors. */
10395 if (integer_zerop (arg1))
10398 /* 0 % X is always zero, but be sure to preserve any side
10399 effects in X. Place this after checking for X == 0. */
10400 if (integer_zerop (arg0))
10401 return omit_one_operand (type, integer_zero_node, arg1);
10403 /* X % -1 is zero. */
10404 if (!TYPE_UNSIGNED (type)
10405 && TREE_CODE (arg1) == INTEGER_CST
10406 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10407 && TREE_INT_CST_HIGH (arg1) == -1)
10408 return omit_one_operand (type, integer_zero_node, arg0);
10410 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10411 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10412 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10413 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10416 /* Also optimize A % (C << N) where C is a power of 2,
10417 to A & ((C << N) - 1). */
10418 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10419 c = TREE_OPERAND (arg1, 0);
10421 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10423 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10424 build_int_cst (TREE_TYPE (arg1), 1));
10425 return fold_build2 (BIT_AND_EXPR, type,
10426 fold_convert (type, arg0),
10427 fold_convert (type, mask));
10431 /* X % -C is the same as X % C. */
10432 if (code == TRUNC_MOD_EXPR
10433 && !TYPE_UNSIGNED (type)
10434 && TREE_CODE (arg1) == INTEGER_CST
10435 && !TREE_OVERFLOW (arg1)
10436 && TREE_INT_CST_HIGH (arg1) < 0
10437 && !TYPE_OVERFLOW_TRAPS (type)
10438 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10439 && !sign_bit_p (arg1, arg1))
10440 return fold_build2 (code, type, fold_convert (type, arg0),
10441 fold_convert (type, negate_expr (arg1)));
10443 /* X % -Y is the same as X % Y. */
10444 if (code == TRUNC_MOD_EXPR
10445 && !TYPE_UNSIGNED (type)
10446 && TREE_CODE (arg1) == NEGATE_EXPR
10447 && !TYPE_OVERFLOW_TRAPS (type))
10448 return fold_build2 (code, type, fold_convert (type, arg0),
10449 fold_convert (type, TREE_OPERAND (arg1, 0)));
10451 if (TREE_CODE (arg1) == INTEGER_CST
10452 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10453 return fold_convert (type, tem);
10459 if (integer_all_onesp (arg0))
10460 return omit_one_operand (type, arg0, arg1);
10464 /* Optimize -1 >> x for arithmetic right shifts. */
10465 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10466 return omit_one_operand (type, arg0, arg1);
10467 /* ... fall through ... */
10471 if (integer_zerop (arg1))
10472 return non_lvalue (fold_convert (type, arg0));
10473 if (integer_zerop (arg0))
10474 return omit_one_operand (type, arg0, arg1);
10476 /* Since negative shift count is not well-defined,
10477 don't try to compute it in the compiler. */
10478 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10481 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10482 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10483 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10484 && host_integerp (TREE_OPERAND (arg0, 1), false)
10485 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10487 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10488 + TREE_INT_CST_LOW (arg1));
10490 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10491 being well defined. */
10492 if (low >= TYPE_PRECISION (type))
10494 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10495 low = low % TYPE_PRECISION (type);
10496 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10497 return build_int_cst (type, 0);
10499 low = TYPE_PRECISION (type) - 1;
10502 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10503 build_int_cst (type, low));
10506 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10507 into x & ((unsigned)-1 >> c) for unsigned types. */
10508 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10509 || (TYPE_UNSIGNED (type)
10510 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10511 && host_integerp (arg1, false)
10512 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10513 && host_integerp (TREE_OPERAND (arg0, 1), false)
10514 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10516 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10517 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10523 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10525 lshift = build_int_cst (type, -1);
10526 lshift = int_const_binop (code, lshift, arg1, 0);
10528 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10532 /* Rewrite an LROTATE_EXPR by a constant into an
10533 RROTATE_EXPR by a new constant. */
10534 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10536 tree tem = build_int_cst (TREE_TYPE (arg1),
10537 GET_MODE_BITSIZE (TYPE_MODE (type)));
10538 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10539 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10542 /* If we have a rotate of a bit operation with the rotate count and
10543 the second operand of the bit operation both constant,
10544 permute the two operations. */
10545 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10546 && (TREE_CODE (arg0) == BIT_AND_EXPR
10547 || TREE_CODE (arg0) == BIT_IOR_EXPR
10548 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10549 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10550 return fold_build2 (TREE_CODE (arg0), type,
10551 fold_build2 (code, type,
10552 TREE_OPERAND (arg0, 0), arg1),
10553 fold_build2 (code, type,
10554 TREE_OPERAND (arg0, 1), arg1));
10556 /* Two consecutive rotates adding up to the width of the mode can
10558 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10559 && TREE_CODE (arg0) == RROTATE_EXPR
10560 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10561 && TREE_INT_CST_HIGH (arg1) == 0
10562 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10563 && ((TREE_INT_CST_LOW (arg1)
10564 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10565 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10566 return TREE_OPERAND (arg0, 0);
10571 if (operand_equal_p (arg0, arg1, 0))
10572 return omit_one_operand (type, arg0, arg1);
10573 if (INTEGRAL_TYPE_P (type)
10574 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10575 return omit_one_operand (type, arg1, arg0);
10576 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10582 if (operand_equal_p (arg0, arg1, 0))
10583 return omit_one_operand (type, arg0, arg1);
10584 if (INTEGRAL_TYPE_P (type)
10585 && TYPE_MAX_VALUE (type)
10586 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10587 return omit_one_operand (type, arg1, arg0);
10588 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10593 case TRUTH_ANDIF_EXPR:
10594 /* Note that the operands of this must be ints
10595 and their values must be 0 or 1.
10596 ("true" is a fixed value perhaps depending on the language.) */
10597 /* If first arg is constant zero, return it. */
10598 if (integer_zerop (arg0))
10599 return fold_convert (type, arg0);
10600 case TRUTH_AND_EXPR:
10601 /* If either arg is constant true, drop it. */
10602 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10603 return non_lvalue (fold_convert (type, arg1));
10604 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10605 /* Preserve sequence points. */
10606 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10607 return non_lvalue (fold_convert (type, arg0));
10608 /* If second arg is constant zero, result is zero, but first arg
10609 must be evaluated. */
10610 if (integer_zerop (arg1))
10611 return omit_one_operand (type, arg1, arg0);
10612 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10613 case will be handled here. */
10614 if (integer_zerop (arg0))
10615 return omit_one_operand (type, arg0, arg1);
10617 /* !X && X is always false. */
10618 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10619 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10620 return omit_one_operand (type, integer_zero_node, arg1);
10621 /* X && !X is always false. */
10622 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10623 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10624 return omit_one_operand (type, integer_zero_node, arg0);
10626 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10627 means A >= Y && A != MAX, but in this case we know that
10630 if (!TREE_SIDE_EFFECTS (arg0)
10631 && !TREE_SIDE_EFFECTS (arg1))
10633 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10634 if (tem && !operand_equal_p (tem, arg0, 0))
10635 return fold_build2 (code, type, tem, arg1);
10637 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10638 if (tem && !operand_equal_p (tem, arg1, 0))
10639 return fold_build2 (code, type, arg0, tem);
10643 /* We only do these simplifications if we are optimizing. */
10647 /* Check for things like (A || B) && (A || C). We can convert this
10648 to A || (B && C). Note that either operator can be any of the four
10649 truth and/or operations and the transformation will still be
10650 valid. Also note that we only care about order for the
10651 ANDIF and ORIF operators. If B contains side effects, this
10652 might change the truth-value of A. */
10653 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10654 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10655 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10656 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10657 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10658 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10660 tree a00 = TREE_OPERAND (arg0, 0);
10661 tree a01 = TREE_OPERAND (arg0, 1);
10662 tree a10 = TREE_OPERAND (arg1, 0);
10663 tree a11 = TREE_OPERAND (arg1, 1);
10664 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10665 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10666 && (code == TRUTH_AND_EXPR
10667 || code == TRUTH_OR_EXPR));
10669 if (operand_equal_p (a00, a10, 0))
10670 return fold_build2 (TREE_CODE (arg0), type, a00,
10671 fold_build2 (code, type, a01, a11));
10672 else if (commutative && operand_equal_p (a00, a11, 0))
10673 return fold_build2 (TREE_CODE (arg0), type, a00,
10674 fold_build2 (code, type, a01, a10));
10675 else if (commutative && operand_equal_p (a01, a10, 0))
10676 return fold_build2 (TREE_CODE (arg0), type, a01,
10677 fold_build2 (code, type, a00, a11));
10679 /* This case if tricky because we must either have commutative
10680 operators or else A10 must not have side-effects. */
10682 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10683 && operand_equal_p (a01, a11, 0))
10684 return fold_build2 (TREE_CODE (arg0), type,
10685 fold_build2 (code, type, a00, a10),
10689 /* See if we can build a range comparison. */
10690 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10693 /* Check for the possibility of merging component references. If our
10694 lhs is another similar operation, try to merge its rhs with our
10695 rhs. Then try to merge our lhs and rhs. */
10696 if (TREE_CODE (arg0) == code
10697 && 0 != (tem = fold_truthop (code, type,
10698 TREE_OPERAND (arg0, 1), arg1)))
10699 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10701 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10706 case TRUTH_ORIF_EXPR:
10707 /* Note that the operands of this must be ints
10708 and their values must be 0 or true.
10709 ("true" is a fixed value perhaps depending on the language.) */
10710 /* If first arg is constant true, return it. */
10711 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10712 return fold_convert (type, arg0);
10713 case TRUTH_OR_EXPR:
10714 /* If either arg is constant zero, drop it. */
10715 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10716 return non_lvalue (fold_convert (type, arg1));
10717 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10718 /* Preserve sequence points. */
10719 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10720 return non_lvalue (fold_convert (type, arg0));
10721 /* If second arg is constant true, result is true, but we must
10722 evaluate first arg. */
10723 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10724 return omit_one_operand (type, arg1, arg0);
10725 /* Likewise for first arg, but note this only occurs here for
10727 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10728 return omit_one_operand (type, arg0, arg1);
10730 /* !X || X is always true. */
10731 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10732 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10733 return omit_one_operand (type, integer_one_node, arg1);
10734 /* X || !X is always true. */
10735 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10736 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10737 return omit_one_operand (type, integer_one_node, arg0);
10741 case TRUTH_XOR_EXPR:
10742 /* If the second arg is constant zero, drop it. */
10743 if (integer_zerop (arg1))
10744 return non_lvalue (fold_convert (type, arg0));
10745 /* If the second arg is constant true, this is a logical inversion. */
10746 if (integer_onep (arg1))
10748 /* Only call invert_truthvalue if operand is a truth value. */
10749 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10750 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10752 tem = invert_truthvalue (arg0);
10753 return non_lvalue (fold_convert (type, tem));
10755 /* Identical arguments cancel to zero. */
10756 if (operand_equal_p (arg0, arg1, 0))
10757 return omit_one_operand (type, integer_zero_node, arg0);
10759 /* !X ^ X is always true. */
10760 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10761 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10762 return omit_one_operand (type, integer_one_node, arg1);
10764 /* X ^ !X is always true. */
10765 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10766 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10767 return omit_one_operand (type, integer_one_node, arg0);
10773 tem = fold_comparison (code, type, op0, op1);
10774 if (tem != NULL_TREE)
10777 /* bool_var != 0 becomes bool_var. */
10778 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10779 && code == NE_EXPR)
10780 return non_lvalue (fold_convert (type, arg0));
10782 /* bool_var == 1 becomes bool_var. */
10783 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10784 && code == EQ_EXPR)
10785 return non_lvalue (fold_convert (type, arg0));
10787 /* bool_var != 1 becomes !bool_var. */
10788 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10789 && code == NE_EXPR)
10790 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10792 /* bool_var == 0 becomes !bool_var. */
10793 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10794 && code == EQ_EXPR)
10795 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10797 /* If this is an equality comparison of the address of a non-weak
10798 object against zero, then we know the result. */
10799 if (TREE_CODE (arg0) == ADDR_EXPR
10800 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10801 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10802 && integer_zerop (arg1))
10803 return constant_boolean_node (code != EQ_EXPR, type);
10805 /* If this is an equality comparison of the address of two non-weak,
10806 unaliased symbols neither of which are extern (since we do not
10807 have access to attributes for externs), then we know the result. */
10808 if (TREE_CODE (arg0) == ADDR_EXPR
10809 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10810 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10811 && ! lookup_attribute ("alias",
10812 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10813 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10814 && TREE_CODE (arg1) == ADDR_EXPR
10815 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10816 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10817 && ! lookup_attribute ("alias",
10818 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10819 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10821 /* We know that we're looking at the address of two
10822 non-weak, unaliased, static _DECL nodes.
10824 It is both wasteful and incorrect to call operand_equal_p
10825 to compare the two ADDR_EXPR nodes. It is wasteful in that
10826 all we need to do is test pointer equality for the arguments
10827 to the two ADDR_EXPR nodes. It is incorrect to use
10828 operand_equal_p as that function is NOT equivalent to a
10829 C equality test. It can in fact return false for two
10830 objects which would test as equal using the C equality
10832 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10833 return constant_boolean_node (equal
10834 ? code == EQ_EXPR : code != EQ_EXPR,
10838 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10839 a MINUS_EXPR of a constant, we can convert it into a comparison with
10840 a revised constant as long as no overflow occurs. */
10841 if (TREE_CODE (arg1) == INTEGER_CST
10842 && (TREE_CODE (arg0) == PLUS_EXPR
10843 || TREE_CODE (arg0) == MINUS_EXPR)
10844 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10845 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10846 ? MINUS_EXPR : PLUS_EXPR,
10847 fold_convert (TREE_TYPE (arg0), arg1),
10848 TREE_OPERAND (arg0, 1), 0))
10849 && !TREE_OVERFLOW (tem))
10850 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10852 /* Similarly for a NEGATE_EXPR. */
10853 if (TREE_CODE (arg0) == NEGATE_EXPR
10854 && TREE_CODE (arg1) == INTEGER_CST
10855 && 0 != (tem = negate_expr (arg1))
10856 && TREE_CODE (tem) == INTEGER_CST
10857 && !TREE_OVERFLOW (tem))
10858 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10860 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
10861 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10862 && TREE_CODE (arg1) == INTEGER_CST
10863 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10864 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10865 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
10866 fold_convert (TREE_TYPE (arg0), arg1),
10867 TREE_OPERAND (arg0, 1)));
10869 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10870 for !=. Don't do this for ordered comparisons due to overflow. */
10871 if (TREE_CODE (arg0) == MINUS_EXPR
10872 && integer_zerop (arg1))
10873 return fold_build2 (code, type,
10874 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10876 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10877 if (TREE_CODE (arg0) == ABS_EXPR
10878 && (integer_zerop (arg1) || real_zerop (arg1)))
10879 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10881 /* If this is an EQ or NE comparison with zero and ARG0 is
10882 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10883 two operations, but the latter can be done in one less insn
10884 on machines that have only two-operand insns or on which a
10885 constant cannot be the first operand. */
10886 if (TREE_CODE (arg0) == BIT_AND_EXPR
10887 && integer_zerop (arg1))
10889 tree arg00 = TREE_OPERAND (arg0, 0);
10890 tree arg01 = TREE_OPERAND (arg0, 1);
10891 if (TREE_CODE (arg00) == LSHIFT_EXPR
10892 && integer_onep (TREE_OPERAND (arg00, 0)))
10894 fold_build2 (code, type,
10895 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10896 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10897 arg01, TREE_OPERAND (arg00, 1)),
10898 fold_convert (TREE_TYPE (arg0),
10899 integer_one_node)),
10901 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10902 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10904 fold_build2 (code, type,
10905 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10906 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10907 arg00, TREE_OPERAND (arg01, 1)),
10908 fold_convert (TREE_TYPE (arg0),
10909 integer_one_node)),
10913 /* If this is an NE or EQ comparison of zero against the result of a
10914 signed MOD operation whose second operand is a power of 2, make
10915 the MOD operation unsigned since it is simpler and equivalent. */
10916 if (integer_zerop (arg1)
10917 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10918 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10919 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10920 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10921 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10922 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10924 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10925 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10926 fold_convert (newtype,
10927 TREE_OPERAND (arg0, 0)),
10928 fold_convert (newtype,
10929 TREE_OPERAND (arg0, 1)));
10931 return fold_build2 (code, type, newmod,
10932 fold_convert (newtype, arg1));
10935 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10936 C1 is a valid shift constant, and C2 is a power of two, i.e.
10938 if (TREE_CODE (arg0) == BIT_AND_EXPR
10939 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10940 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10942 && integer_pow2p (TREE_OPERAND (arg0, 1))
10943 && integer_zerop (arg1))
10945 tree itype = TREE_TYPE (arg0);
10946 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10947 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10949 /* Check for a valid shift count. */
10950 if (TREE_INT_CST_HIGH (arg001) == 0
10951 && TREE_INT_CST_LOW (arg001) < prec)
10953 tree arg01 = TREE_OPERAND (arg0, 1);
10954 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10955 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10956 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10957 can be rewritten as (X & (C2 << C1)) != 0. */
10958 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10960 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10961 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10962 return fold_build2 (code, type, tem, arg1);
10964 /* Otherwise, for signed (arithmetic) shifts,
10965 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10966 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10967 else if (!TYPE_UNSIGNED (itype))
10968 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10969 arg000, build_int_cst (itype, 0));
10970 /* Otherwise, of unsigned (logical) shifts,
10971 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10972 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10974 return omit_one_operand (type,
10975 code == EQ_EXPR ? integer_one_node
10976 : integer_zero_node,
10981 /* If this is an NE comparison of zero with an AND of one, remove the
10982 comparison since the AND will give the correct value. */
10983 if (code == NE_EXPR
10984 && integer_zerop (arg1)
10985 && TREE_CODE (arg0) == BIT_AND_EXPR
10986 && integer_onep (TREE_OPERAND (arg0, 1)))
10987 return fold_convert (type, arg0);
10989 /* If we have (A & C) == C where C is a power of 2, convert this into
10990 (A & C) != 0. Similarly for NE_EXPR. */
10991 if (TREE_CODE (arg0) == BIT_AND_EXPR
10992 && integer_pow2p (TREE_OPERAND (arg0, 1))
10993 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10994 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10995 arg0, fold_convert (TREE_TYPE (arg0),
10996 integer_zero_node));
10998 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10999 bit, then fold the expression into A < 0 or A >= 0. */
11000 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11004 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11005 Similarly for NE_EXPR. */
11006 if (TREE_CODE (arg0) == BIT_AND_EXPR
11007 && TREE_CODE (arg1) == INTEGER_CST
11008 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11010 tree notc = fold_build1 (BIT_NOT_EXPR,
11011 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11012 TREE_OPERAND (arg0, 1));
11013 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11015 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11016 if (integer_nonzerop (dandnotc))
11017 return omit_one_operand (type, rslt, arg0);
11020 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11021 Similarly for NE_EXPR. */
11022 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11023 && TREE_CODE (arg1) == INTEGER_CST
11024 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11026 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11027 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11028 TREE_OPERAND (arg0, 1), notd);
11029 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11030 if (integer_nonzerop (candnotd))
11031 return omit_one_operand (type, rslt, arg0);
11034 /* If this is a comparison of a field, we may be able to simplify it. */
11035 if ((TREE_CODE (arg0) == COMPONENT_REF
11036 || TREE_CODE (arg0) == BIT_FIELD_REF)
11037 /* Handle the constant case even without -O
11038 to make sure the warnings are given. */
11039 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11041 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11046 /* Optimize comparisons of strlen vs zero to a compare of the
11047 first character of the string vs zero. To wit,
11048 strlen(ptr) == 0 => *ptr == 0
11049 strlen(ptr) != 0 => *ptr != 0
11050 Other cases should reduce to one of these two (or a constant)
11051 due to the return value of strlen being unsigned. */
11052 if (TREE_CODE (arg0) == CALL_EXPR
11053 && integer_zerop (arg1))
11055 tree fndecl = get_callee_fndecl (arg0);
11059 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11060 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11061 && (arglist = TREE_OPERAND (arg0, 1))
11062 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
11063 && ! TREE_CHAIN (arglist))
11065 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
11066 return fold_build2 (code, type, iref,
11067 build_int_cst (TREE_TYPE (iref), 0));
11071 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11072 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11073 if (TREE_CODE (arg0) == RSHIFT_EXPR
11074 && integer_zerop (arg1)
11075 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11077 tree arg00 = TREE_OPERAND (arg0, 0);
11078 tree arg01 = TREE_OPERAND (arg0, 1);
11079 tree itype = TREE_TYPE (arg00);
11080 if (TREE_INT_CST_HIGH (arg01) == 0
11081 && TREE_INT_CST_LOW (arg01)
11082 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11084 if (TYPE_UNSIGNED (itype))
11086 itype = lang_hooks.types.signed_type (itype);
11087 arg00 = fold_convert (itype, arg00);
11089 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11090 type, arg00, build_int_cst (itype, 0));
11094 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11095 if (integer_zerop (arg1)
11096 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11097 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11098 TREE_OPERAND (arg0, 1));
11100 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11101 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11102 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11103 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11104 build_int_cst (TREE_TYPE (arg1), 0));
11105 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11106 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11107 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11108 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11109 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11110 build_int_cst (TREE_TYPE (arg1), 0));
11112 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11113 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11114 && TREE_CODE (arg1) == INTEGER_CST
11115 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11116 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11117 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11118 TREE_OPERAND (arg0, 1), arg1));
11120 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11121 (X & C) == 0 when C is a single bit. */
11122 if (TREE_CODE (arg0) == BIT_AND_EXPR
11123 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11124 && integer_zerop (arg1)
11125 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11127 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11128 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11129 TREE_OPERAND (arg0, 1));
11130 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11134 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11135 constant C is a power of two, i.e. a single bit. */
11136 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11137 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11138 && integer_zerop (arg1)
11139 && integer_pow2p (TREE_OPERAND (arg0, 1))
11140 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11141 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11143 tree arg00 = TREE_OPERAND (arg0, 0);
11144 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11145 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11148 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11149 when C is a power of two, i.e. a single bit. */
11150 if (TREE_CODE (arg0) == BIT_AND_EXPR
11151 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11152 && integer_zerop (arg1)
11153 && integer_pow2p (TREE_OPERAND (arg0, 1))
11154 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11155 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11157 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11158 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11159 arg000, TREE_OPERAND (arg0, 1));
11160 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11161 tem, build_int_cst (TREE_TYPE (tem), 0));
11164 if (integer_zerop (arg1)
11165 && tree_expr_nonzero_p (arg0))
11167 tree res = constant_boolean_node (code==NE_EXPR, type);
11168 return omit_one_operand (type, res, arg0);
11171 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11172 if (TREE_CODE (arg0) == NEGATE_EXPR
11173 && TREE_CODE (arg1) == NEGATE_EXPR)
11174 return fold_build2 (code, type,
11175 TREE_OPERAND (arg0, 0),
11176 TREE_OPERAND (arg1, 0));
11178 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11179 if (TREE_CODE (arg0) == BIT_AND_EXPR
11180 && TREE_CODE (arg1) == BIT_AND_EXPR)
11182 tree arg00 = TREE_OPERAND (arg0, 0);
11183 tree arg01 = TREE_OPERAND (arg0, 1);
11184 tree arg10 = TREE_OPERAND (arg1, 0);
11185 tree arg11 = TREE_OPERAND (arg1, 1);
11186 tree itype = TREE_TYPE (arg0);
11188 if (operand_equal_p (arg01, arg11, 0))
11189 return fold_build2 (code, type,
11190 fold_build2 (BIT_AND_EXPR, itype,
11191 fold_build2 (BIT_XOR_EXPR, itype,
11194 build_int_cst (itype, 0));
11196 if (operand_equal_p (arg01, arg10, 0))
11197 return fold_build2 (code, type,
11198 fold_build2 (BIT_AND_EXPR, itype,
11199 fold_build2 (BIT_XOR_EXPR, itype,
11202 build_int_cst (itype, 0));
11204 if (operand_equal_p (arg00, arg11, 0))
11205 return fold_build2 (code, type,
11206 fold_build2 (BIT_AND_EXPR, itype,
11207 fold_build2 (BIT_XOR_EXPR, itype,
11210 build_int_cst (itype, 0));
11212 if (operand_equal_p (arg00, arg10, 0))
11213 return fold_build2 (code, type,
11214 fold_build2 (BIT_AND_EXPR, itype,
11215 fold_build2 (BIT_XOR_EXPR, itype,
11218 build_int_cst (itype, 0));
11221 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11222 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11224 tree arg00 = TREE_OPERAND (arg0, 0);
11225 tree arg01 = TREE_OPERAND (arg0, 1);
11226 tree arg10 = TREE_OPERAND (arg1, 0);
11227 tree arg11 = TREE_OPERAND (arg1, 1);
11228 tree itype = TREE_TYPE (arg0);
11230 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11231 operand_equal_p guarantees no side-effects so we don't need
11232 to use omit_one_operand on Z. */
11233 if (operand_equal_p (arg01, arg11, 0))
11234 return fold_build2 (code, type, arg00, arg10);
11235 if (operand_equal_p (arg01, arg10, 0))
11236 return fold_build2 (code, type, arg00, arg11);
11237 if (operand_equal_p (arg00, arg11, 0))
11238 return fold_build2 (code, type, arg01, arg10);
11239 if (operand_equal_p (arg00, arg10, 0))
11240 return fold_build2 (code, type, arg01, arg11);
11242 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11243 if (TREE_CODE (arg01) == INTEGER_CST
11244 && TREE_CODE (arg11) == INTEGER_CST)
11245 return fold_build2 (code, type,
11246 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11247 fold_build2 (BIT_XOR_EXPR, itype,
11257 tem = fold_comparison (code, type, op0, op1);
11258 if (tem != NULL_TREE)
11261 /* Transform comparisons of the form X +- C CMP X. */
11262 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11263 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11264 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11265 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11266 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11267 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11269 tree arg01 = TREE_OPERAND (arg0, 1);
11270 enum tree_code code0 = TREE_CODE (arg0);
11273 if (TREE_CODE (arg01) == REAL_CST)
11274 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11276 is_positive = tree_int_cst_sgn (arg01);
11278 /* (X - c) > X becomes false. */
11279 if (code == GT_EXPR
11280 && ((code0 == MINUS_EXPR && is_positive >= 0)
11281 || (code0 == PLUS_EXPR && is_positive <= 0)))
11282 return constant_boolean_node (0, type);
11284 /* Likewise (X + c) < X becomes false. */
11285 if (code == LT_EXPR
11286 && ((code0 == PLUS_EXPR && is_positive >= 0)
11287 || (code0 == MINUS_EXPR && is_positive <= 0)))
11288 return constant_boolean_node (0, type);
11290 /* Convert (X - c) <= X to true. */
11291 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11293 && ((code0 == MINUS_EXPR && is_positive >= 0)
11294 || (code0 == PLUS_EXPR && is_positive <= 0)))
11295 return constant_boolean_node (1, type);
11297 /* Convert (X + c) >= X to true. */
11298 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11300 && ((code0 == PLUS_EXPR && is_positive >= 0)
11301 || (code0 == MINUS_EXPR && is_positive <= 0)))
11302 return constant_boolean_node (1, type);
11304 if (TREE_CODE (arg01) == INTEGER_CST)
11306 /* Convert X + c > X and X - c < X to true for integers. */
11307 if (code == GT_EXPR
11308 && ((code0 == PLUS_EXPR && is_positive > 0)
11309 || (code0 == MINUS_EXPR && is_positive < 0)))
11310 return constant_boolean_node (1, type);
11312 if (code == LT_EXPR
11313 && ((code0 == MINUS_EXPR && is_positive > 0)
11314 || (code0 == PLUS_EXPR && is_positive < 0)))
11315 return constant_boolean_node (1, type);
11317 /* Convert X + c <= X and X - c >= X to false for integers. */
11318 if (code == LE_EXPR
11319 && ((code0 == PLUS_EXPR && is_positive > 0)
11320 || (code0 == MINUS_EXPR && is_positive < 0)))
11321 return constant_boolean_node (0, type);
11323 if (code == GE_EXPR
11324 && ((code0 == MINUS_EXPR && is_positive > 0)
11325 || (code0 == PLUS_EXPR && is_positive < 0)))
11326 return constant_boolean_node (0, type);
11330 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11331 This transformation affects the cases which are handled in later
11332 optimizations involving comparisons with non-negative constants. */
11333 if (TREE_CODE (arg1) == INTEGER_CST
11334 && TREE_CODE (arg0) != INTEGER_CST
11335 && tree_int_cst_sgn (arg1) > 0)
11337 if (code == GE_EXPR)
11339 arg1 = const_binop (MINUS_EXPR, arg1,
11340 build_int_cst (TREE_TYPE (arg1), 1), 0);
11341 return fold_build2 (GT_EXPR, type, arg0,
11342 fold_convert (TREE_TYPE (arg0), arg1));
11344 if (code == LT_EXPR)
11346 arg1 = const_binop (MINUS_EXPR, arg1,
11347 build_int_cst (TREE_TYPE (arg1), 1), 0);
11348 return fold_build2 (LE_EXPR, type, arg0,
11349 fold_convert (TREE_TYPE (arg0), arg1));
11353 /* Comparisons with the highest or lowest possible integer of
11354 the specified precision will have known values. */
11356 tree arg1_type = TREE_TYPE (arg1);
11357 unsigned int width = TYPE_PRECISION (arg1_type);
11359 if (TREE_CODE (arg1) == INTEGER_CST
11360 && !TREE_OVERFLOW (arg1)
11361 && width <= 2 * HOST_BITS_PER_WIDE_INT
11362 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11364 HOST_WIDE_INT signed_max_hi;
11365 unsigned HOST_WIDE_INT signed_max_lo;
11366 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11368 if (width <= HOST_BITS_PER_WIDE_INT)
11370 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11375 if (TYPE_UNSIGNED (arg1_type))
11377 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11383 max_lo = signed_max_lo;
11384 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11390 width -= HOST_BITS_PER_WIDE_INT;
11391 signed_max_lo = -1;
11392 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11397 if (TYPE_UNSIGNED (arg1_type))
11399 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11404 max_hi = signed_max_hi;
11405 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11409 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11410 && TREE_INT_CST_LOW (arg1) == max_lo)
11414 return omit_one_operand (type, integer_zero_node, arg0);
11417 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11420 return omit_one_operand (type, integer_one_node, arg0);
11423 return fold_build2 (NE_EXPR, type, arg0, arg1);
11425 /* The GE_EXPR and LT_EXPR cases above are not normally
11426 reached because of previous transformations. */
11431 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11433 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11437 arg1 = const_binop (PLUS_EXPR, arg1,
11438 build_int_cst (TREE_TYPE (arg1), 1), 0);
11439 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11441 arg1 = const_binop (PLUS_EXPR, arg1,
11442 build_int_cst (TREE_TYPE (arg1), 1), 0);
11443 return fold_build2 (NE_EXPR, type, arg0, arg1);
11447 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11449 && TREE_INT_CST_LOW (arg1) == min_lo)
11453 return omit_one_operand (type, integer_zero_node, arg0);
11456 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11459 return omit_one_operand (type, integer_one_node, arg0);
11462 return fold_build2 (NE_EXPR, type, op0, op1);
11467 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11469 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11473 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11474 return fold_build2 (NE_EXPR, type, arg0, arg1);
11476 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11477 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11482 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11483 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11484 && TYPE_UNSIGNED (arg1_type)
11485 /* We will flip the signedness of the comparison operator
11486 associated with the mode of arg1, so the sign bit is
11487 specified by this mode. Check that arg1 is the signed
11488 max associated with this sign bit. */
11489 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11490 /* signed_type does not work on pointer types. */
11491 && INTEGRAL_TYPE_P (arg1_type))
11493 /* The following case also applies to X < signed_max+1
11494 and X >= signed_max+1 because of previous transformations. */
11495 if (code == LE_EXPR || code == GT_EXPR)
11498 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11499 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11500 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11501 type, fold_convert (st0, arg0),
11502 build_int_cst (st1, 0));
11508 /* If we are comparing an ABS_EXPR with a constant, we can
11509 convert all the cases into explicit comparisons, but they may
11510 well not be faster than doing the ABS and one comparison.
11511 But ABS (X) <= C is a range comparison, which becomes a subtraction
11512 and a comparison, and is probably faster. */
11513 if (code == LE_EXPR
11514 && TREE_CODE (arg1) == INTEGER_CST
11515 && TREE_CODE (arg0) == ABS_EXPR
11516 && ! TREE_SIDE_EFFECTS (arg0)
11517 && (0 != (tem = negate_expr (arg1)))
11518 && TREE_CODE (tem) == INTEGER_CST
11519 && !TREE_OVERFLOW (tem))
11520 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11521 build2 (GE_EXPR, type,
11522 TREE_OPERAND (arg0, 0), tem),
11523 build2 (LE_EXPR, type,
11524 TREE_OPERAND (arg0, 0), arg1));
11526 /* Convert ABS_EXPR<x> >= 0 to true. */
11527 if (code == GE_EXPR
11528 && tree_expr_nonnegative_p (arg0)
11529 && (integer_zerop (arg1)
11530 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11531 && real_zerop (arg1))))
11532 return omit_one_operand (type, integer_one_node, arg0);
11534 /* Convert ABS_EXPR<x> < 0 to false. */
11535 if (code == LT_EXPR
11536 && tree_expr_nonnegative_p (arg0)
11537 && (integer_zerop (arg1) || real_zerop (arg1)))
11538 return omit_one_operand (type, integer_zero_node, arg0);
11540 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11541 and similarly for >= into !=. */
11542 if ((code == LT_EXPR || code == GE_EXPR)
11543 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11544 && TREE_CODE (arg1) == LSHIFT_EXPR
11545 && integer_onep (TREE_OPERAND (arg1, 0)))
11546 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11547 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11548 TREE_OPERAND (arg1, 1)),
11549 build_int_cst (TREE_TYPE (arg0), 0));
11551 if ((code == LT_EXPR || code == GE_EXPR)
11552 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11553 && (TREE_CODE (arg1) == NOP_EXPR
11554 || TREE_CODE (arg1) == CONVERT_EXPR)
11555 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11556 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11558 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11559 fold_convert (TREE_TYPE (arg0),
11560 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11561 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11563 build_int_cst (TREE_TYPE (arg0), 0));
11567 case UNORDERED_EXPR:
11575 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11577 t1 = fold_relational_const (code, type, arg0, arg1);
11578 if (t1 != NULL_TREE)
11582 /* If the first operand is NaN, the result is constant. */
11583 if (TREE_CODE (arg0) == REAL_CST
11584 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11585 && (code != LTGT_EXPR || ! flag_trapping_math))
11587 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11588 ? integer_zero_node
11589 : integer_one_node;
11590 return omit_one_operand (type, t1, arg1);
11593 /* If the second operand is NaN, the result is constant. */
11594 if (TREE_CODE (arg1) == REAL_CST
11595 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11596 && (code != LTGT_EXPR || ! flag_trapping_math))
11598 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11599 ? integer_zero_node
11600 : integer_one_node;
11601 return omit_one_operand (type, t1, arg0);
11604 /* Simplify unordered comparison of something with itself. */
11605 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11606 && operand_equal_p (arg0, arg1, 0))
11607 return constant_boolean_node (1, type);
11609 if (code == LTGT_EXPR
11610 && !flag_trapping_math
11611 && operand_equal_p (arg0, arg1, 0))
11612 return constant_boolean_node (0, type);
11614 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11616 tree targ0 = strip_float_extensions (arg0);
11617 tree targ1 = strip_float_extensions (arg1);
11618 tree newtype = TREE_TYPE (targ0);
11620 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11621 newtype = TREE_TYPE (targ1);
11623 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11624 return fold_build2 (code, type, fold_convert (newtype, targ0),
11625 fold_convert (newtype, targ1));
11630 case COMPOUND_EXPR:
11631 /* When pedantic, a compound expression can be neither an lvalue
11632 nor an integer constant expression. */
11633 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11635 /* Don't let (0, 0) be null pointer constant. */
11636 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11637 : fold_convert (type, arg1);
11638 return pedantic_non_lvalue (tem);
11641 if ((TREE_CODE (arg0) == REAL_CST
11642 && TREE_CODE (arg1) == REAL_CST)
11643 || (TREE_CODE (arg0) == INTEGER_CST
11644 && TREE_CODE (arg1) == INTEGER_CST))
11645 return build_complex (type, arg0, arg1);
11649 /* An ASSERT_EXPR should never be passed to fold_binary. */
11650 gcc_unreachable ();
11654 } /* switch (code) */
11657 /* Callback for walk_tree, looking for LABEL_EXPR.
11658 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
11659 Do not check the sub-tree of GOTO_EXPR. */
11662 contains_label_1 (tree *tp,
11663 int *walk_subtrees,
11664 void *data ATTRIBUTE_UNUSED)
/* Dispatch on the tree code of the node currently being visited.  */
11666 switch (TREE_CODE (*tp))
/* Clearing *walk_subtrees tells walk_tree not to descend into this
   node's operands; per the comment above this is done for GOTO_EXPR.
   NOTE(review): the case labels and return statements of this switch
   are not visible in this listing — confirm against the original.  */
11671 *walk_subtrees = 0;
11678 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11679 accessible from outside the sub-tree.  Returns a nonzero (true) value
11680 if such a label is found, zero (false) otherwise. */
11683 contains_label_p (tree st)
/* Walk the whole sub-tree rooted at ST; contains_label_1 reports a
   non-NULL_TREE node (which also stops the walk) as soon as a label
   is encountered, so the comparison below yields the boolean answer.  */
11685 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11688 /* Fold a ternary expression of code CODE and type TYPE with operands
11689 OP0, OP1, and OP2. Return the folded expression if folding is
11690 successful. Otherwise, return NULL_TREE. */
11693 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11696 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11697 enum tree_code_class kind = TREE_CODE_CLASS (code);
11699 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11700 && TREE_CODE_LENGTH (code) == 3);
11702 /* Strip any conversions that don't change the mode. This is safe
11703 for every expression, except for a comparison expression because
11704 its signedness is derived from its operands. So, in the latter
11705 case, only strip conversions that don't change the signedness.
11707 Note that this is done as an internal manipulation within the
11708 constant folder, in order to find the simplest representation of
11709 the arguments so that their form can be studied. In any cases,
11710 the appropriate type conversions should be put back in the tree
11711 that will get out of the constant folder. */
11726 case COMPONENT_REF:
11727 if (TREE_CODE (arg0) == CONSTRUCTOR
11728 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11730 unsigned HOST_WIDE_INT idx;
11732 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11739 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11740 so all simple results must be passed through pedantic_non_lvalue. */
11741 if (TREE_CODE (arg0) == INTEGER_CST)
11743 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11744 tem = integer_zerop (arg0) ? op2 : op1;
11745 /* Only optimize constant conditions when the selected branch
11746 has the same type as the COND_EXPR. This avoids optimizing
11747 away "c ? x : throw", where the throw has a void type.
11748 Avoid throwing away that operand which contains label. */
11749 if ((!TREE_SIDE_EFFECTS (unused_op)
11750 || !contains_label_p (unused_op))
11751 && (! VOID_TYPE_P (TREE_TYPE (tem))
11752 || VOID_TYPE_P (type)))
11753 return pedantic_non_lvalue (tem);
11756 if (operand_equal_p (arg1, op2, 0))
11757 return pedantic_omit_one_operand (type, arg1, arg0);
11759 /* If we have A op B ? A : C, we may be able to convert this to a
11760 simpler expression, depending on the operation and the values
11761 of B and C. Signed zeros prevent all of these transformations,
11762 for reasons given above each one.
11764 Also try swapping the arguments and inverting the conditional. */
11765 if (COMPARISON_CLASS_P (arg0)
11766 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11767 arg1, TREE_OPERAND (arg0, 1))
11768 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11770 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11775 if (COMPARISON_CLASS_P (arg0)
11776 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11778 TREE_OPERAND (arg0, 1))
11779 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11781 tem = fold_truth_not_expr (arg0);
11782 if (tem && COMPARISON_CLASS_P (tem))
11784 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11790 /* If the second operand is simpler than the third, swap them
11791 since that produces better jump optimization results. */
11792 if (truth_value_p (TREE_CODE (arg0))
11793 && tree_swap_operands_p (op1, op2, false))
11795 /* See if this can be inverted. If it can't, possibly because
11796 it was a floating-point inequality comparison, don't do
11798 tem = fold_truth_not_expr (arg0);
11800 return fold_build3 (code, type, tem, op2, op1);
11803 /* Convert A ? 1 : 0 to simply A. */
11804 if (integer_onep (op1)
11805 && integer_zerop (op2)
11806 /* If we try to convert OP0 to our type, the
11807 call to fold will try to move the conversion inside
11808 a COND, which will recurse. In that case, the COND_EXPR
11809 is probably the best choice, so leave it alone. */
11810 && type == TREE_TYPE (arg0))
11811 return pedantic_non_lvalue (arg0);
11813 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11814 over COND_EXPR in cases such as floating point comparisons. */
11815 if (integer_zerop (op1)
11816 && integer_onep (op2)
11817 && truth_value_p (TREE_CODE (arg0)))
11818 return pedantic_non_lvalue (fold_convert (type,
11819 invert_truthvalue (arg0)));
11821 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11822 if (TREE_CODE (arg0) == LT_EXPR
11823 && integer_zerop (TREE_OPERAND (arg0, 1))
11824 && integer_zerop (op2)
11825 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11827 /* sign_bit_p only checks ARG1 bits within A's precision.
11828 If <sign bit of A> has wider type than A, bits outside
11829 of A's precision in <sign bit of A> need to be checked.
11830 If they are all 0, this optimization needs to be done
11831 in unsigned A's type, if they are all 1 in signed A's type,
11832 otherwise this can't be done. */
11833 if (TYPE_PRECISION (TREE_TYPE (tem))
11834 < TYPE_PRECISION (TREE_TYPE (arg1))
11835 && TYPE_PRECISION (TREE_TYPE (tem))
11836 < TYPE_PRECISION (type))
11838 unsigned HOST_WIDE_INT mask_lo;
11839 HOST_WIDE_INT mask_hi;
11840 int inner_width, outer_width;
11843 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11844 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11845 if (outer_width > TYPE_PRECISION (type))
11846 outer_width = TYPE_PRECISION (type);
11848 if (outer_width > HOST_BITS_PER_WIDE_INT)
11850 mask_hi = ((unsigned HOST_WIDE_INT) -1
11851 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11857 mask_lo = ((unsigned HOST_WIDE_INT) -1
11858 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11860 if (inner_width > HOST_BITS_PER_WIDE_INT)
11862 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11863 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11867 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11868 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11870 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11871 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11873 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11874 tem = fold_convert (tem_type, tem);
11876 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11877 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11879 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11880 tem = fold_convert (tem_type, tem);
11887 return fold_convert (type,
11888 fold_build2 (BIT_AND_EXPR,
11889 TREE_TYPE (tem), tem,
11890 fold_convert (TREE_TYPE (tem),
11894 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11895 already handled above. */
11896 if (TREE_CODE (arg0) == BIT_AND_EXPR
11897 && integer_onep (TREE_OPERAND (arg0, 1))
11898 && integer_zerop (op2)
11899 && integer_pow2p (arg1))
11901 tree tem = TREE_OPERAND (arg0, 0);
11903 if (TREE_CODE (tem) == RSHIFT_EXPR
11904 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11905 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11906 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11907 return fold_build2 (BIT_AND_EXPR, type,
11908 TREE_OPERAND (tem, 0), arg1);
11911 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11912 is probably obsolete because the first operand should be a
11913 truth value (that's why we have the two cases above), but let's
11914 leave it in until we can confirm this for all front-ends. */
11915 if (integer_zerop (op2)
11916 && TREE_CODE (arg0) == NE_EXPR
11917 && integer_zerop (TREE_OPERAND (arg0, 1))
11918 && integer_pow2p (arg1)
11919 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11920 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11921 arg1, OEP_ONLY_CONST))
11922 return pedantic_non_lvalue (fold_convert (type,
11923 TREE_OPERAND (arg0, 0)));
11925 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11926 if (integer_zerop (op2)
11927 && truth_value_p (TREE_CODE (arg0))
11928 && truth_value_p (TREE_CODE (arg1)))
11929 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11930 fold_convert (type, arg0),
11933 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11934 if (integer_onep (op2)
11935 && truth_value_p (TREE_CODE (arg0))
11936 && truth_value_p (TREE_CODE (arg1)))
11938 /* Only perform transformation if ARG0 is easily inverted. */
11939 tem = fold_truth_not_expr (arg0);
11941 return fold_build2 (TRUTH_ORIF_EXPR, type,
11942 fold_convert (type, tem),
11946 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11947 if (integer_zerop (arg1)
11948 && truth_value_p (TREE_CODE (arg0))
11949 && truth_value_p (TREE_CODE (op2)))
11951 /* Only perform transformation if ARG0 is easily inverted. */
11952 tem = fold_truth_not_expr (arg0);
11954 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11955 fold_convert (type, tem),
11959 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11960 if (integer_onep (arg1)
11961 && truth_value_p (TREE_CODE (arg0))
11962 && truth_value_p (TREE_CODE (op2)))
11963 return fold_build2 (TRUTH_ORIF_EXPR, type,
11964 fold_convert (type, arg0),
11970 /* Check for a built-in function. */
11971 if (TREE_CODE (op0) == ADDR_EXPR
11972 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11973 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11974 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11977 case BIT_FIELD_REF:
11978 if (TREE_CODE (arg0) == VECTOR_CST
11979 && type == TREE_TYPE (TREE_TYPE (arg0))
11980 && host_integerp (arg1, 1)
11981 && host_integerp (op2, 1))
11983 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11984 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11987 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11988 && (idx % width) == 0
11989 && (idx = idx / width)
11990 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11992 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11993 while (idx-- > 0 && elements)
11994 elements = TREE_CHAIN (elements);
11996 return TREE_VALUE (elements);
11998 return fold_convert (type, integer_zero_node);
12005 } /* switch (code) */
12008 /* Perform constant folding and related simplification of EXPR.
12009 The related simplifications include x*1 => x, x*0 => 0, etc.,
12010 and application of the associative law.
12011 NOP_EXPR conversions may be removed freely (as long as we
12012 are careful not to change the type of the overall expression).
12013 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12014 but we can constant-fold them if they have constant operands. */
12016 #ifdef ENABLE_FOLD_CHECKING
12017 # define fold(x) fold_1 (x)
12018 static tree fold_1 (tree);
12024 const tree t = expr;
12025 enum tree_code code = TREE_CODE (t);
12026 enum tree_code_class kind = TREE_CODE_CLASS (code);
12029 /* Return right away if a constant. */
12030 if (kind == tcc_constant)
12033 if (IS_EXPR_CODE_CLASS (kind)
12034 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12036 tree type = TREE_TYPE (t);
12037 tree op0, op1, op2;
12039 switch (TREE_CODE_LENGTH (code))
12042 op0 = TREE_OPERAND (t, 0);
12043 tem = fold_unary (code, type, op0);
12044 return tem ? tem : expr;
12046 op0 = TREE_OPERAND (t, 0);
12047 op1 = TREE_OPERAND (t, 1);
12048 tem = fold_binary (code, type, op0, op1);
12049 return tem ? tem : expr;
12051 op0 = TREE_OPERAND (t, 0);
12052 op1 = TREE_OPERAND (t, 1);
12053 op2 = TREE_OPERAND (t, 2);
12054 tem = fold_ternary (code, type, op0, op1, op2);
12055 return tem ? tem : expr;
12064 return fold (DECL_INITIAL (t));
12068 } /* switch (code) */
12071 #ifdef ENABLE_FOLD_CHECKING
12074 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12075 static void fold_check_failed (tree, tree);
12076 void print_fold_checksum (tree);
12078 /* When --enable-checking=fold, compute a digest of expr before
12079 and after actual fold call to see if fold did not accidentally
12080 change original expr. */
12086 struct md5_ctx ctx;
12087 unsigned char checksum_before[16], checksum_after[16];
/* Hash table of already-visited nodes; fold_checksum_tree uses it to
   avoid re-walking shared subtrees.  */
12090 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12091 md5_init_ctx (&ctx);
12092 fold_checksum_tree (expr, &ctx, ht);
12093 md5_finish_ctx (&ctx, checksum_before);
12096 ret = fold_1 (expr);
12098 md5_init_ctx (&ctx);
12099 fold_checksum_tree (expr, &ctx, ht);
12100 md5_finish_ctx (&ctx, checksum_after);
/* Any difference means fold_1 mutated its input in place, which is a
   hard internal error.  */
12103 if (memcmp (checksum_before, checksum_after, 16))
12104 fold_check_failed (expr, ret);
/* Debugging aid: print the MD5 digest of EXPR (as computed by
   fold_checksum_tree) to stderr as 32 hex digits plus a newline.  */
12110 print_fold_checksum (tree expr)
12112 struct md5_ctx ctx;
12113 unsigned char checksum[16], cnt;
12116 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12117 md5_init_ctx (&ctx);
12118 fold_checksum_tree (expr, &ctx, ht);
12119 md5_finish_ctx (&ctx, checksum);
12121 for (cnt = 0; cnt < 16; ++cnt)
12122 fprintf (stderr, "%02x", checksum[cnt]);
12123 putc ('\n', stderr);
/* Report a fold-checking failure.  Both arguments are kept only for
   debugger inspection, hence ATTRIBUTE_UNUSED.  Does not return.  */
12127 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12129 internal_error ("fold check: original tree changed by fold");
/* Fold the bytes of EXPR and all trees reachable from it into the MD5
   context CTX.  HT records visited nodes so shared subtrees are hashed
   only once.  Fields that fold is legitimately allowed to modify
   (assembler names, cached type values, etc.) are masked out by hashing
   a scrubbed stack copy of the node instead of the node itself.  */
12133 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12136 enum tree_code code;
12137 struct tree_function_decl buf;
/* BUF must be large enough to hold a copy of any node we scrub;
   tree_function_decl is the biggest candidate, asserted here.  */
12142 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12143 <= sizeof (struct tree_function_decl))
12144 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12147 slot = htab_find_slot (ht, expr, INSERT);
12151 code = TREE_CODE (expr);
12152 if (TREE_CODE_CLASS (code) == tcc_declaration
12153 && DECL_ASSEMBLER_NAME_SET_P (expr))
12155 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12156 memcpy ((char *) &buf, expr, tree_size (expr));
12157 expr = (tree) &buf;
12158 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12160 else if (TREE_CODE_CLASS (code) == tcc_type
12161 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12162 || TYPE_CACHED_VALUES_P (expr)
12163 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12165 /* Allow these fields to be modified. */
12166 memcpy ((char *) &buf, expr, tree_size (expr))
12167 expr = (tree) &buf;
12168 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12169 TYPE_POINTER_TO (expr) = NULL;
12170 TYPE_REFERENCE_TO (expr) = NULL;
12171 if (TYPE_CACHED_VALUES_P (expr))
12173 TYPE_CACHED_VALUES_P (expr) = 0;
12174 TYPE_CACHED_VALUES (expr) = NULL;
/* Hash the node's raw bytes, then recurse into its edges.  */
12177 md5_process_bytes (expr, tree_size (expr), ctx);
12178 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12179 if (TREE_CODE_CLASS (code) != tcc_type
12180 && TREE_CODE_CLASS (code) != tcc_declaration
12181 && code != TREE_LIST)
12182 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
/* Per-class recursion into class-specific fields; several case labels
   are elided in this view.  */
12183 switch (TREE_CODE_CLASS (code))
12189 md5_process_bytes (TREE_STRING_POINTER (expr),
12190 TREE_STRING_LENGTH (expr), ctx);
12193 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12194 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12197 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12203 case tcc_exceptional:
12207 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12208 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
/* Walk TREE_LIST chains iteratively via goto to avoid deep recursion.  */
12209 expr = TREE_CHAIN (expr);
12210 goto recursive_label;
12213 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12214 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12220 case tcc_expression:
12221 case tcc_reference:
12222 case tcc_comparison:
12225 case tcc_statement:
12226 len = TREE_CODE_LENGTH (code);
12227 for (i = 0; i < len; ++i)
12228 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12230 case tcc_declaration:
12231 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12232 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12233 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12235 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12236 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12237 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12238 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12239 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12241 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12242 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12244 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12246 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12247 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12248 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12252 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12253 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12254 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12255 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12256 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12257 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12258 if (INTEGRAL_TYPE_P (expr)
12259 || SCALAR_FLOAT_TYPE_P (expr))
12261 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12262 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12264 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12265 if (TREE_CODE (expr) == RECORD_TYPE
12266 || TREE_CODE (expr) == UNION_TYPE
12267 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12268 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12269 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12278 /* Fold a unary tree expression with code CODE of type TYPE with an
12279 operand OP0. Return a folded expression if successful. Otherwise,
12280 return a tree expression with code CODE of type TYPE with an
12284 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12287 #ifdef ENABLE_FOLD_CHECKING
/* Checksum OP0 before and after, like the checking wrapper of fold:
   fold_unary must never modify its operand in place.  */
12288 unsigned char checksum_before[16], checksum_after[16];
12289 struct md5_ctx ctx;
12292 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12293 md5_init_ctx (&ctx);
12294 fold_checksum_tree (op0, &ctx, ht);
12295 md5_finish_ctx (&ctx, checksum_before);
/* Try to fold; if no simplification applies, build the plain tree.  */
12299 tem = fold_unary (code, type, op0);
12301 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12303 #ifdef ENABLE_FOLD_CHECKING
12304 md5_init_ctx (&ctx);
12305 fold_checksum_tree (op0, &ctx, ht);
12306 md5_finish_ctx (&ctx, checksum_after);
12309 if (memcmp (checksum_before, checksum_after, 16))
12310 fold_check_failed (op0, tem);
12315 /* Fold a binary tree expression with code CODE of type TYPE with
12316 operands OP0 and OP1. Return a folded expression if successful.
12317 Otherwise, return a tree expression with code CODE of type TYPE
12318 with operands OP0 and OP1. */
12321 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12325 #ifdef ENABLE_FOLD_CHECKING
/* Independent before/after digests for each operand so a mutation can
   be attributed to the operand that changed.  */
12326 unsigned char checksum_before_op0[16],
12327 checksum_before_op1[16],
12328 checksum_after_op0[16],
12329 checksum_after_op1[16];
12330 struct md5_ctx ctx;
12333 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12334 md5_init_ctx (&ctx);
12335 fold_checksum_tree (op0, &ctx, ht);
12336 md5_finish_ctx (&ctx, checksum_before_op0);
12339 md5_init_ctx (&ctx);
12340 fold_checksum_tree (op1, &ctx, ht);
12341 md5_finish_ctx (&ctx, checksum_before_op1);
/* Try to fold; fall back to building the plain binary node.  */
12345 tem = fold_binary (code, type, op0, op1);
12347 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12349 #ifdef ENABLE_FOLD_CHECKING
12350 md5_init_ctx (&ctx);
12351 fold_checksum_tree (op0, &ctx, ht);
12352 md5_finish_ctx (&ctx, checksum_after_op0);
12355 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12356 fold_check_failed (op0, tem);
12358 md5_init_ctx (&ctx);
12359 fold_checksum_tree (op1, &ctx, ht);
12360 md5_finish_ctx (&ctx, checksum_after_op1);
12363 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12364 fold_check_failed (op1, tem);
12369 /* Fold a ternary tree expression with code CODE of type TYPE with
12370 operands OP0, OP1, and OP2. Return a folded expression if
12371 successful. Otherwise, return a tree expression with code CODE of
12372 type TYPE with operands OP0, OP1, and OP2. */
12375 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12379 #ifdef ENABLE_FOLD_CHECKING
/* Same scheme as fold_build2_stat, extended to three operands.  */
12380 unsigned char checksum_before_op0[16],
12381 checksum_before_op1[16],
12382 checksum_before_op2[16],
12383 checksum_after_op0[16],
12384 checksum_after_op1[16],
12385 checksum_after_op2[16];
12386 struct md5_ctx ctx;
12389 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12390 md5_init_ctx (&ctx);
12391 fold_checksum_tree (op0, &ctx, ht);
12392 md5_finish_ctx (&ctx, checksum_before_op0);
12395 md5_init_ctx (&ctx);
12396 fold_checksum_tree (op1, &ctx, ht);
12397 md5_finish_ctx (&ctx, checksum_before_op1);
12400 md5_init_ctx (&ctx);
12401 fold_checksum_tree (op2, &ctx, ht);
12402 md5_finish_ctx (&ctx, checksum_before_op2);
/* Try to fold; fall back to building the plain ternary node.  */
12406 tem = fold_ternary (code, type, op0, op1, op2);
12408 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12410 #ifdef ENABLE_FOLD_CHECKING
12411 md5_init_ctx (&ctx);
12412 fold_checksum_tree (op0, &ctx, ht);
12413 md5_finish_ctx (&ctx, checksum_after_op0);
12416 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12417 fold_check_failed (op0, tem);
12419 md5_init_ctx (&ctx);
12420 fold_checksum_tree (op1, &ctx, ht);
12421 md5_finish_ctx (&ctx, checksum_after_op1);
12424 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12425 fold_check_failed (op1, tem);
12427 md5_init_ctx (&ctx);
12428 fold_checksum_tree (op2, &ctx, ht);
12429 md5_finish_ctx (&ctx, checksum_after_op2);
12432 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12433 fold_check_failed (op2, tem);
12438 /* Perform constant folding and related simplification of initializer
12439 expression EXPR. These behave identically to "fold_buildN" but ignore
12440 potential run-time traps and exceptions that fold must preserve. */
/* START_FOLD_INIT saves and clears the trap/rounding flags so folding
   inside a static initializer is unconstrained; END_FOLD_INIT restores
   them.  They must be used as a matched pair in one scope.  */
12442 #define START_FOLD_INIT \
12443 int saved_signaling_nans = flag_signaling_nans;\
12444 int saved_trapping_math = flag_trapping_math;\
12445 int saved_rounding_math = flag_rounding_math;\
12446 int saved_trapv = flag_trapv;\
12447 int saved_folding_initializer = folding_initializer;\
12448 flag_signaling_nans = 0;\
12449 flag_trapping_math = 0;\
12450 flag_rounding_math = 0;\
12452 folding_initializer = 1;
12454 #define END_FOLD_INIT \
12455 flag_signaling_nans = saved_signaling_nans;\
12456 flag_trapping_math = saved_trapping_math;\
12457 flag_rounding_math = saved_rounding_math;\
12458 flag_trapv = saved_trapv;\
12459 folding_initializer = saved_folding_initializer;
/* Initializer-context variants: fold_buildN with traps disabled.  */
12462 fold_build1_initializer (enum tree_code code, tree type, tree op)
12467 result = fold_build1 (code, type, op);
12474 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12479 result = fold_build2 (code, type, op0, op1);
12486 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12492 result = fold_build3 (code, type, op0, op1, op2);
12498 #undef START_FOLD_INIT
12499 #undef END_FOLD_INIT
12501 /* Determine if first argument is a multiple of second argument. Return 0 if
12502 it is not, or we cannot easily determined it to be.
12504 An example of the sort of thing we care about (at this point; this routine
12505 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12506 fold cases do now) is discovering that
12508 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12514 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12516 This code also handles discovering that
12518 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12520 is a multiple of 8 so we don't have to worry about dealing with a
12521 possible remainder.
12523 Note that we *look* inside a SAVE_EXPR only to determine how it was
12524 calculated; it is not safe for fold to do much of anything else with the
12525 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12526 at run time. For example, the latter example above *cannot* be implemented
12527 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12528 evaluation time of the original SAVE_EXPR is not necessarily the same at
12529 the time the new expression is evaluated. The only optimization of this
12530 sort that would be valid is changing
12532 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12536 SAVE_EXPR (I) * SAVE_EXPR (J)
12538 (where the same SAVE_EXPR (J) is used in the original and the
12539 transformed version). */
12542 multiple_of_p (tree type, tree top, tree bottom)
/* Trivially true when TOP and BOTTOM are structurally identical.  */
12544 if (operand_equal_p (top, bottom, 0))
/* Conservatively answer "don't know" for non-integer types.  */
12547 if (TREE_CODE (type) != INTEGER_TYPE)
12550 switch (TREE_CODE (top))
12553 /* Bitwise and provides a power of two multiple. If the mask is
12554 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12555 if (!integer_pow2p (bottom))
12560 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12561 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom))
/* For +/- both operands must be multiples for the sum to be one.  */
12565 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12566 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Rewrite X << C as X * (1 << C) and recurse, guarding against the
   shift count overflowing the precision.  */
12569 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12573 op1 = TREE_OPERAND (top, 1);
12574 /* const_binop may not detect overflow correctly,
12575 so check for it explicitly here. */
12576 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12577 > TREE_INT_CST_LOW (op1)
12578 && TREE_INT_CST_HIGH (op1) == 0
12579 && 0 != (t1 = fold_convert (type,
12580 const_binop (LSHIFT_EXPR,
12583 && !TREE_OVERFLOW (t1))
12584 return multiple_of_p (type, t1, bottom);
12589 /* Can't handle conversions from non-integral or wider integral type. */
12590 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12591 || (TYPE_PRECISION (type)
12592 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12595 /* .. fall through ... */
12598 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Base case: both constants — check TOP % BOTTOM == 0.  Refuse
   negative constants under an unsigned TYPE, where modulo semantics
   would be wrong.  */
12601 if (TREE_CODE (bottom) != INTEGER_CST
12602 || (TYPE_UNSIGNED (type)
12603 && (tree_int_cst_sgn (top) < 0
12604 || tree_int_cst_sgn (bottom) < 0)))
12606 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
12614 /* Return true if `t' is known to be non-negative. */
12617 tree_expr_nonnegative_p (tree t)
12619 if (t == error_mark_node)
12622 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12625 switch (TREE_CODE (t))
12628 /* Query VRP to see if it has recorded any information about
12629 the range of this object. */
12630 return ssa_name_nonnegative_p (t);
12633 /* We can't return 1 if flag_wrapv is set because
12634 ABS_EXPR<INT_MIN> = INT_MIN. */
12635 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
12637 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
12642 return tree_int_cst_sgn (t) >= 0;
12645 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12648 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12649 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12650 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12652 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12653 both unsigned and at least 2 bits shorter than the result. */
12654 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12655 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12656 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12658 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12659 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12660 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12661 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12663 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12664 TYPE_PRECISION (inner2)) + 1;
12665 return prec < TYPE_PRECISION (TREE_TYPE (t));
12671 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12673 /* x * x for floating point x is always non-negative. */
12674 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12676 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12677 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12680 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12681 both unsigned and their total bits is shorter than the result. */
12682 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12683 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12684 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12686 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12687 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12688 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12689 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12690 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12691 < TYPE_PRECISION (TREE_TYPE (t));
12697 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12698 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12704 case TRUNC_DIV_EXPR:
12705 case CEIL_DIV_EXPR:
12706 case FLOOR_DIV_EXPR:
12707 case ROUND_DIV_EXPR:
12708 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12709 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12711 case TRUNC_MOD_EXPR:
12712 case CEIL_MOD_EXPR:
12713 case FLOOR_MOD_EXPR:
12714 case ROUND_MOD_EXPR:
12716 case NON_LVALUE_EXPR:
12718 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12720 case COMPOUND_EXPR:
12722 case GIMPLE_MODIFY_STMT:
12723 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12726 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12729 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12730 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12734 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12735 tree outer_type = TREE_TYPE (t);
12737 if (TREE_CODE (outer_type) == REAL_TYPE)
12739 if (TREE_CODE (inner_type) == REAL_TYPE)
12740 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12741 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12743 if (TYPE_UNSIGNED (inner_type))
12745 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12748 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12750 if (TREE_CODE (inner_type) == REAL_TYPE)
12751 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12752 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12753 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12754 && TYPE_UNSIGNED (inner_type);
12761 tree temp = TARGET_EXPR_SLOT (t);
12762 t = TARGET_EXPR_INITIAL (t);
12764 /* If the initializer is non-void, then it's a normal expression
12765 that will be assigned to the slot. */
12766 if (!VOID_TYPE_P (t))
12767 return tree_expr_nonnegative_p (t);
12769 /* Otherwise, the initializer sets the slot in some way. One common
12770 way is an assignment statement at the end of the initializer. */
12773 if (TREE_CODE (t) == BIND_EXPR)
12774 t = expr_last (BIND_EXPR_BODY (t));
12775 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12776 || TREE_CODE (t) == TRY_CATCH_EXPR)
12777 t = expr_last (TREE_OPERAND (t, 0));
12778 else if (TREE_CODE (t) == STATEMENT_LIST)
12783 if ((TREE_CODE (t) == MODIFY_EXPR
12784 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
12785 && GENERIC_TREE_OPERAND (t, 0) == temp)
12786 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12793 tree fndecl = get_callee_fndecl (t);
12794 tree arglist = TREE_OPERAND (t, 1);
12795 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12796 switch (DECL_FUNCTION_CODE (fndecl))
12798 CASE_FLT_FN (BUILT_IN_ACOS):
12799 CASE_FLT_FN (BUILT_IN_ACOSH):
12800 CASE_FLT_FN (BUILT_IN_CABS):
12801 CASE_FLT_FN (BUILT_IN_COSH):
12802 CASE_FLT_FN (BUILT_IN_ERFC):
12803 CASE_FLT_FN (BUILT_IN_EXP):
12804 CASE_FLT_FN (BUILT_IN_EXP10):
12805 CASE_FLT_FN (BUILT_IN_EXP2):
12806 CASE_FLT_FN (BUILT_IN_FABS):
12807 CASE_FLT_FN (BUILT_IN_FDIM):
12808 CASE_FLT_FN (BUILT_IN_HYPOT):
12809 CASE_FLT_FN (BUILT_IN_POW10):
12810 CASE_INT_FN (BUILT_IN_FFS):
12811 CASE_INT_FN (BUILT_IN_PARITY):
12812 CASE_INT_FN (BUILT_IN_POPCOUNT):
12813 case BUILT_IN_BSWAP32:
12814 case BUILT_IN_BSWAP64:
12818 CASE_FLT_FN (BUILT_IN_SQRT):
12819 /* sqrt(-0.0) is -0.0. */
12820 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12822 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12824 CASE_FLT_FN (BUILT_IN_ASINH):
12825 CASE_FLT_FN (BUILT_IN_ATAN):
12826 CASE_FLT_FN (BUILT_IN_ATANH):
12827 CASE_FLT_FN (BUILT_IN_CBRT):
12828 CASE_FLT_FN (BUILT_IN_CEIL):
12829 CASE_FLT_FN (BUILT_IN_ERF):
12830 CASE_FLT_FN (BUILT_IN_EXPM1):
12831 CASE_FLT_FN (BUILT_IN_FLOOR):
12832 CASE_FLT_FN (BUILT_IN_FMOD):
12833 CASE_FLT_FN (BUILT_IN_FREXP):
12834 CASE_FLT_FN (BUILT_IN_LCEIL):
12835 CASE_FLT_FN (BUILT_IN_LDEXP):
12836 CASE_FLT_FN (BUILT_IN_LFLOOR):
12837 CASE_FLT_FN (BUILT_IN_LLCEIL):
12838 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12839 CASE_FLT_FN (BUILT_IN_LLRINT):
12840 CASE_FLT_FN (BUILT_IN_LLROUND):
12841 CASE_FLT_FN (BUILT_IN_LRINT):
12842 CASE_FLT_FN (BUILT_IN_LROUND):
12843 CASE_FLT_FN (BUILT_IN_MODF):
12844 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12845 CASE_FLT_FN (BUILT_IN_RINT):
12846 CASE_FLT_FN (BUILT_IN_ROUND):
12847 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12848 CASE_FLT_FN (BUILT_IN_SINH):
12849 CASE_FLT_FN (BUILT_IN_TANH):
12850 CASE_FLT_FN (BUILT_IN_TRUNC):
12851 /* True if the 1st argument is nonnegative. */
12852 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12854 CASE_FLT_FN (BUILT_IN_FMAX):
12855 /* True if the 1st OR 2nd arguments are nonnegative. */
12856 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12857 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12859 CASE_FLT_FN (BUILT_IN_FMIN):
12860 /* True if the 1st AND 2nd arguments are nonnegative. */
12861 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12862 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12864 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12865 /* True if the 2nd argument is nonnegative. */
12866 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12868 CASE_FLT_FN (BUILT_IN_POWI):
12869 /* True if the 1st argument is nonnegative or the second
12870 argument is an even integer. */
12871 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
12873 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
12874 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
12877 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12879 CASE_FLT_FN (BUILT_IN_POW):
12880 /* True if the 1st argument is nonnegative or the second
12881 argument is an even integer valued real. */
12882 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
12887 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
12888 n = real_to_integer (&c);
12891 REAL_VALUE_TYPE cint;
12892 real_from_integer (&cint, VOIDmode, n,
12893 n < 0 ? -1 : 0, 0);
12894 if (real_identical (&c, &cint))
12898 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12905 /* ... fall through ... */
12908 if (truth_value_p (TREE_CODE (t)))
12909 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12913 /* We don't know sign of `t', so be conservative and return false. */
12917 /* Return true when T is an address and is known to be nonzero.
12918 For floating point we further ensure that T is not denormal.
12919 Similar logic is present in nonzero_address in rtlanal.h. */
12922 tree_expr_nonzero_p (tree t)
12924 tree type = TREE_TYPE (t);
12926 /* Doing something useful for floating point would need more work. */
12927 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
/* Dispatch on the expression code; some case labels are elided in this
   view.  All answers are conservative: false means "unknown".  */
12930 switch (TREE_CODE (t))
12933 /* Query VRP to see if it has recorded any information about
12934 the range of this object. */
12935 return ssa_name_nonzero_p (t);
/* ABS_EXPR: |x| != 0 iff x != 0.  */
12938 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* INTEGER_CST.  */
12941 return !integer_zerop (t);
/* PLUS_EXPR: only provable when overflow is undefined.  */
12944 if (TYPE_OVERFLOW_UNDEFINED (type))
12946 /* With the presence of negative values it is hard
12947 to say something. */
12948 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12949 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12951 /* One of operands must be positive and the other non-negative. */
12952 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12953 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* MULT_EXPR: nonzero * nonzero is nonzero when overflow is UB.  */
12958 if (TYPE_OVERFLOW_UNDEFINED (type))
12960 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12961 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* NOP_EXPR: a non-narrowing conversion preserves nonzero-ness.  */
12967 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12968 tree outer_type = TREE_TYPE (t);
12970 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12971 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
/* ADDR_EXPR: the address of most objects is nonzero...  */
12977 tree base = get_base_address (TREE_OPERAND (t, 0));
12982 /* Weak declarations may link to NULL. */
12983 if (VAR_OR_FUNCTION_DECL_P (base))
12984 return !DECL_WEAK (base);
12986 /* Constants are never weak. */
12987 if (CONSTANT_CLASS_P (base))
/* COND_EXPR: both arms must be nonzero.  */
12994 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12995 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
/* MIN_EXPR: both operands must be nonzero.  */
12998 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12999 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* MAX_EXPR.  */
13002 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
13004 /* When both operands are nonzero, then MAX must be too. */
13005 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
13008 /* MAX where operand 0 is positive is positive. */
13009 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
13011 /* MAX where operand 1 is positive is positive. */
13012 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
13013 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
13017 case COMPOUND_EXPR:
13019 case GIMPLE_MODIFY_STMT:
13021 return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));
13024 case NON_LVALUE_EXPR:
13025 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* BIT_IOR_EXPR: nonzero if either operand is nonzero.  */
13028 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
13029 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* CALL_EXPR: alloca never returns NULL.  */
13032 return alloca_call_p (t);
13040 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13041 attempt to fold the expression to a constant without modifying TYPE,
13044 If the expression could be simplified to a constant, then return
13045 the constant. If the expression would not be simplified to a
13046 constant, then return NULL_TREE. */
13049 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13051 tree tem = fold_binary (code, type, op0, op1);
/* Keep the result only when it actually folded to a constant.  */
13052 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13055 /* Given the components of a unary expression CODE, TYPE and OP0,
13056 attempt to fold the expression to a constant without modifying
13059 If the expression could be simplified to a constant, then return
13060 the constant. If the expression would not be simplified to a
13061 constant, then return NULL_TREE. */
13064 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13066 tree tem = fold_unary (code, type, op0);
/* Keep the result only when it actually folded to a constant.  */
13067 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13070 /* If EXP represents referencing an element in a constant string
13071 (either via pointer arithmetic or array indexing), return the
13072 tree representing the value accessed, otherwise return NULL. */
13075 fold_read_from_constant_string (tree exp)
13077 if ((TREE_CODE (exp) == INDIRECT_REF
13078 || TREE_CODE (exp) == ARRAY_REF)
13079 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13081 tree exp1 = TREE_OPERAND (exp, 0);
/* INDIRECT_REF: string_constant extracts both the STRING_CST and the
   byte offset; ARRAY_REF: compute the index from the subscript.  */
13085 if (TREE_CODE (exp) == INDIRECT_REF)
13086 string = string_constant (exp1, &index);
13089 tree low_bound = array_ref_low_bound (exp);
13090 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
13092 /* Optimize the special-case of a zero lower bound.
13094 We convert the low_bound to sizetype to avoid some problems
13095 with constant folding. (E.g. suppose the lower bound is 1,
13096 and its mode is QI. Without the conversion,l (ARRAY
13097 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13098 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
13099 if (! integer_zerop (low_bound))
13100 index = size_diffop (index, fold_convert (sizetype, low_bound));
/* Fold only single-byte elements of an in-bounds constant index into
   an actual STRING_CST; otherwise fall through and return NULL.  */
13106 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13107 && TREE_CODE (string) == STRING_CST
13108 && TREE_CODE (index) == INTEGER_CST
13109 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13110 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13112 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13113 return fold_convert (TREE_TYPE (exp),
13114 build_int_cst (NULL_TREE,
13115 (TREE_STRING_POINTER (string)
13116 [TREE_INT_CST_LOW (index)])));
13121 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13122 an integer constant or real constant.
13124 TYPE is the type of the result. */
13127 fold_negate_const (tree arg0, tree type)
13129 tree t = NULL_TREE;
13131 switch (TREE_CODE (arg0))
13135 unsigned HOST_WIDE_INT low;
13136 HOST_WIDE_INT high;
/* Double-word negation; overflow matters only for signed types.  */
13137 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13138 TREE_INT_CST_HIGH (arg0),
13140 t = force_fit_type_double (type, low, high, 1,
13141 (overflow | TREE_OVERFLOW (arg0))
13142 && !TYPE_UNSIGNED (type));
/* REAL_CST: flip the sign of the real value.  */
13147 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13151 gcc_unreachable ();
13157 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13158 an integer constant or real constant.
13160 TYPE is the type of the result. */
13163 fold_abs_const (tree arg0, tree type)
13165 tree t = NULL_TREE;
13167 switch (TREE_CODE (arg0))
13170 /* If the value is unsigned, then the absolute value is
13171 the same as the ordinary value. */
13172 if (TYPE_UNSIGNED (type))
13174 /* Similarly, if the value is non-negative. */
13175 else if (INT_CST_LT (integer_minus_one_node, arg0))
13177 /* If the value is negative, then the absolute value is
/* ... its double-word negation, with overflow tracked (|INT_MIN|
   overflows).  */
13181 unsigned HOST_WIDE_INT low;
13182 HOST_WIDE_INT high;
13183 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13184 TREE_INT_CST_HIGH (arg0),
13186 t = force_fit_type_double (type, low, high, -1,
13187 overflow | TREE_OVERFLOW (arg0));
/* REAL_CST: negate only if the sign bit is set.  */
13192 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13193 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13199 gcc_unreachable ();
13205 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13206 constant. TYPE is the type of the result. */
13209 fold_not_const (tree arg0, tree type)
13211 tree t = NULL_TREE;
13213 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
/* Bitwise complement of both halves of the double-word constant,
   refit to TYPE while propagating any pre-existing overflow flag.  */
13215 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13216 ~TREE_INT_CST_HIGH (arg0), 0,
13217 TREE_OVERFLOW (arg0));
13222 /* Given CODE, a relational operator, the target type, TYPE and two
13223 constant operands OP0 and OP1, return the result of the
13224 relational operation. If the result is not a compile time
13225 constant, then return NULL_TREE. */
13228 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13230 int result, invert;
13232 /* From here on, the only cases we handle are when the result is
13233 known to be a constant. */
13235 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13237 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13238 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13240 /* Handle the cases where either operand is a NaN. */
13241 if (real_isnan (c0) || real_isnan (c1))
13251 case UNORDERED_EXPR:
/* With trapping math, an ordered comparison of a NaN cannot be
   folded away (the comparison may raise an exception).  */
13265 if (flag_trapping_math)
13271 gcc_unreachable ();
13274 return constant_boolean_node (result, type);
13276 return constant_boolean_node (real_compare (code, c0, c1), type);
13280 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13282 To compute GT, swap the arguments and do LT.
13283 To compute GE, do LT and invert the result.
13284 To compute LE, swap the arguments, do LT and invert the result.
13285 To compute NE, do EQ and invert the result.
13287 Therefore, the code below must handle only EQ and LT. */
13289 if (code == LE_EXPR || code == GT_EXPR)
13294 code = swap_tree_comparison (code);
13297 /* Note that it is safe to invert for real values here because we
13298 have already handled the one case that it matters. */
13301 if (code == NE_EXPR || code == GE_EXPR)
13304 code = invert_tree_comparison (code, false);
13307 /* Compute a result for LT or EQ if args permit;
13308 Otherwise return T. */
13309 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13311 if (code == EQ_EXPR)
13312 result = tree_int_cst_equal (op0, op1);
/* Pick the signed or unsigned double-word comparison to match
   the operands' type.  */
13313 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13314 result = INT_CST_LT_UNSIGNED (op0, op1);
13316 result = INT_CST_LT (op0, op1);
13323 return constant_boolean_node (result, type);
13326 /* Build an expression for the a clean point containing EXPR with type TYPE.
13327 Don't build a cleanup point expression for EXPR which don't have side
13331 fold_build_cleanup_point_expr (tree type, tree expr)
13333 /* If the expression does not have side effects then we don't have to wrap
13334 it with a cleanup point expression. */
13335 if (!TREE_SIDE_EFFECTS (expr))
13338 /* If the expression is a return, check to see if the expression inside the
13339 return has no side effects or the right hand side of the modify expression
13340 inside the return. If either don't have side effects set we don't need to
13341 wrap the expression in a cleanup point expression. Note we don't check the
13342 left hand side of the modify because it should always be a return decl. */
13343 if (TREE_CODE (expr) == RETURN_EXPR)
13345 tree op = TREE_OPERAND (expr, 0);
13346 if (!op || !TREE_SIDE_EFFECTS (op))
/* Here OP is the MODIFY inside the return; examine its RHS.  */
13348 op = TREE_OPERAND (op, 1);
13349 if (!TREE_SIDE_EFFECTS (op))
13353 return build1 (CLEANUP_POINT_EXPR, type, expr);
13356 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13357 avoid confusing the gimplify process. */
13360 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13362 /* The size of the object is not relevant when talking about its address. */
13363 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13364 t = TREE_OPERAND (t, 0)
13366 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13367 if (TREE_CODE (t) == INDIRECT_REF
13368 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
/* &*p folds to p itself, converted to PTRTYPE when the types differ.  */
13370 t = TREE_OPERAND (t, 0);
13371 if (TREE_TYPE (t) != ptrtype)
13372 t = build1 (NOP_EXPR, ptrtype, t);
/* NOTE(review): the declaration of "base" (presumably initialized from
   T) is elided in this excerpt — confirm against the full source.
   Taking the address marks the innermost referenced object
   addressable.  */
13378 while (handled_component_p (base))
13379 base = TREE_OPERAND (base, 0);
13381 TREE_ADDRESSABLE (base) = 1;
13383 t = build1 (ADDR_EXPR, ptrtype, t);
/* Build an expression for the address of T, using the pointer type
   naturally derived from T's own type.  */
13390 build_fold_addr_expr (tree t)
13392 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13395 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13396 of an indirection through OP0, or NULL_TREE if no simplification is
13400 fold_indirect_ref_1 (tree type, tree op0)
/* NOTE(review): the declarations of "sub" (derived from OP0) and other
   locals are elided in this excerpt.  */
13406 subtype = TREE_TYPE (sub);
13407 if (!POINTER_TYPE_P (subtype))
/* The interesting simplifications all start from *&something.  */
13410 if (TREE_CODE (sub) == ADDR_EXPR)
13412 tree op = TREE_OPERAND (sub, 0);
13413 tree optype = TREE_TYPE (op);
13414 /* *&CONST_DECL -> to the value of the const decl. */
13415 if (TREE_CODE (op) == CONST_DECL)
13416 return DECL_INITIAL (op);
13417 /* *&p => p; make sure to handle *&"str"[cst] here. */
13418 if (type == optype)
13420 tree fop = fold_read_from_constant_string (op);
13426 /* *(foo *)&fooarray => fooarray[0] */
13427 else if (TREE_CODE (optype) == ARRAY_TYPE
13428 && type == TREE_TYPE (optype))
/* Use the array's declared lower bound as the index; default to 0
   when the domain gives none.  */
13430 tree type_domain = TYPE_DOMAIN (optype);
13431 tree min_val = size_zero_node;
13432 if (type_domain && TYPE_MIN_VALUE (type_domain))
13433 min_val = TYPE_MIN_VALUE (type_domain);
13434 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13436 /* *(foo *)&complexfoo => __real__ complexfoo */
13437 else if (TREE_CODE (optype) == COMPLEX_TYPE
13438 && type == TREE_TYPE (optype))
13439 return fold_build1 (REALPART_EXPR, type, op);
13440 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13441 else if (TREE_CODE (optype) == VECTOR_TYPE
13442 && type == TREE_TYPE (optype))
13444 tree part_width = TYPE_SIZE (type);
13445 tree index = bitsize_int (0);
13446 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
13450 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13451 if (TREE_CODE (sub) == PLUS_EXPR
13452 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13454 tree op00 = TREE_OPERAND (sub, 0);
13455 tree op01 = TREE_OPERAND (sub, 1);
13459 op00type = TREE_TYPE (op00);
13460 if (TREE_CODE (op00) == ADDR_EXPR
13461 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13462 && type == TREE_TYPE (TREE_TYPE (op00type)))
/* Only fold when the constant offset is exactly one element, i.e. it
   addresses the imaginary part.  */
13464 tree size = TYPE_SIZE_UNIT (type);
13465 if (tree_int_cst_equal (size, op01))
13466 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13470 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13471 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13472 && type == TREE_TYPE (TREE_TYPE (subtype)))
13475 tree min_val = size_zero_node;
13476 sub = build_fold_indirect_ref (sub);
13477 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13478 if (type_domain && TYPE_MIN_VALUE (type_domain))
13479 min_val = TYPE_MIN_VALUE (type_domain);
13480 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13486 /* Builds an expression for an indirection through T, simplifying some
13490 build_fold_indirect_ref (tree t)
13492 tree type = TREE_TYPE (TREE_TYPE (t));
13493 tree sub = fold_indirect_ref_1 (type, t);
/* NOTE(review): the "return sub if non-null" path is elided in this
   excerpt; when no simplification applies we build an explicit
   INDIRECT_REF.  */
13498 return build1 (INDIRECT_REF, type, t);
13501 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13504 fold_indirect_ref (tree t)
13506 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
/* NOTE(review): the tail of this function (choosing between SUB and T)
   is elided in this excerpt.  */
13514 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13515 whose result is ignored. The type of the returned tree need not be
13516 the same as the original expression. */
13519 fold_ignored_result (tree t)
/* An expression with no side effects contributes nothing when its
   result is ignored; collapse it to zero.  */
13521 if (!TREE_SIDE_EFFECTS (t))
13522 return integer_zero_node;
/* NOTE(review): the enclosing loop/labels and several case arms of
   this switch are elided in this excerpt; each visible arm peels T
   down to the operand(s) that still carry side effects.  */
13525 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13528 t = TREE_OPERAND (t, 0);
13532 case tcc_comparison:
/* Keep whichever comparison operand still has side effects.  */
13533 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13534 t = TREE_OPERAND (t, 0);
13535 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13536 t = TREE_OPERAND (t, 1);
13541 case tcc_expression:
13542 switch (TREE_CODE (t))
13544 case COMPOUND_EXPR:
/* For a compound expression, only the first operand matters once the
   second has no remaining side effects.  */
13545 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13547 t = TREE_OPERAND (t, 0);
/* NOTE(review): this arm appears to handle COND_EXPR (operands 1 and 2
   are its branches) — the case label is elided; confirm.  */
13551 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13552 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13554 t = TREE_OPERAND (t, 0);
13567 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13568 This can only be applied to objects of a sizetype. */
13571 round_up (tree value, int divisor)
13573 tree div = NULL_TREE;
13575 gcc_assert (divisor > 0);
13579 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13580 have to do anything. Only do this when we are not given a const,
13581 because in that case, this check is more expensive than just
13583 if (TREE_CODE (value) != INTEGER_CST)
13585 div = build_int_cst (TREE_TYPE (value), divisor);
13587 if (multiple_of_p (TREE_TYPE (value), value, div))
13591 /* If divisor is a power of two, simplify this to bit manipulation. */
/* (x & -x) == x exactly when x has a single bit set, i.e. is a power
   of two.  */
13592 if (divisor == (divisor & -divisor))
13594 if (TREE_CODE (value) == INTEGER_CST)
13596 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
13597 unsigned HOST_WIDE_INT high;
/* Already aligned: nothing to do for the constant case.  */
13600 if ((low & (divisor - 1)) == 0)
13603 overflow_p = TREE_OVERFLOW (value);
13604 high = TREE_INT_CST_HIGH (value);
/* NOTE(review): lines that add DIVISOR before masking (and handle
   carry into HIGH) appear to be elided from this excerpt — as shown,
   masking alone would round DOWN; confirm against the full source.  */
13605 low &= ~(divisor - 1);
13614 return force_fit_type_double (TREE_TYPE (value), low, high,
/* Non-constant case: compute (value + divisor-1) & -divisor.  */
13621 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13622 value = size_binop (PLUS_EXPR, value, t);
13623 t = build_int_cst (TREE_TYPE (value), -divisor);
13624 value = size_binop (BIT_AND_EXPR, value, t);
/* General divisor: round up via ceiling division then multiply.  */
13630 div = build_int_cst (TREE_TYPE (value), divisor);
13631 value = size_binop (CEIL_DIV_EXPR, value, div);
13632 value = size_binop (MULT_EXPR, value, div);
13638 /* Likewise, but round down. */
13641 round_down (tree value, int divisor)
13643 tree div = NULL_TREE;
13645 gcc_assert (divisor > 0);
13649 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13650 have to do anything. Only do this when we are not given a const,
13651 because in that case, this check is more expensive than just
13653 if (TREE_CODE (value) != INTEGER_CST)
13655 div = build_int_cst (TREE_TYPE (value), divisor);
13657 if (multiple_of_p (TREE_TYPE (value), value, div))
13661 /* If divisor is a power of two, simplify this to bit manipulation. */
/* Power-of-two test: (x & -x) == x iff exactly one bit is set.  */
13662 if (divisor == (divisor & -divisor))
/* value & -divisor clears the low bits, rounding down to a multiple
   of DIVISOR.  */
13666 t = build_int_cst (TREE_TYPE (value), -divisor);
13667 value = size_binop (BIT_AND_EXPR, value, t);
/* General divisor: floor-divide then multiply back.  */
13672 div = build_int_cst (TREE_TYPE (value), divisor);
13673 value = size_binop (FLOOR_DIV_EXPR, value, div);
13674 value = size_binop (MULT_EXPR, value, div);
13680 /* Returns the pointer to the base of the object addressed by EXP and
13681 extracts the information about the offset of the access, storing it
13682 to PBITPOS and POFFSET. */
13685 split_address_to_core_and_offset (tree exp,
13686 HOST_WIDE_INT *pbitpos, tree *poffset)
13689 enum machine_mode mode;
13690 int unsignedp, volatilep;
13691 HOST_WIDE_INT bitsize;
/* For &obj, peel the reference apart with get_inner_reference and
   re-take the address of the innermost core object.  */
13693 if (TREE_CODE (exp) == ADDR_EXPR)
13695 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13696 poffset, &mode, &unsignedp, &volatilep,
13698 core = build_fold_addr_expr (core);
/* NOTE(review): the non-ADDR_EXPR branch is elided in this excerpt;
   visibly it reports no variable offset.  */
13704 *poffset = NULL_TREE;
13710 /* Returns true if addresses of E1 and E2 differ by a constant, false
13711 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13714 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13717 HOST_WIDE_INT bitpos1, bitpos2;
13718 tree toffset1, toffset2, tdiff, type;
13720 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13721 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
/* Bail out unless both bit positions are whole bytes and both
   addresses share the same core object.  */
13723 if (bitpos1 % BITS_PER_UNIT != 0
13724 || bitpos2 % BITS_PER_UNIT != 0
13725 || !operand_equal_p (core1, core2, 0))
/* Both sides carry a variable offset: their difference must fold to a
   constant that fits in a HOST_WIDE_INT.  */
13728 if (toffset1 && toffset2)
13730 type = TREE_TYPE (toffset1);
13731 if (type != TREE_TYPE (toffset2))
13732 toffset2 = fold_convert (type, toffset2);
13734 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13735 if (!cst_and_fits_in_hwi (tdiff))
13738 *diff = int_cst_value (tdiff);
13740 else if (toffset1 || toffset2)
13742 /* If only one of the offsets is non-constant, the difference cannot
/* Finally fold in the constant byte difference of the bit positions.  */
13749 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13753 /* Simplify the floating point expression EXP when the sign of the
13754 result is not significant. Return NULL_TREE if no simplification
13758 fold_strip_sign_ops (tree exp)
13762 switch (TREE_CODE (exp))
13766 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13767 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13771 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13773 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13774 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13775 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13776 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13777 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13778 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13781 case COMPOUND_EXPR:
13782 arg0 = TREE_OPERAND (exp, 0);
13783 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13785 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
13789 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13790 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
13792 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
13793 arg0 ? arg0 : TREE_OPERAND (exp, 1),
13794 arg1 ? arg1 : TREE_OPERAND (exp, 2));
13799 const enum built_in_function fcode = builtin_mathfn_code (exp);
13802 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13803 /* Strip copysign function call, return the 1st argument. */
13804 arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
13805 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
13806 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
13809 /* Strip sign ops from the argument of "odd" math functions. */
13810 if (negate_mathfn_p (fcode))
13812 arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
13814 return build_function_call_expr (get_callee_fndecl (exp),
13815 build_tree_list (NULL_TREE,