1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
23 /*@@ This file should be rewritten to use an arbitrary precision
24 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
25 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
26 @@ The routines that translate from the ap rep should
27 @@ warn if precision etc. is lost.
28 @@ This would also make life easier when this technology is used
29 @@ for cross-compilers. */
31 /* The entry points in this file are fold, size_int_wide, size_binop
32 and force_fit_type_double.
34 fold takes a tree as argument and returns a simplified tree.
36 size_binop takes a tree code for an arithmetic operation
37 and two operands that are trees, and produces a tree for the
38 result, assuming the type comes from `sizetype'.
40 size_int takes an integer value, and creates a tree constant
41 with type from `sizetype'.
43 force_fit_type_double takes a constant, an overflowable flag and a
44 prior overflow indicator. It forces the value to fit the type and
47 Note: Since the folders get called on non-gimple code as well as
48 gimple code, we need to handle GIMPLE tuples as well as their
49 corresponding tree equivalents. */
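/* Illustrative usage sketch (not part of the original source): with the
   entry points described above, a caller could fold the size expression
   NELTS * 4 + 8 entirely in `sizetype':

       tree bytes = size_binop (MULT_EXPR, nelts, size_int (4));
       tree total = size_binop (PLUS_EXPR, bytes, size_int (8));

   If NELTS is an INTEGER_CST, both calls fold to a single constant.  The
   names NELTS, BYTES and TOTAL are hypothetical.  */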
53 #include "coretypes.h"
64 #include "langhooks.h"
67 /* Non-zero if we are folding constants inside an initializer; zero
69 int folding_initializer = 0;
71 /* The following constants represent a bit-based encoding of GCC's
72 comparison operators. This encoding simplifies transformations
73 that combine relational comparisons, such as AND and OR. */
74 enum comparison_code {
93 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
94 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
95 static bool negate_mathfn_p (enum built_in_function);
96 static bool negate_expr_p (tree);
97 static tree negate_expr (tree);
98 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
99 static tree associate_trees (tree, tree, enum tree_code, tree);
100 static tree const_binop (enum tree_code, tree, tree, int);
101 static enum comparison_code comparison_to_compcode (enum tree_code);
102 static enum tree_code compcode_to_comparison (enum comparison_code);
103 static tree combine_comparisons (enum tree_code, enum tree_code,
104 enum tree_code, tree, tree, tree);
105 static int truth_value_p (enum tree_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand (tree, tree, tree);
110 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (tree, tree, int, int, int);
112 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
113 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
114 enum machine_mode *, int *, int *,
116 static int all_ones_mask_p (tree, int);
117 static tree sign_bit_p (tree, tree);
118 static int simple_operand_p (tree);
119 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
120 static tree range_predecessor (tree);
121 static tree range_successor (tree);
122 static tree make_range (tree, int *, tree *, tree *);
123 static tree build_range_check (tree, tree, int, tree, tree);
124 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
126 static tree fold_range_test (enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree fold_truthop (enum tree_code, tree, tree, tree);
130 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
133 static int multiple_of_p (tree, tree, tree);
134 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
137 static bool fold_real_zero_addition_p (tree, tree, int);
138 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
140 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (tree, tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static int native_encode_expr (tree, unsigned char *, int);
147 static tree native_interpret_expr (tree, unsigned char *, int);
150 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
151 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
152 and SUM1. Then this yields nonzero if overflow occurred during the
155 Overflow occurs if A and B have the same sign, but A and SUM differ in
156 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
158 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
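/* Illustrative sketch (not part of the original source): an 8-bit analogue
   of OVERFLOW_SUM_SIGN, assuming the usual two's complement conversion of
   out-of-range values.  112 + 80 wraps to -64, so the macro's test fires;
   112 + (-112) cannot overflow, so it does not.  */
#if 0
static void
example_overflow_sum_sign (void)
{
  signed char a = 112, b = 80, c = -112;
  signed char sum1 = (signed char) (a + b);   /* wraps to -64 */
  signed char sum2 = (signed char) (a + c);   /* exactly 0 */
  int ovf1 = ((signed char) (~(a ^ b) & (a ^ sum1))) < 0;  /* 1: overflow */
  int ovf2 = ((signed char) (~(a ^ c) & (a ^ sum2))) < 0;  /* 0: no overflow */
  gcc_assert (ovf1 && !ovf2);
}
#endif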
160 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
161 We do that by representing the two-word integer in 4 words, with only
162 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
163 number. The value of the word is LOWPART + HIGHPART * BASE. */
166 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
167 #define HIGHPART(x) \
168 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
169 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
171 /* Unpack a two-word integer into 4 words.
172 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
173 WORDS points to the array of HOST_WIDE_INTs. */
176 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
178 words[0] = LOWPART (low);
179 words[1] = HIGHPART (low);
180 words[2] = LOWPART (hi);
181 words[3] = HIGHPART (hi);
184 /* Pack an array of 4 words into a two-word integer.
185 WORDS points to the array of words.
186 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
189 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
192 *low = words[0] + words[1] * BASE;
193 *hi = words[2] + words[3] * BASE;
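/* Illustrative sketch (not part of the original source): assuming a 64-bit
   HOST_WIDE_INT, encode splits each half of a doubleword value into two
   base-2**32 digits (value == words[0] + words[1] * BASE for the low half)
   and decode inverts the transformation exactly.  */
#if 0
static void
example_encode_decode (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;

  /* Round-trip the doubleword value HIGH:LOW
     = 0x1FFFFFFFF : 0x8000000000000001.  */
  encode (words, (unsigned HOST_WIDE_INT) 0x8000000000000001ULL,
          (HOST_WIDE_INT) 0x1FFFFFFFFLL);
  decode (words, &low, &hi);
  gcc_assert (low == 0x8000000000000001ULL && hi == 0x1FFFFFFFFLL);
}
#endif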
196 /* Force the double-word integer L1, H1 to be within the range of the
197 integer type TYPE. Stores the properly truncated and sign-extended
198 double-word integer in *LV, *HV. Returns true if the operation
199 overflows, that is, argument and result are different. */
202 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
203 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
205 unsigned HOST_WIDE_INT low0 = l1;
206 HOST_WIDE_INT high0 = h1;
208 int sign_extended_type;
210 if (POINTER_TYPE_P (type)
211 || TREE_CODE (type) == OFFSET_TYPE)
214 prec = TYPE_PRECISION (type);
216 /* Size types *are* sign extended. */
217 sign_extended_type = (!TYPE_UNSIGNED (type)
218 || (TREE_CODE (type) == INTEGER_TYPE
219 && TYPE_IS_SIZETYPE (type)));
221 /* First clear all bits that are beyond the type's precision. */
222 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
224 else if (prec > HOST_BITS_PER_WIDE_INT)
225 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
229 if (prec < HOST_BITS_PER_WIDE_INT)
230 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
233 /* Then do sign extension if necessary. */
234 if (!sign_extended_type)
235 /* No sign extension */;
236 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
237 /* Correct width already. */;
238 else if (prec > HOST_BITS_PER_WIDE_INT)
240 /* Sign extend top half? */
241 if (h1 & ((unsigned HOST_WIDE_INT)1
242 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
243 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
245 else if (prec == HOST_BITS_PER_WIDE_INT)
247 if ((HOST_WIDE_INT)l1 < 0)
252 /* Sign extend bottom half? */
253 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
256 l1 |= (HOST_WIDE_INT)(-1) << prec;
263 /* If the value didn't fit, signal overflow. */
264 return l1 != low0 || h1 != high0;
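/* Illustrative sketch (not part of the original source): forcing the value
   0x1000000FF into a signed 8-bit type keeps only the low byte 0xFF, which
   then sign-extends to -1; because bits were discarded, overflow is
   reported.  Assumes a 64-bit HOST_WIDE_INT; INT8_TYPE is a hypothetical
   8-bit signed integer type node.  */
#if 0
static void
example_fit_double_type (tree int8_type)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int overflow = fit_double_type ((unsigned HOST_WIDE_INT) 0x1000000FFULL,
                                  (HOST_WIDE_INT) 0, &lv, &hv, int8_type);
  /* Now lv == (unsigned HOST_WIDE_INT) -1, hv == -1 and overflow is set.  */
  gcc_assert (overflow && hv == -1);
}
#endif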
267 /* We force the double-int HIGH:LOW to the range of the type TYPE by
268 sign or zero extending it.
269 OVERFLOWABLE indicates if we are interested
270 in overflow of the value, when >0 we are only interested in signed
271 overflow, for <0 we are interested in any overflow. OVERFLOWED
272 indicates whether overflow has already occurred. CONST_OVERFLOWED
273 indicates whether constant overflow has already occurred. We force
274 T's value to be within range of T's type (by setting to 0 or 1 all
275 the bits outside the type's range). We set TREE_OVERFLOW if
276 OVERFLOWED is nonzero,
277 or OVERFLOWABLE is >0 and signed overflow occurs,
278 or OVERFLOWABLE is <0 and any overflow occurs.
279 We return a new tree node for the extended double-int. The node
280 is shared if no overflow flags are set. */
283 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
284 HOST_WIDE_INT high, int overflowable,
287 int sign_extended_type;
290 /* Size types *are* sign extended. */
291 sign_extended_type = (!TYPE_UNSIGNED (type)
292 || (TREE_CODE (type) == INTEGER_TYPE
293 && TYPE_IS_SIZETYPE (type)));
295 overflow = fit_double_type (low, high, &low, &high, type);
297 /* If we need to set overflow flags, return a new unshared node. */
298 if (overflowed || overflow)
302 || (overflowable > 0 && sign_extended_type))
304 tree t = make_node (INTEGER_CST);
305 TREE_INT_CST_LOW (t) = low;
306 TREE_INT_CST_HIGH (t) = high;
307 TREE_TYPE (t) = type;
308 TREE_OVERFLOW (t) = 1;
313 /* Else build a shared node. */
314 return build_int_cst_wide (type, low, high);
317 /* Add two doubleword integers with doubleword result.
318 Return nonzero if the operation overflows according to UNSIGNED_P.
319 Each argument is given as two `HOST_WIDE_INT' pieces.
320 One argument is L1 and H1; the other, L2 and H2.
321 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
324 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
325 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
326 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
329 unsigned HOST_WIDE_INT l;
333 h = h1 + h2 + (l < l1);
339 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
341 return OVERFLOW_SUM_SIGN (h1, h2, h);
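/* Illustrative sketch (not part of the original source): the wrap-around
   test (l < l1) is what detects a carry out of the low word.  Assuming a
   64-bit HOST_WIDE_INT, adding 1 to 0:0xFFFFFFFFFFFFFFFF carries into the
   high word and produces 1:0 with no signed overflow.  */
#if 0
static void
example_add_double (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = add_double ((unsigned HOST_WIDE_INT) -1, (HOST_WIDE_INT) 0,
                        (unsigned HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                        &lv, &hv);
  gcc_assert (lv == 0 && hv == 1 && !ovf);
}
#endif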
344 /* Negate a doubleword integer with doubleword result.
345 Return nonzero if the operation overflows, assuming it's signed.
346 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
347 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
350 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
351 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
357 return (*hv & h1) < 0;
367 /* Multiply two doubleword integers with doubleword result.
368 Return nonzero if the operation overflows according to UNSIGNED_P.
369 Each argument is given as two `HOST_WIDE_INT' pieces.
370 One argument is L1 and H1; the other, L2 and H2.
371 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
374 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
375 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
376 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
379 HOST_WIDE_INT arg1[4];
380 HOST_WIDE_INT arg2[4];
381 HOST_WIDE_INT prod[4 * 2];
382 unsigned HOST_WIDE_INT carry;
384 unsigned HOST_WIDE_INT toplow, neglow;
385 HOST_WIDE_INT tophigh, neghigh;
387 encode (arg1, l1, h1);
388 encode (arg2, l2, h2);
390 memset (prod, 0, sizeof prod);
392 for (i = 0; i < 4; i++)
395 for (j = 0; j < 4; j++)
398 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
399 carry += arg1[i] * arg2[j];
400 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
402 prod[k] = LOWPART (carry);
403 carry = HIGHPART (carry);
408 decode (prod, lv, hv);
409 decode (prod + 4, &toplow, &tophigh);
411 /* Unsigned overflow is immediate. */
413 return (toplow | tophigh) != 0;
415 /* Check for signed overflow by calculating the signed representation of the
416 top half of the result; it should agree with the low half's sign bit. */
419 neg_double (l2, h2, &neglow, &neghigh);
420 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
424 neg_double (l1, h1, &neglow, &neghigh);
425 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
427 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
430 /* Shift the doubleword integer in L1, H1 left by COUNT places
431 keeping only PREC bits of result.
432 Shift right if COUNT is negative.
433 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
434 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
437 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
438 HOST_WIDE_INT count, unsigned int prec,
439 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
441 unsigned HOST_WIDE_INT signmask;
445 rshift_double (l1, h1, -count, prec, lv, hv, arith);
449 if (SHIFT_COUNT_TRUNCATED)
452 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
454 /* Shifting by the host word size is undefined according to the
455 ANSI standard, so we must handle this as a special case. */
459 else if (count >= HOST_BITS_PER_WIDE_INT)
461 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
466 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
467 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
471 /* Sign extend all bits that are beyond the precision. */
473 signmask = -((prec > HOST_BITS_PER_WIDE_INT
474 ? ((unsigned HOST_WIDE_INT) *hv
475 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
476 : (*lv >> (prec - 1))) & 1);
478 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
480 else if (prec >= HOST_BITS_PER_WIDE_INT)
482 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
483 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
488 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
489 *lv |= signmask << prec;
493 /* Shift the doubleword integer in L1, H1 right by COUNT places
494 keeping only PREC bits of result. COUNT must be positive.
495 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
496 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
499 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
500 HOST_WIDE_INT count, unsigned int prec,
501 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
504 unsigned HOST_WIDE_INT signmask;
507 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
510 if (SHIFT_COUNT_TRUNCATED)
513 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
515 /* Shifting by the host word size is undefined according to the
516 ANSI standard, so we must handle this as a special case. */
520 else if (count >= HOST_BITS_PER_WIDE_INT)
523 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
527 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
529 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
532 /* Zero / sign extend all bits that are beyond the precision. */
534 if (count >= (HOST_WIDE_INT)prec)
539 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
541 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
543 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
544 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
549 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
550 *lv |= signmask << (prec - count);
554 /* Rotate the doubleword integer in L1, H1 left by COUNT places
555 keeping only PREC bits of result.
556 Rotate right if COUNT is negative.
557 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
560 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
561 HOST_WIDE_INT count, unsigned int prec,
562 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
564 unsigned HOST_WIDE_INT s1l, s2l;
565 HOST_WIDE_INT s1h, s2h;
571 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
572 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
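/* Illustrative sketch (not part of the original source): a left rotate
   within PREC bits is simply the OR of the two shifts issued above.
   Rotating the 8-bit value 0x93 (10010011) left by 3 yields 0x9c
   (10011100) in the low PREC bits of the result.  */
#if 0
static void
example_lrotate_double (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  lrotate_double ((unsigned HOST_WIDE_INT) 0x93, (HOST_WIDE_INT) 0,
                  3, 8, &lv, &hv);
  gcc_assert ((lv & 0xff) == 0x9c);
}
#endif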
577 /* Rotate the doubleword integer in L1, H1 right by COUNT places
578 keeping only PREC bits of result. COUNT must be positive.
579 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
582 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
583 HOST_WIDE_INT count, unsigned int prec,
584 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
586 unsigned HOST_WIDE_INT s1l, s2l;
587 HOST_WIDE_INT s1h, s2h;
593 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
594 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
599 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
600 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
601 CODE is a tree code for a kind of division, one of
602 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
604 It controls how the quotient is rounded to an integer.
605 Return nonzero if the operation overflows.
606 UNS nonzero says do unsigned division. */
609 div_and_round_double (enum tree_code code, int uns,
610 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
611 HOST_WIDE_INT hnum_orig,
612 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
613 HOST_WIDE_INT hden_orig,
614 unsigned HOST_WIDE_INT *lquo,
615 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
619 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
620 HOST_WIDE_INT den[4], quo[4];
622 unsigned HOST_WIDE_INT work;
623 unsigned HOST_WIDE_INT carry = 0;
624 unsigned HOST_WIDE_INT lnum = lnum_orig;
625 HOST_WIDE_INT hnum = hnum_orig;
626 unsigned HOST_WIDE_INT lden = lden_orig;
627 HOST_WIDE_INT hden = hden_orig;
630 if (hden == 0 && lden == 0)
631 overflow = 1, lden = 1;
633 /* Calculate quotient sign and convert operands to unsigned. */
639 /* (minimum integer) / (-1) is the only overflow case. */
640 if (neg_double (lnum, hnum, &lnum, &hnum)
641 && ((HOST_WIDE_INT) lden & hden) == -1)
647 neg_double (lden, hden, &lden, &hden);
651 if (hnum == 0 && hden == 0)
652 { /* single precision */
654 /* This unsigned division rounds toward zero. */
660 { /* trivial case: dividend < divisor */
661 /* hden != 0 already checked. */
668 memset (quo, 0, sizeof quo);
670 memset (num, 0, sizeof num); /* to zero the extra scaling element */
671 memset (den, 0, sizeof den);
673 encode (num, lnum, hnum);
674 encode (den, lden, hden);
676 /* Special code for when the divisor < BASE. */
677 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
679 /* hnum != 0 already checked. */
680 for (i = 4 - 1; i >= 0; i--)
682 work = num[i] + carry * BASE;
683 quo[i] = work / lden;
689 /* Full double precision division,
690 with thanks to Don Knuth's "Seminumerical Algorithms". */
691 int num_hi_sig, den_hi_sig;
692 unsigned HOST_WIDE_INT quo_est, scale;
694 /* Find the highest nonzero divisor digit. */
695 for (i = 4 - 1;; i--)
702 /* Ensure that the first digit of the divisor is at least BASE/2.
703 This is required by the quotient digit estimation algorithm. */
705 scale = BASE / (den[den_hi_sig] + 1);
707 { /* scale divisor and dividend */
709 for (i = 0; i <= 4 - 1; i++)
711 work = (num[i] * scale) + carry;
712 num[i] = LOWPART (work);
713 carry = HIGHPART (work);
718 for (i = 0; i <= 4 - 1; i++)
720 work = (den[i] * scale) + carry;
721 den[i] = LOWPART (work);
722 carry = HIGHPART (work);
723 if (den[i] != 0) den_hi_sig = i;
730 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
732 /* Guess the next quotient digit, quo_est, by dividing the first
733 two remaining dividend digits by the high order divisor digit.
734 quo_est is never low and is at most 2 high. */
735 unsigned HOST_WIDE_INT tmp;
737 num_hi_sig = i + den_hi_sig + 1;
738 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
739 if (num[num_hi_sig] != den[den_hi_sig])
740 quo_est = work / den[den_hi_sig];
744 /* Refine quo_est so it's usually correct, and at most one high. */
745 tmp = work - quo_est * den[den_hi_sig];
747 && (den[den_hi_sig - 1] * quo_est
748 > (tmp * BASE + num[num_hi_sig - 2])))
751 /* Try QUO_EST as the quotient digit, by multiplying the
752 divisor by QUO_EST and subtracting from the remaining dividend.
753 Keep in mind that QUO_EST is the I - 1st digit. */
756 for (j = 0; j <= den_hi_sig; j++)
758 work = quo_est * den[j] + carry;
759 carry = HIGHPART (work);
760 work = num[i + j] - LOWPART (work);
761 num[i + j] = LOWPART (work);
762 carry += HIGHPART (work) != 0;
765 /* If quo_est was high by one, then num[i] went negative and
766 we need to correct things. */
767 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
770 carry = 0; /* add divisor back in */
771 for (j = 0; j <= den_hi_sig; j++)
773 work = num[i + j] + den[j] + carry;
774 carry = HIGHPART (work);
775 num[i + j] = LOWPART (work);
778 num [num_hi_sig] += carry;
781 /* Store the quotient digit. */
786 decode (quo, lquo, hquo);
789 /* If result is negative, make it so. */
791 neg_double (*lquo, *hquo, lquo, hquo);
793 /* Compute trial remainder: rem = num - (quo * den) */
794 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
795 neg_double (*lrem, *hrem, lrem, hrem);
796 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
801 case TRUNC_MOD_EXPR: /* round toward zero */
802 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
806 case FLOOR_MOD_EXPR: /* round toward negative infinity */
807 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
810 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
818 case CEIL_MOD_EXPR: /* round toward positive infinity */
819 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
821 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
829 case ROUND_MOD_EXPR: /* round to closest integer */
831 unsigned HOST_WIDE_INT labs_rem = *lrem;
832 HOST_WIDE_INT habs_rem = *hrem;
833 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
834 HOST_WIDE_INT habs_den = hden, htwice;
836 /* Get absolute values. */
838 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
840 neg_double (lden, hden, &labs_den, &habs_den);
842 /* If (2 * abs (lrem) >= abs (lden)) */
843 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
844 labs_rem, habs_rem, <wice, &htwice);
846 if (((unsigned HOST_WIDE_INT) habs_den
847 < (unsigned HOST_WIDE_INT) htwice)
848 || (((unsigned HOST_WIDE_INT) habs_den
849 == (unsigned HOST_WIDE_INT) htwice)
850 && (labs_den < ltwice)))
854 add_double (*lquo, *hquo,
855 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
858 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
870 /* Compute true remainder: rem = num - (quo * den) */
871 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
872 neg_double (*lrem, *hrem, lrem, hrem);
873 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
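/* Illustrative sketch (not part of the original source): the adjustments
   above implement the different rounding modes.  For the signed division
   -7 / 2, TRUNC_DIV_EXPR gives quotient -3 (remainder -1), FLOOR_DIV_EXPR
   gives -4 (remainder 1), CEIL_DIV_EXPR gives -3, and ROUND_DIV_EXPR gives
   -4 (the tie at -3.5 rounds away from zero).  Assumes a 64-bit
   HOST_WIDE_INT.  */
#if 0
static void
example_div_and_round_double (void)
{
  unsigned HOST_WIDE_INT lq, lr;
  HOST_WIDE_INT hq, hr;

  div_and_round_double (TRUNC_DIV_EXPR, 0,
                        (unsigned HOST_WIDE_INT) -7, (HOST_WIDE_INT) -1,
                        (unsigned HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                        &lq, &hq, &lr, &hr);
  gcc_assert (hq == -1 && lq == (unsigned HOST_WIDE_INT) -3);

  div_and_round_double (FLOOR_DIV_EXPR, 0,
                        (unsigned HOST_WIDE_INT) -7, (HOST_WIDE_INT) -1,
                        (unsigned HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                        &lq, &hq, &lr, &hr);
  gcc_assert (hq == -1 && lq == (unsigned HOST_WIDE_INT) -4);
}
#endif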
877 /* If ARG2 divides ARG1 with zero remainder, carries out the division
878 of type CODE and returns the quotient.
879 Otherwise returns NULL_TREE. */
882 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
884 unsigned HOST_WIDE_INT int1l, int2l;
885 HOST_WIDE_INT int1h, int2h;
886 unsigned HOST_WIDE_INT quol, reml;
887 HOST_WIDE_INT quoh, remh;
888 tree type = TREE_TYPE (arg1);
889 int uns = TYPE_UNSIGNED (type);
891 int1l = TREE_INT_CST_LOW (arg1);
892 int1h = TREE_INT_CST_HIGH (arg1);
893 int2l = TREE_INT_CST_LOW (arg2);
894 int2h = TREE_INT_CST_HIGH (arg2);
896 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
897 &quol, &quoh, &reml, &remh);
898 if (remh != 0 || reml != 0)
901 return build_int_cst_wide (type, quol, quoh);
904 /* Return true if the built-in mathematical function specified by CODE
905 is odd, i.e. -f(x) == f(-x). */
908 negate_mathfn_p (enum built_in_function code)
912 CASE_FLT_FN (BUILT_IN_ASIN):
913 CASE_FLT_FN (BUILT_IN_ASINH):
914 CASE_FLT_FN (BUILT_IN_ATAN):
915 CASE_FLT_FN (BUILT_IN_ATANH):
916 CASE_FLT_FN (BUILT_IN_CASIN):
917 CASE_FLT_FN (BUILT_IN_CASINH):
918 CASE_FLT_FN (BUILT_IN_CATAN):
919 CASE_FLT_FN (BUILT_IN_CATANH):
920 CASE_FLT_FN (BUILT_IN_CBRT):
921 CASE_FLT_FN (BUILT_IN_CPROJ):
922 CASE_FLT_FN (BUILT_IN_CSIN):
923 CASE_FLT_FN (BUILT_IN_CSINH):
924 CASE_FLT_FN (BUILT_IN_CTAN):
925 CASE_FLT_FN (BUILT_IN_CTANH):
926 CASE_FLT_FN (BUILT_IN_ERF):
927 CASE_FLT_FN (BUILT_IN_LLROUND):
928 CASE_FLT_FN (BUILT_IN_LROUND):
929 CASE_FLT_FN (BUILT_IN_ROUND):
930 CASE_FLT_FN (BUILT_IN_SIN):
931 CASE_FLT_FN (BUILT_IN_SINH):
932 CASE_FLT_FN (BUILT_IN_TAN):
933 CASE_FLT_FN (BUILT_IN_TANH):
934 CASE_FLT_FN (BUILT_IN_TRUNC):
937 CASE_FLT_FN (BUILT_IN_LLRINT):
938 CASE_FLT_FN (BUILT_IN_LRINT):
939 CASE_FLT_FN (BUILT_IN_NEARBYINT):
940 CASE_FLT_FN (BUILT_IN_RINT):
941 return !flag_rounding_math;
949 /* Check whether we may negate an integer constant T without causing
953 may_negate_without_overflow_p (tree t)
955 unsigned HOST_WIDE_INT val;
959 gcc_assert (TREE_CODE (t) == INTEGER_CST);
961 type = TREE_TYPE (t);
962 if (TYPE_UNSIGNED (type))
965 prec = TYPE_PRECISION (type);
966 if (prec > HOST_BITS_PER_WIDE_INT)
968 if (TREE_INT_CST_LOW (t) != 0)
970 prec -= HOST_BITS_PER_WIDE_INT;
971 val = TREE_INT_CST_HIGH (t);
974 val = TREE_INT_CST_LOW (t);
975 if (prec < HOST_BITS_PER_WIDE_INT)
976 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
977 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
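/* Illustrative sketch (not part of the original source): the only signed
   value whose negation overflows is the most negative one, whose low PREC
   bits equal 1 << (PREC - 1); that is exactly what the final comparison
   rejects.  INT8_TYPE is a hypothetical 8-bit signed integer type node.  */
#if 0
static void
example_may_negate (tree int8_type)
{
  tree minus_128 = build_int_cst (int8_type, -128);
  tree minus_127 = build_int_cst (int8_type, -127);
  gcc_assert (!may_negate_without_overflow_p (minus_128));
  gcc_assert (may_negate_without_overflow_p (minus_127));
}
#endif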
980 /* Determine whether an expression T can be cheaply negated using
981 the function negate_expr without introducing undefined overflow. */
984 negate_expr_p (tree t)
991 type = TREE_TYPE (t);
994 switch (TREE_CODE (t))
997 if (TYPE_OVERFLOW_WRAPS (type))
1000 /* Check that -CST will not overflow type. */
1001 return may_negate_without_overflow_p (t);
1003 return (INTEGRAL_TYPE_P (type)
1004 && TYPE_OVERFLOW_WRAPS (type));
1011 return negate_expr_p (TREE_REALPART (t))
1012 && negate_expr_p (TREE_IMAGPART (t));
1015 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1016 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1018 /* -(A + B) -> (-B) - A. */
1019 if (negate_expr_p (TREE_OPERAND (t, 1))
1020 && reorder_operands_p (TREE_OPERAND (t, 0),
1021 TREE_OPERAND (t, 1)))
1023 /* -(A + B) -> (-A) - B. */
1024 return negate_expr_p (TREE_OPERAND (t, 0));
1027 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1028 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1029 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1030 && reorder_operands_p (TREE_OPERAND (t, 0),
1031 TREE_OPERAND (t, 1));
1034 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1040 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1041 return negate_expr_p (TREE_OPERAND (t, 1))
1042 || negate_expr_p (TREE_OPERAND (t, 0));
1045 case TRUNC_DIV_EXPR:
1046 case ROUND_DIV_EXPR:
1047 case FLOOR_DIV_EXPR:
1049 case EXACT_DIV_EXPR:
1050 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1051 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1053 return negate_expr_p (TREE_OPERAND (t, 1))
1054 || negate_expr_p (TREE_OPERAND (t, 0));
1057 /* Negate -((double)float) as (double)(-float). */
1058 if (TREE_CODE (type) == REAL_TYPE)
1060 tree tem = strip_float_extensions (t);
1062 return negate_expr_p (tem);
1067 /* Negate -f(x) as f(-x). */
1068 if (negate_mathfn_p (builtin_mathfn_code (t)))
1069 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1073 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1074 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1076 tree op1 = TREE_OPERAND (t, 1);
1077 if (TREE_INT_CST_HIGH (op1) == 0
1078 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1079 == TREE_INT_CST_LOW (op1))
1090 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
1091 simplification is possible.
1092 If negate_expr_p would return true for T, NULL_TREE will never be
1096 fold_negate_expr (tree t)
1098 tree type = TREE_TYPE (t);
1101 switch (TREE_CODE (t))
1103 /* Convert - (~A) to A + 1. */
1105 if (INTEGRAL_TYPE_P (type))
1106 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1107 build_int_cst (type, 1));
1111 tem = fold_negate_const (t, type);
1112 if (!TREE_OVERFLOW (tem)
1113 || !TYPE_OVERFLOW_TRAPS (type))
1118 tem = fold_negate_const (t, type);
1119 /* Two's complement FP formats, such as c4x, may overflow. */
1120 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1126 tree rpart = negate_expr (TREE_REALPART (t));
1127 tree ipart = negate_expr (TREE_IMAGPART (t));
1129 if ((TREE_CODE (rpart) == REAL_CST
1130 && TREE_CODE (ipart) == REAL_CST)
1131 || (TREE_CODE (rpart) == INTEGER_CST
1132 && TREE_CODE (ipart) == INTEGER_CST))
1133 return build_complex (type, rpart, ipart);
1138 return TREE_OPERAND (t, 0);
1141 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1142 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1144 /* -(A + B) -> (-B) - A. */
1145 if (negate_expr_p (TREE_OPERAND (t, 1))
1146 && reorder_operands_p (TREE_OPERAND (t, 0),
1147 TREE_OPERAND (t, 1)))
1149 tem = negate_expr (TREE_OPERAND (t, 1));
1150 return fold_build2 (MINUS_EXPR, type,
1151 tem, TREE_OPERAND (t, 0));
1154 /* -(A + B) -> (-A) - B. */
1155 if (negate_expr_p (TREE_OPERAND (t, 0)))
1157 tem = negate_expr (TREE_OPERAND (t, 0));
1158 return fold_build2 (MINUS_EXPR, type,
1159 tem, TREE_OPERAND (t, 1));
1165 /* - (A - B) -> B - A */
1166 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1167 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1168 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1169 return fold_build2 (MINUS_EXPR, type,
1170 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1174 if (TYPE_UNSIGNED (type))
1180 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1182 tem = TREE_OPERAND (t, 1);
1183 if (negate_expr_p (tem))
1184 return fold_build2 (TREE_CODE (t), type,
1185 TREE_OPERAND (t, 0), negate_expr (tem));
1186 tem = TREE_OPERAND (t, 0);
1187 if (negate_expr_p (tem))
1188 return fold_build2 (TREE_CODE (t), type,
1189 negate_expr (tem), TREE_OPERAND (t, 1));
1193 case TRUNC_DIV_EXPR:
1194 case ROUND_DIV_EXPR:
1195 case FLOOR_DIV_EXPR:
1197 case EXACT_DIV_EXPR:
1198 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1200 tem = TREE_OPERAND (t, 1);
1201 if (negate_expr_p (tem))
1202 return fold_build2 (TREE_CODE (t), type,
1203 TREE_OPERAND (t, 0), negate_expr (tem));
1204 tem = TREE_OPERAND (t, 0);
1205 if (negate_expr_p (tem))
1206 return fold_build2 (TREE_CODE (t), type,
1207 negate_expr (tem), TREE_OPERAND (t, 1));
1212 /* Convert -((double)float) into (double)(-float). */
1213 if (TREE_CODE (type) == REAL_TYPE)
1215 tem = strip_float_extensions (t);
1216 if (tem != t && negate_expr_p (tem))
1217 return negate_expr (tem);
1222 /* Negate -f(x) as f(-x). */
1223 if (negate_mathfn_p (builtin_mathfn_code (t))
1224 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1226 tree fndecl, arg, arglist;
1228 fndecl = get_callee_fndecl (t);
1229 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1230 arglist = build_tree_list (NULL_TREE, arg);
1231 return build_function_call_expr (fndecl, arglist);
1236 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1237 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1239 tree op1 = TREE_OPERAND (t, 1);
1240 if (TREE_INT_CST_HIGH (op1) == 0
1241 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1242 == TREE_INT_CST_LOW (op1))
1244 tree ntype = TYPE_UNSIGNED (type)
1245 ? lang_hooks.types.signed_type (type)
1246 : lang_hooks.types.unsigned_type (type);
1247 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1248 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1249 return fold_convert (type, temp);
1261 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
1262 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1263 return NULL_TREE. */
1266 negate_expr (tree t)
1273 type = TREE_TYPE (t);
1274 STRIP_SIGN_NOPS (t);
1276 tem = fold_negate_expr (t);
1278 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1279 return fold_convert (type, tem);
1282 /* Split a tree IN into constant, literal, and variable parts that could be
1283 combined with CODE to make IN. "constant" means an expression with
1284 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1285 commutative arithmetic operation. Store the constant part into *CONP,
1286 the literal in *LITP and return the variable part. If a part isn't
1287 present, set it to null. If the tree does not decompose in this way,
1288 return the entire tree as the variable part and the other parts as null.
1290 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1291 case, we negate an operand that was subtracted. Except if it is a
1292 literal for which we use *MINUS_LITP instead.
1294 If NEGATE_P is true, we are negating all of IN, again except a literal
1295 for which we use *MINUS_LITP instead.
1297 If IN is itself a literal or constant, return it as appropriate.
1299 Note that we do not guarantee that any of the three values will be the
1300 same type as IN, but they will have the same signedness and mode. */
1303 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1304 tree *minus_litp, int negate_p)
1312 /* Strip any conversions that don't change the machine mode or signedness. */
1313 STRIP_SIGN_NOPS (in);
1315 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1317 else if (TREE_CODE (in) == code
1318 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1319 /* We can associate addition and subtraction together (even
1320 though the C standard doesn't say so) for integers because
1321 the value is not affected. For reals, the value might be
1322 affected, so we can't. */
1323 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1324 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1326 tree op0 = TREE_OPERAND (in, 0);
1327 tree op1 = TREE_OPERAND (in, 1);
1328 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1329 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1331 /* First see if either of the operands is a literal, then a constant. */
1332 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1333 *litp = op0, op0 = 0;
1334 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1335 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1337 if (op0 != 0 && TREE_CONSTANT (op0))
1338 *conp = op0, op0 = 0;
1339 else if (op1 != 0 && TREE_CONSTANT (op1))
1340 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1342 /* If we haven't dealt with either operand, this is not a case we can
1343 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1344 if (op0 != 0 && op1 != 0)
1349 var = op1, neg_var_p = neg1_p;
1351 /* Now do any needed negations. */
1353 *minus_litp = *litp, *litp = 0;
1355 *conp = negate_expr (*conp);
1357 var = negate_expr (var);
1359 else if (TREE_CONSTANT (in))
1367 *minus_litp = *litp, *litp = 0;
1368 else if (*minus_litp)
1369 *litp = *minus_litp, *minus_litp = 0;
1370 *conp = negate_expr (*conp);
1371 var = negate_expr (var);
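/* Illustrative sketch (not part of the original source): splitting the
   tree for X + 3 with CODE == PLUS_EXPR returns the variable part X,
   leaves *CONP null, and stores the literal 3 in *LITP; with NEGATE_P
   nonzero the literal would land in *MINUS_LITP instead.  X_PLUS_3 is a
   hypothetical PLUS_EXPR tree.  */
#if 0
static tree
example_split_tree (tree x_plus_3)
{
  tree con, lit, minus_lit;
  tree var = split_tree (x_plus_3, PLUS_EXPR, &con, &lit, &minus_lit, 0);
  /* var == X, con == NULL_TREE, lit == 3, minus_lit == NULL_TREE.  */
  return var;
}
#endif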
1377 /* Re-associate trees split by the above function. T1 and T2 are either
1378 expressions to associate or null. Return the new expression, if any. If
1379 we build an operation, do it in TYPE and with CODE. */
1382 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1389 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1390 try to fold this since we will have infinite recursion. But do
1391 deal with any NEGATE_EXPRs. */
1392 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1393 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1395 if (code == PLUS_EXPR)
1397 if (TREE_CODE (t1) == NEGATE_EXPR)
1398 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1399 fold_convert (type, TREE_OPERAND (t1, 0)));
1400 else if (TREE_CODE (t2) == NEGATE_EXPR)
1401 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1402 fold_convert (type, TREE_OPERAND (t2, 0)));
1403 else if (integer_zerop (t2))
1404 return fold_convert (type, t1);
1406 else if (code == MINUS_EXPR)
1408 if (integer_zerop (t2))
1409 return fold_convert (type, t1);
1412 return build2 (code, type, fold_convert (type, t1),
1413 fold_convert (type, t2));
1416 return fold_build2 (code, type, fold_convert (type, t1),
1417 fold_convert (type, t2));
1420 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1421 for use in int_const_binop, size_binop and size_diffop. */
1424 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
1426 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1428 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1443 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1444 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1445 && TYPE_MODE (type1) == TYPE_MODE (type2);
1449 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1450 to produce a new constant. Return NULL_TREE if we don't know how
1451 to evaluate CODE at compile-time.
1453 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1456 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1458 unsigned HOST_WIDE_INT int1l, int2l;
1459 HOST_WIDE_INT int1h, int2h;
1460 unsigned HOST_WIDE_INT low;
1462 unsigned HOST_WIDE_INT garbagel;
1463 HOST_WIDE_INT garbageh;
1465 tree type = TREE_TYPE (arg1);
1466 int uns = TYPE_UNSIGNED (type);
1468 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1471 int1l = TREE_INT_CST_LOW (arg1);
1472 int1h = TREE_INT_CST_HIGH (arg1);
1473 int2l = TREE_INT_CST_LOW (arg2);
1474 int2h = TREE_INT_CST_HIGH (arg2);
1479 low = int1l | int2l, hi = int1h | int2h;
1483 low = int1l ^ int2l, hi = int1h ^ int2h;
1487 low = int1l & int2l, hi = int1h & int2h;
1493 /* It's unclear from the C standard whether shifts can overflow.
1494 The following code ignores overflow; perhaps a C standard
1495 interpretation ruling is needed. */
1496 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1503 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1508 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1512 neg_double (int2l, int2h, &low, &hi);
1513 add_double (int1l, int1h, low, hi, &low, &hi);
1514 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1518 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1521 case TRUNC_DIV_EXPR:
1522 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1523 case EXACT_DIV_EXPR:
1524 /* This is a shortcut for a common special case. */
1525 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1526 && !TREE_OVERFLOW (arg1)
1527 && !TREE_OVERFLOW (arg2)
1528 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1530 if (code == CEIL_DIV_EXPR)
1533 low = int1l / int2l, hi = 0;
1537 /* ... fall through ... */
1539 case ROUND_DIV_EXPR:
1540 if (int2h == 0 && int2l == 0)
1542 if (int2h == 0 && int2l == 1)
1544 low = int1l, hi = int1h;
1547 if (int1l == int2l && int1h == int2h
1548 && ! (int1l == 0 && int1h == 0))
1553 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1554 &low, &hi, &garbagel, &garbageh);
1557 case TRUNC_MOD_EXPR:
1558 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1559 /* This is a shortcut for a common special case. */
1560 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1561 && !TREE_OVERFLOW (arg1)
1562 && !TREE_OVERFLOW (arg2)
1563 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1565 if (code == CEIL_MOD_EXPR)
1567 low = int1l % int2l, hi = 0;
1571 /* ... fall through ... */
1573 case ROUND_MOD_EXPR:
1574 if (int2h == 0 && int2l == 0)
1576 overflow = div_and_round_double (code, uns,
1577 int1l, int1h, int2l, int2h,
1578 &garbagel, &garbageh, &low, &hi);
1584 low = (((unsigned HOST_WIDE_INT) int1h
1585 < (unsigned HOST_WIDE_INT) int2h)
1586 || (((unsigned HOST_WIDE_INT) int1h
1587 == (unsigned HOST_WIDE_INT) int2h)
1590 low = (int1h < int2h
1591 || (int1h == int2h && int1l < int2l));
1593 if (low == (code == MIN_EXPR))
1594 low = int1l, hi = int1h;
1596 low = int2l, hi = int2h;
1605 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1607 /* Propagate overflow flags ourselves. */
1608 if (((!uns || is_sizetype) && overflow)
1609 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1612 TREE_OVERFLOW (t) = 1;
1616 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1617 ((!uns || is_sizetype) && overflow)
1618 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
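/* Illustrative sketch (not part of the original source): folding two
   INTEGER_CSTs with int_const_binop.  build_int_cst and integer_type_node
   are existing GCC interfaces; the values are arbitrary.  */
#if 0
static void
example_int_const_binop (void)
{
  tree a = build_int_cst (integer_type_node, 6);
  tree b = build_int_cst (integer_type_node, 7);
  tree prod = int_const_binop (MULT_EXPR, a, b, 0);
  /* PROD is the INTEGER_CST 42 with no overflow flag set.  */
  gcc_assert (TREE_INT_CST_LOW (prod) == 42 && !TREE_OVERFLOW (prod));
}
#endif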
1623 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1624 constant. We assume ARG1 and ARG2 have the same data type, or at least
1625 are the same kind of constant and the same machine mode. Return zero if
1626 combining the constants is not allowed in the current operating mode.
1628 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1631 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1633 /* Sanity check for the recursive cases. */
1640 if (TREE_CODE (arg1) == INTEGER_CST)
1641 return int_const_binop (code, arg1, arg2, notrunc);
1643 if (TREE_CODE (arg1) == REAL_CST)
1645 enum machine_mode mode;
1648 REAL_VALUE_TYPE value;
1649 REAL_VALUE_TYPE result;
1653 /* The following codes are handled by real_arithmetic. */
1668 d1 = TREE_REAL_CST (arg1);
1669 d2 = TREE_REAL_CST (arg2);
1671 type = TREE_TYPE (arg1);
1672 mode = TYPE_MODE (type);
1674 /* Don't perform operation if we honor signaling NaNs and
1675 either operand is a NaN. */
1676 if (HONOR_SNANS (mode)
1677 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1680 /* Don't perform operation if it would raise a division
1681 by zero exception. */
1682 if (code == RDIV_EXPR
1683 && REAL_VALUES_EQUAL (d2, dconst0)
1684 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1687 /* If either operand is a NaN, just return it. Otherwise, set up
1688 for floating-point trap; we return an overflow. */
1689 if (REAL_VALUE_ISNAN (d1))
1691 else if (REAL_VALUE_ISNAN (d2))
1694 inexact = real_arithmetic (&value, code, &d1, &d2);
1695 real_convert (&result, mode, &value);
1697 /* Don't constant fold this floating point operation if
1698 the result has overflowed and flag_trapping_math. */
1699 if (flag_trapping_math
1700 && MODE_HAS_INFINITIES (mode)
1701 && REAL_VALUE_ISINF (result)
1702 && !REAL_VALUE_ISINF (d1)
1703 && !REAL_VALUE_ISINF (d2))
1706 /* Don't constant fold this floating point operation if the
1707 result may depend upon the run-time rounding mode and
1708 flag_rounding_math is set, or if GCC's software emulation
1709 is unable to accurately represent the result. */
1710 if ((flag_rounding_math
1711 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1712 && !flag_unsafe_math_optimizations))
1713 && (inexact || !real_identical (&result, &value)))
1716 t = build_real (type, result);
1718 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1722 if (TREE_CODE (arg1) == COMPLEX_CST)
1724 tree type = TREE_TYPE (arg1);
1725 tree r1 = TREE_REALPART (arg1);
1726 tree i1 = TREE_IMAGPART (arg1);
1727 tree r2 = TREE_REALPART (arg2);
1728 tree i2 = TREE_IMAGPART (arg2);
1735 real = const_binop (code, r1, r2, notrunc);
1736 imag = const_binop (code, i1, i2, notrunc);
1740 real = const_binop (MINUS_EXPR,
1741 const_binop (MULT_EXPR, r1, r2, notrunc),
1742 const_binop (MULT_EXPR, i1, i2, notrunc),
1744 imag = const_binop (PLUS_EXPR,
1745 const_binop (MULT_EXPR, r1, i2, notrunc),
1746 const_binop (MULT_EXPR, i1, r2, notrunc),
1753 = const_binop (PLUS_EXPR,
1754 const_binop (MULT_EXPR, r2, r2, notrunc),
1755 const_binop (MULT_EXPR, i2, i2, notrunc),
1758 = const_binop (PLUS_EXPR,
1759 const_binop (MULT_EXPR, r1, r2, notrunc),
1760 const_binop (MULT_EXPR, i1, i2, notrunc),
1763 = const_binop (MINUS_EXPR,
1764 const_binop (MULT_EXPR, i1, r2, notrunc),
1765 const_binop (MULT_EXPR, r1, i2, notrunc),
1768 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1769 code = TRUNC_DIV_EXPR;
1771 real = const_binop (code, t1, magsquared, notrunc);
1772 imag = const_binop (code, t2, magsquared, notrunc);
1781 return build_complex (type, real, imag);
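/* Illustrative worked example (not part of the original source): the
   division case above uses the textbook formula
     (r1 + i1*i) / (r2 + i2*i)
       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2),
   so (1 + 2i) / (3 + 4i) folds to (11 + 2i) / 25 = 0.44 + 0.08i, with
   t1 = 11, t2 = 2 and magsquared = 25 in the code above.  */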
1787 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1788 indicates which particular sizetype to create. */
1791 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1793 return build_int_cst (sizetype_tab[(int) kind], number);
1796 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1797 is a tree code. The type of the result is taken from the operands.
1798 Both must be equivalent integer types, in the sense of int_binop_types_match_p.
1799 If the operands are constant, so is the result. */
1802 size_binop (enum tree_code code, tree arg0, tree arg1)
1804 tree type = TREE_TYPE (arg0);
1806 if (arg0 == error_mark_node || arg1 == error_mark_node)
1807 return error_mark_node;
1809 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1812 /* Handle the special case of two integer constants faster. */
1813 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1815 /* And some specific cases even faster than that. */
1816 if (code == PLUS_EXPR)
1818 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1820 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1823 else if (code == MINUS_EXPR)
1825 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1828 else if (code == MULT_EXPR)
1830 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1834 /* Handle general case of two integer constants. */
1835 return int_const_binop (code, arg0, arg1, 0);
1838 return fold_build2 (code, type, arg0, arg1);
1841 /* Given two values, either both of sizetype or both of bitsizetype,
1842 compute the difference between the two values. Return the value
1843 in the signed type corresponding to the type of the operands. */
1846 size_diffop (tree arg0, tree arg1)
1848 tree type = TREE_TYPE (arg0);
1851 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1854 /* If the type is already signed, just do the simple thing. */
1855 if (!TYPE_UNSIGNED (type))
1856 return size_binop (MINUS_EXPR, arg0, arg1);
1858 if (type == sizetype)
1860 else if (type == bitsizetype)
1861 ctype = sbitsizetype;
1863 ctype = lang_hooks.types.signed_type (type);
1865 /* If either operand is not a constant, do the conversions to the signed
1866 type and subtract. The hardware will do the right thing with any
1867 overflow in the subtraction. */
1868 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1869 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1870 fold_convert (ctype, arg1));
1872 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1873 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1874 overflow) and negate (which can't either). Special-case a result
1875 of zero while we're here. */
1876 if (tree_int_cst_equal (arg0, arg1))
1877 return build_int_cst (ctype, 0);
1878 else if (tree_int_cst_lt (arg1, arg0))
1879 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1881 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1882 fold_convert (ctype, size_binop (MINUS_EXPR,
1886 /* A subroutine of fold_convert_const handling conversions of an
1887 INTEGER_CST to another integer type. */
1890 fold_convert_const_int_from_int (tree type, tree arg1)
1894 /* Given an integer constant, make a new constant with the new type,
1895 appropriately sign-extended or truncated. */
1896 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
1897 TREE_INT_CST_HIGH (arg1),
1898 /* Don't set the overflow when
1899 converting a pointer */
1900 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1901 (TREE_INT_CST_HIGH (arg1) < 0
1902 && (TYPE_UNSIGNED (type)
1903 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1904 | TREE_OVERFLOW (arg1));
1909 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1910 to an integer type. */
1913 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1918 /* The following code implements the floating point to integer
1919 conversion rules required by the Java Language Specification,
1920 that IEEE NaNs are mapped to zero and values that overflow
1921 the target precision saturate, i.e. values greater than
1922 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1923 are mapped to INT_MIN. These semantics are allowed by the
1924 C and C++ standards that simply state that the behavior of
1925 FP-to-integer conversion is unspecified upon overflow. */
1927 HOST_WIDE_INT high, low;
1929 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1933 case FIX_TRUNC_EXPR:
1934 real_trunc (&r, VOIDmode, &x);
1941 /* If R is NaN, return zero and show we have an overflow. */
1942 if (REAL_VALUE_ISNAN (r))
1949 /* See if R is less than the lower bound or greater than the
1954 tree lt = TYPE_MIN_VALUE (type);
1955 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1956 if (REAL_VALUES_LESS (r, l))
1959 high = TREE_INT_CST_HIGH (lt);
1960 low = TREE_INT_CST_LOW (lt);
1966 tree ut = TYPE_MAX_VALUE (type);
1969 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1970 if (REAL_VALUES_LESS (u, r))
1973 high = TREE_INT_CST_HIGH (ut);
1974 low = TREE_INT_CST_LOW (ut);
1980 REAL_VALUE_TO_INT (&low, &high, r);
1982 t = force_fit_type_double (type, low, high, -1,
1983 overflow | TREE_OVERFLOW (arg1));
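/* Illustrative sketch (not part of the original source): with the
   saturating semantics above, converting a REAL_CST holding 1.0e30 to
   integer_type_node yields TYPE_MAX_VALUE with TREE_OVERFLOW set (assuming
   1.0e30 exceeds the type's maximum), while a NaN yields zero, also with
   TREE_OVERFLOW set.  ONE_E30 is a hypothetical REAL_CST argument.  */
#if 0
static void
example_fp_to_int_fold (tree one_e30)
{
  tree t = fold_convert_const_int_from_real (FIX_TRUNC_EXPR,
                                             integer_type_node, one_e30);
  /* T equals TYPE_MAX_VALUE (integer_type_node) and is marked overflowed.  */
  gcc_assert (TREE_OVERFLOW (t));
}
#endif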
1987 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1988 to another floating point type. */
1991 fold_convert_const_real_from_real (tree type, tree arg1)
1993 REAL_VALUE_TYPE value;
1996 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1997 t = build_real (type, value);
1999 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2003 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2004 type TYPE. If no simplification can be done return NULL_TREE. */
2007 fold_convert_const (enum tree_code code, tree type, tree arg1)
2009 if (TREE_TYPE (arg1) == type)
2012 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2014 if (TREE_CODE (arg1) == INTEGER_CST)
2015 return fold_convert_const_int_from_int (type, arg1);
2016 else if (TREE_CODE (arg1) == REAL_CST)
2017 return fold_convert_const_int_from_real (code, type, arg1);
2019 else if (TREE_CODE (type) == REAL_TYPE)
2021 if (TREE_CODE (arg1) == INTEGER_CST)
2022 return build_real_from_int_cst (type, arg1);
2023 if (TREE_CODE (arg1) == REAL_CST)
2024 return fold_convert_const_real_from_real (type, arg1);
2029 /* Construct a vector of zero elements of vector type TYPE. */
2032 build_zero_vector (tree type)
2037 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2038 units = TYPE_VECTOR_SUBPARTS (type);
2041 for (i = 0; i < units; i++)
2042 list = tree_cons (NULL_TREE, elem, list);
2043 return build_vector (type, list);
2046 /* Convert expression ARG to type TYPE. Used by the middle-end for
2047 simple conversions in preference to calling the front-end's convert. */
2050 fold_convert (tree type, tree arg)
2052 tree orig = TREE_TYPE (arg);
2058 if (TREE_CODE (arg) == ERROR_MARK
2059 || TREE_CODE (type) == ERROR_MARK
2060 || TREE_CODE (orig) == ERROR_MARK)
2061 return error_mark_node;
2063 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2064 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2065 TYPE_MAIN_VARIANT (orig)))
2066 return fold_build1 (NOP_EXPR, type, arg);
2068 switch (TREE_CODE (type))
2070 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2071 case POINTER_TYPE: case REFERENCE_TYPE:
2073 if (TREE_CODE (arg) == INTEGER_CST)
2075 tem = fold_convert_const (NOP_EXPR, type, arg);
2076 if (tem != NULL_TREE)
2079 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2080 || TREE_CODE (orig) == OFFSET_TYPE)
2081 return fold_build1 (NOP_EXPR, type, arg);
2082 if (TREE_CODE (orig) == COMPLEX_TYPE)
2084 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2085 return fold_convert (type, tem);
2087 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2088 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2089 return fold_build1 (NOP_EXPR, type, arg);
2092 if (TREE_CODE (arg) == INTEGER_CST)
2094 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2095 if (tem != NULL_TREE)
2098 else if (TREE_CODE (arg) == REAL_CST)
2100 tem = fold_convert_const (NOP_EXPR, type, arg);
2101 if (tem != NULL_TREE)
2105 switch (TREE_CODE (orig))
2108 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2109 case POINTER_TYPE: case REFERENCE_TYPE:
2110 return fold_build1 (FLOAT_EXPR, type, arg);
2113 return fold_build1 (NOP_EXPR, type, arg);
2116 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2117 return fold_convert (type, tem);
2124 switch (TREE_CODE (orig))
2127 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2128 case POINTER_TYPE: case REFERENCE_TYPE:
2130 return build2 (COMPLEX_EXPR, type,
2131 fold_convert (TREE_TYPE (type), arg),
2132 fold_convert (TREE_TYPE (type), integer_zero_node));
2137 if (TREE_CODE (arg) == COMPLEX_EXPR)
2139 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2140 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2141 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2144 arg = save_expr (arg);
2145 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2146 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2147 rpart = fold_convert (TREE_TYPE (type), rpart);
2148 ipart = fold_convert (TREE_TYPE (type), ipart);
2149 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2157 if (integer_zerop (arg))
2158 return build_zero_vector (type);
2159 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2160 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2161 || TREE_CODE (orig) == VECTOR_TYPE);
2162 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2165 tem = fold_ignored_result (arg);
2166 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2168 return fold_build1 (NOP_EXPR, type, tem);
2175 /* Return false if expr can be assumed not to be an lvalue, true
2179 maybe_lvalue_p (tree x)
2181 /* We only need to wrap lvalue tree codes. */
2182 switch (TREE_CODE (x))
2193 case ALIGN_INDIRECT_REF:
2194 case MISALIGNED_INDIRECT_REF:
2196 case ARRAY_RANGE_REF:
2202 case PREINCREMENT_EXPR:
2203 case PREDECREMENT_EXPR:
2205 case TRY_CATCH_EXPR:
2206 case WITH_CLEANUP_EXPR:
2209 case GIMPLE_MODIFY_STMT:
2218 /* Assume the worst for front-end tree codes. */
2219 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2227 /* Return an expr equal to X but certainly not valid as an lvalue. */
2232 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2237 if (! maybe_lvalue_p (x))
2239 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2242 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2243 Zero means allow extended lvalues. */
2245 int pedantic_lvalues;
2247 /* When pedantic, return an expr equal to X but certainly not valid as a
2248 pedantic lvalue. Otherwise, return X. */
2251 pedantic_non_lvalue (tree x)
2253 if (pedantic_lvalues)
2254 return non_lvalue (x);
2259 /* Given a tree comparison code, return the code that is the logical inverse
2260 of the given code. It is not safe to do this for floating-point
2261 comparisons, except for NE_EXPR and EQ_EXPR, so we receive an HONOR_NANS
2262 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2265 invert_tree_comparison (enum tree_code code, bool honor_nans)
2267 if (honor_nans && flag_trapping_math)
2277 return honor_nans ? UNLE_EXPR : LE_EXPR;
2279 return honor_nans ? UNLT_EXPR : LT_EXPR;
2281 return honor_nans ? UNGE_EXPR : GE_EXPR;
2283 return honor_nans ? UNGT_EXPR : GT_EXPR;
2297 return UNORDERED_EXPR;
2298 case UNORDERED_EXPR:
2299 return ORDERED_EXPR;
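/* Illustrative sketch (not part of the original source): when NaNs need
   not be honored the inverse of LT_EXPR is plain GE_EXPR; when they must
   be honored it is UNGE_EXPR, so that a NaN operand makes exactly one of
   the two comparisons true.  If NaNs are honored and trapping math is
   enabled, the function gives up and returns ERROR_MARK.  */
#if 0
static void
example_invert_comparison (void)
{
  gcc_assert (invert_tree_comparison (LT_EXPR, false) == GE_EXPR);
  gcc_assert (invert_tree_comparison (LT_EXPR, true) == UNGE_EXPR
              || flag_trapping_math);
}
#endif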
2305 /* Similar, but return the comparison that results if the operands are
2306 swapped. This is safe for floating-point. */
2309 swap_tree_comparison (enum tree_code code)
2316 case UNORDERED_EXPR:
2342 /* Convert a comparison tree code from an enum tree_code representation
2343 into a compcode bit-based encoding. This function is the inverse of
2344 compcode_to_comparison. */
2346 static enum comparison_code
2347 comparison_to_compcode (enum tree_code code)
2364 return COMPCODE_ORD;
2365 case UNORDERED_EXPR:
2366 return COMPCODE_UNORD;
2368 return COMPCODE_UNLT;
2370 return COMPCODE_UNEQ;
2372 return COMPCODE_UNLE;
2374 return COMPCODE_UNGT;
2376 return COMPCODE_LTGT;
2378 return COMPCODE_UNGE;
2384 /* Convert a compcode bit-based encoding of a comparison operator back
2385 to GCC's enum tree_code representation. This function is the
2386 inverse of comparison_to_compcode. */
2388 static enum tree_code
2389 compcode_to_comparison (enum comparison_code code)
2406 return ORDERED_EXPR;
2407 case COMPCODE_UNORD:
2408 return UNORDERED_EXPR;
2426 /* Return a tree for the comparison which is the combination of
2427 doing the AND or OR (depending on CODE) of the two operations LCODE
2428 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2429 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2430 if this makes the transformation invalid. */
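/* As an illustration of the encoding at work: ANDing the LE and GE
   encodings gives the EQ encoding, so (x <= y) && (x >= y) can fold to
   x == y, and ORing LT with EQ gives LE, so (x < y) || (x == y) can fold
   to x <= y, provided the trap and NaN checks below allow it.  */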
2433 combine_comparisons (enum tree_code code, enum tree_code lcode,
2434 enum tree_code rcode, tree truth_type,
2435 tree ll_arg, tree lr_arg)
2437 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2438 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2439 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2440 enum comparison_code compcode;
2444 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2445 compcode = lcompcode & rcompcode;
2448 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2449 compcode = lcompcode | rcompcode;
2458 /* Eliminate unordered comparisons, as well as LTGT and ORD
2459 which are not used unless the mode has NaNs. */
2460 compcode &= ~COMPCODE_UNORD;
2461 if (compcode == COMPCODE_LTGT)
2462 compcode = COMPCODE_NE;
2463 else if (compcode == COMPCODE_ORD)
2464 compcode = COMPCODE_TRUE;
2466 else if (flag_trapping_math)
2468 /* Check that the original operation and the optimized ones will trap
2469 under the same condition. */
2470 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2471 && (lcompcode != COMPCODE_EQ)
2472 && (lcompcode != COMPCODE_ORD);
2473 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2474 && (rcompcode != COMPCODE_EQ)
2475 && (rcompcode != COMPCODE_ORD);
2476 bool trap = (compcode & COMPCODE_UNORD) == 0
2477 && (compcode != COMPCODE_EQ)
2478 && (compcode != COMPCODE_ORD);
2480 /* In a short-circuited boolean expression the LHS might be
2481 such that the RHS, if evaluated, will never trap. For
2482 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2483 if neither x nor y is NaN. (This is a mixed blessing: for
2484 example, the expression above will never trap, hence
2485 optimizing it to x < y would be invalid). */
2486 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2487 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2490 /* If the comparison was short-circuited, and only the RHS
2491 trapped, we may now generate a spurious trap. */
2493 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2496 /* If we changed the conditions that cause a trap, we lose. */
2497 if ((ltrap || rtrap) != trap)
2501 if (compcode == COMPCODE_TRUE)
2502 return constant_boolean_node (true, truth_type);
2503 else if (compcode == COMPCODE_FALSE)
2504 return constant_boolean_node (false, truth_type);
2506 return fold_build2 (compcode_to_comparison (compcode),
2507 truth_type, ll_arg, lr_arg);
2510 /* Return nonzero if CODE is a tree code that represents a truth value. */
2513 truth_value_p (enum tree_code code)
2515 return (TREE_CODE_CLASS (code) == tcc_comparison
2516 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2517 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2518 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2521 /* Return nonzero if two operands (typically of the same tree node)
2522 are necessarily equal. If either argument has side-effects this
2523 function returns zero. FLAGS modifies behavior as follows:
2525 If OEP_ONLY_CONST is set, only return nonzero for constants.
2526 This function tests whether the operands are indistinguishable;
2527 it does not test whether they are equal using C's == operation.
2528 The distinction is important for IEEE floating point, because
2529 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2530 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2532 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2533 even though it may hold multiple values during a function.
2534 This is because a GCC tree node guarantees that nothing else is
2535 executed between the evaluation of its "operands" (which may often
2536 be evaluated in arbitrary order). Hence if the operands themselves
2537 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2538 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2539 unset means assuming isochronic (or instantaneous) tree equivalence.
2540 Unless comparing arbitrary expression trees, such as from different
2541 statements, this flag can usually be left unset.
2543 If OEP_PURE_SAME is set, then pure functions with identical arguments
2544 are considered the same. It is used when the caller has other ways
2545 to ensure that global memory is unchanged in between. */
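/* For example, two uses of the same VAR_DECL "i" compare equal with the
   default flags (they are the same tree node and have no side effects),
   and "i + 1" equals "1 + i" because PLUS_EXPR is commutative, but an
   operand with side effects such as "i++" never compares equal, not even
   to itself.  */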
2548 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2550 /* If either is ERROR_MARK, they aren't equal. */
2551 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2554 /* If both types don't have the same signedness, then we can't consider
2555 them equal. We must check this before the STRIP_NOPS calls
2556 because they may change the signedness of the arguments. */
2557 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2560 /* If both types don't have the same precision, then it is not safe
2562 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2568 /* In case both args are comparisons but with different comparison
2569 code, try to swap the comparison operands of one arg to produce
2570 a match and compare that variant. */
2571 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2572 && COMPARISON_CLASS_P (arg0)
2573 && COMPARISON_CLASS_P (arg1))
2575 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2577 if (TREE_CODE (arg0) == swap_code)
2578 return operand_equal_p (TREE_OPERAND (arg0, 0),
2579 TREE_OPERAND (arg1, 1), flags)
2580 && operand_equal_p (TREE_OPERAND (arg0, 1),
2581 TREE_OPERAND (arg1, 0), flags);
2584 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2585 /* This is needed for conversions and for COMPONENT_REF.
2586 Might as well play it safe and always test this. */
2587 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2588 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2589 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2592 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2593 We don't care about side effects in that case because the SAVE_EXPR
2594 takes care of that for us. In all other cases, two expressions are
2595 equal if they have no side effects. If we have two identical
2596 expressions with side effects that should be treated the same due
2597 to the only side effects being identical SAVE_EXPR's, that will
2598 be detected in the recursive calls below. */
2599 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2600 && (TREE_CODE (arg0) == SAVE_EXPR
2601 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2604 /* Next handle constant cases, those for which we can return 1 even
2605 if ONLY_CONST is set. */
2606 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2607 switch (TREE_CODE (arg0))
2610 return tree_int_cst_equal (arg0, arg1);
2613 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2614 TREE_REAL_CST (arg1)))
2618 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2620 /* If we do not distinguish between signed and unsigned zero,
2621 consider them equal. */
2622 if (real_zerop (arg0) && real_zerop (arg1))
2631 v1 = TREE_VECTOR_CST_ELTS (arg0);
2632 v2 = TREE_VECTOR_CST_ELTS (arg1);
2635 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2638 v1 = TREE_CHAIN (v1);
2639 v2 = TREE_CHAIN (v2);
2646 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2648 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2652 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2653 && ! memcmp (TREE_STRING_POINTER (arg0),
2654 TREE_STRING_POINTER (arg1),
2655 TREE_STRING_LENGTH (arg0)));
2658 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2664 if (flags & OEP_ONLY_CONST)
2667 /* Define macros to test an operand from arg0 and arg1 for equality and a
2668 variant that allows null and views null as being different from any
2669 non-null value. In the latter case, if either is null, then both
2670 must be; otherwise, do the normal comparison. */
2671 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2672 TREE_OPERAND (arg1, N), flags)
2674 #define OP_SAME_WITH_NULL(N) \
2675 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2676 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2678 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2681 /* Two conversions are equal only if signedness and modes match. */
2682 switch (TREE_CODE (arg0))
2686 case FIX_TRUNC_EXPR:
2687 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2688 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2698 case tcc_comparison:
2700 if (OP_SAME (0) && OP_SAME (1))
2703 /* For commutative ops, allow the other order. */
2704 return (commutative_tree_code (TREE_CODE (arg0))
2705 && operand_equal_p (TREE_OPERAND (arg0, 0),
2706 TREE_OPERAND (arg1, 1), flags)
2707 && operand_equal_p (TREE_OPERAND (arg0, 1),
2708 TREE_OPERAND (arg1, 0), flags));
2711 /* If either of the pointer (or reference) expressions we are
2712 dereferencing contains a side effect, these cannot be equal. */
2713 if (TREE_SIDE_EFFECTS (arg0)
2714 || TREE_SIDE_EFFECTS (arg1))
2717 switch (TREE_CODE (arg0))
2720 case ALIGN_INDIRECT_REF:
2721 case MISALIGNED_INDIRECT_REF:
2727 case ARRAY_RANGE_REF:
2728 /* Operands 2 and 3 may be null. */
2731 && OP_SAME_WITH_NULL (2)
2732 && OP_SAME_WITH_NULL (3));
2735 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2736 may be NULL when we're called to compare MEM_EXPRs. */
2737 return OP_SAME_WITH_NULL (0)
2739 && OP_SAME_WITH_NULL (2);
2742 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2748 case tcc_expression:
2749 switch (TREE_CODE (arg0))
2752 case TRUTH_NOT_EXPR:
2755 case TRUTH_ANDIF_EXPR:
2756 case TRUTH_ORIF_EXPR:
2757 return OP_SAME (0) && OP_SAME (1);
2759 case TRUTH_AND_EXPR:
2761 case TRUTH_XOR_EXPR:
2762 if (OP_SAME (0) && OP_SAME (1))
2765 /* Otherwise take into account this is a commutative operation. */
2766 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2767 TREE_OPERAND (arg1, 1), flags)
2768 && operand_equal_p (TREE_OPERAND (arg0, 1),
2769 TREE_OPERAND (arg1, 0), flags));
2772 /* If the CALL_EXPRs call different functions, then they
2773 clearly can not be equal. */
2778 unsigned int cef = call_expr_flags (arg0);
2779 if (flags & OEP_PURE_SAME)
2780 cef &= ECF_CONST | ECF_PURE;
2787 /* Now see if all the arguments are the same. operand_equal_p
2788 does not handle TREE_LIST, so we walk the operands here
2789 feeding them to operand_equal_p. */
2790 arg0 = TREE_OPERAND (arg0, 1);
2791 arg1 = TREE_OPERAND (arg1, 1);
2792 while (arg0 && arg1)
2794 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2798 arg0 = TREE_CHAIN (arg0);
2799 arg1 = TREE_CHAIN (arg1);
2802 /* If we get here and both argument lists are exhausted
2803 then the CALL_EXPRs are equal. */
2804 return ! (arg0 || arg1);
2810 case tcc_declaration:
2811 /* Consider __builtin_sqrt equal to sqrt. */
2812 return (TREE_CODE (arg0) == FUNCTION_DECL
2813 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2814 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2815 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2822 #undef OP_SAME_WITH_NULL
2825 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2826 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2828 When in doubt, return 0. */
2831 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2833 int unsignedp1, unsignedpo;
2834 tree primarg0, primarg1, primother;
2835 unsigned int correct_width;
2837 if (operand_equal_p (arg0, arg1, 0))
2840 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2841 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2844 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2845 and see if the inner values are the same. This removes any
2846 signedness comparison, which doesn't matter here. */
2847 primarg0 = arg0, primarg1 = arg1;
2848 STRIP_NOPS (primarg0);
2849 STRIP_NOPS (primarg1);
2850 if (operand_equal_p (primarg0, primarg1, 0))
2853 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2854 actual comparison operand, ARG0.
2856 First throw away any conversions to wider types
2857 already present in the operands. */
2859 primarg1 = get_narrower (arg1, &unsignedp1);
2860 primother = get_narrower (other, &unsignedpo);
2862 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2863 if (unsignedp1 == unsignedpo
2864 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2865 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2867 tree type = TREE_TYPE (arg0);
3869 /* Make sure the shorter operand is extended the right way
2870 to match the longer operand. */
2871 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2872 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2874 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2881 /* See if ARG is an expression that is either a comparison or is performing
2882 arithmetic on comparisons. The comparisons must only be comparing
2883 two different values, which will be stored in *CVAL1 and *CVAL2; if
2884 they are nonzero it means that some operands have already been found.
2885 No variables may be used anywhere else in the expression except in the
2886 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2887 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2889 If this is true, return 1. Otherwise, return zero. */
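/* For instance, for (a < b) || (a == b) this returns 1 and sets *CVAL1 and
   *CVAL2 to a and b, whereas (a < b) || (a == c) fails because three
   distinct values appear in the comparisons.  */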
2892 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2894 enum tree_code code = TREE_CODE (arg);
2895 enum tree_code_class class = TREE_CODE_CLASS (code);
2897 /* We can handle some of the tcc_expression cases here. */
2898 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2900 else if (class == tcc_expression
2901 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2902 || code == COMPOUND_EXPR))
2905 else if (class == tcc_expression && code == SAVE_EXPR
2906 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2908 /* If we've already found a CVAL1 or CVAL2, this expression is
2909 too complex to handle. */
2910 if (*cval1 || *cval2)
2920 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2923 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2924 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2925 cval1, cval2, save_p));
2930 case tcc_expression:
2931 if (code == COND_EXPR)
2932 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2933 cval1, cval2, save_p)
2934 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2935 cval1, cval2, save_p)
2936 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2937 cval1, cval2, save_p));
2940 case tcc_comparison:
2941 /* First see if we can handle the first operand, then the second. For
2942 the second operand, we know *CVAL1 can't be zero. It must be that
2943 one side of the comparison is each of the values; test for the
2944 case where this isn't true by failing if the two operands
2947 if (operand_equal_p (TREE_OPERAND (arg, 0),
2948 TREE_OPERAND (arg, 1), 0))
2952 *cval1 = TREE_OPERAND (arg, 0);
2953 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2955 else if (*cval2 == 0)
2956 *cval2 = TREE_OPERAND (arg, 0);
2957 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2962 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2964 else if (*cval2 == 0)
2965 *cval2 = TREE_OPERAND (arg, 1);
2966 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2978 /* ARG is a tree that is known to contain just arithmetic operations and
2979 comparisons. Evaluate the operations in the tree substituting NEW0 for
2980 any occurrence of OLD0 as an operand of a comparison and likewise for
2984 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2986 tree type = TREE_TYPE (arg);
2987 enum tree_code code = TREE_CODE (arg);
2988 enum tree_code_class class = TREE_CODE_CLASS (code);
2990 /* We can handle some of the tcc_expression cases here. */
2991 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2993 else if (class == tcc_expression
2994 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3000 return fold_build1 (code, type,
3001 eval_subst (TREE_OPERAND (arg, 0),
3002 old0, new0, old1, new1));
3005 return fold_build2 (code, type,
3006 eval_subst (TREE_OPERAND (arg, 0),
3007 old0, new0, old1, new1),
3008 eval_subst (TREE_OPERAND (arg, 1),
3009 old0, new0, old1, new1));
3011 case tcc_expression:
3015 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3018 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3021 return fold_build3 (code, type,
3022 eval_subst (TREE_OPERAND (arg, 0),
3023 old0, new0, old1, new1),
3024 eval_subst (TREE_OPERAND (arg, 1),
3025 old0, new0, old1, new1),
3026 eval_subst (TREE_OPERAND (arg, 2),
3027 old0, new0, old1, new1));
3031 /* Fall through - ??? */
3033 case tcc_comparison:
3035 tree arg0 = TREE_OPERAND (arg, 0);
3036 tree arg1 = TREE_OPERAND (arg, 1);
3038 /* We need to check both for exact equality and tree equality. The
3039 former will be true if the operand has a side-effect. In that
3040 case, we know the operand occurred exactly once. */
3042 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3044 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3047 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3049 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3052 return fold_build2 (code, type, arg0, arg1);
3060 /* Return a tree for the case when the result of an expression is RESULT
3061 converted to TYPE and OMITTED was previously an operand of the expression
3062 but is now not needed (e.g., we folded OMITTED * 0).
3064 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3065 the conversion of RESULT to TYPE. */
3068 omit_one_operand (tree type, tree result, tree omitted)
3070 tree t = fold_convert (type, result);
3072 if (TREE_SIDE_EFFECTS (omitted))
3073 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3075 return non_lvalue (t);
3078 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3081 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3083 tree t = fold_convert (type, result);
3085 if (TREE_SIDE_EFFECTS (omitted))
3086 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3088 return pedantic_non_lvalue (t);
3091 /* Return a tree for the case when the result of an expression is RESULT
3092 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3093 of the expression but are now not needed.
3095 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3096 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3097 evaluated before OMITTED2. Otherwise, if neither has side effects,
3098 just do the conversion of RESULT to TYPE. */
3101 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3103 tree t = fold_convert (type, result);
3105 if (TREE_SIDE_EFFECTS (omitted2))
3106 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3107 if (TREE_SIDE_EFFECTS (omitted1))
3108 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3110 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3114 /* Return a simplified tree node for the truth-negation of ARG. This
3115 never alters ARG itself. We assume that ARG is an operation that
3116 returns a truth value (0 or 1).
3118 FIXME: one would think we would fold the result, but it causes
3119 problems with the dominator optimizer. */
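/* Typical results: !(a < b) becomes a >= b (or a UNGE b when NaNs must be
   honored), !(p && q) becomes !p || !q, and !!x becomes x.  */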
3122 fold_truth_not_expr (tree arg)
3124 tree type = TREE_TYPE (arg);
3125 enum tree_code code = TREE_CODE (arg);
3127 /* If this is a comparison, we can simply invert it, except for
3128 floating-point non-equality comparisons, in which case we just
3129 enclose a TRUTH_NOT_EXPR around what we have. */
3131 if (TREE_CODE_CLASS (code) == tcc_comparison)
3133 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3134 if (FLOAT_TYPE_P (op_type)
3135 && flag_trapping_math
3136 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3137 && code != NE_EXPR && code != EQ_EXPR)
3141 code = invert_tree_comparison (code,
3142 HONOR_NANS (TYPE_MODE (op_type)));
3143 if (code == ERROR_MARK)
3146 return build2 (code, type,
3147 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3154 return constant_boolean_node (integer_zerop (arg), type);
3156 case TRUTH_AND_EXPR:
3157 return build2 (TRUTH_OR_EXPR, type,
3158 invert_truthvalue (TREE_OPERAND (arg, 0)),
3159 invert_truthvalue (TREE_OPERAND (arg, 1)));
3162 return build2 (TRUTH_AND_EXPR, type,
3163 invert_truthvalue (TREE_OPERAND (arg, 0)),
3164 invert_truthvalue (TREE_OPERAND (arg, 1)));
3166 case TRUTH_XOR_EXPR:
3167 /* Here we can invert either operand. We invert the first operand
3168 unless the second operand is a TRUTH_NOT_EXPR in which case our
3169 result is the XOR of the first operand with the inside of the
3170 negation of the second operand. */
3172 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3173 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3174 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3176 return build2 (TRUTH_XOR_EXPR, type,
3177 invert_truthvalue (TREE_OPERAND (arg, 0)),
3178 TREE_OPERAND (arg, 1));
3180 case TRUTH_ANDIF_EXPR:
3181 return build2 (TRUTH_ORIF_EXPR, type,
3182 invert_truthvalue (TREE_OPERAND (arg, 0)),
3183 invert_truthvalue (TREE_OPERAND (arg, 1)));
3185 case TRUTH_ORIF_EXPR:
3186 return build2 (TRUTH_ANDIF_EXPR, type,
3187 invert_truthvalue (TREE_OPERAND (arg, 0)),
3188 invert_truthvalue (TREE_OPERAND (arg, 1)));
3190 case TRUTH_NOT_EXPR:
3191 return TREE_OPERAND (arg, 0);
3195 tree arg1 = TREE_OPERAND (arg, 1);
3196 tree arg2 = TREE_OPERAND (arg, 2);
3197 /* A COND_EXPR may have a throw as one operand, which
3198 then has void type. Just leave void operands
3200 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3201 VOID_TYPE_P (TREE_TYPE (arg1))
3202 ? arg1 : invert_truthvalue (arg1),
3203 VOID_TYPE_P (TREE_TYPE (arg2))
3204 ? arg2 : invert_truthvalue (arg2));
3208 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3209 invert_truthvalue (TREE_OPERAND (arg, 1)));
3211 case NON_LVALUE_EXPR:
3212 return invert_truthvalue (TREE_OPERAND (arg, 0));
3215 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3216 return build1 (TRUTH_NOT_EXPR, type, arg);
3220 return build1 (TREE_CODE (arg), type,
3221 invert_truthvalue (TREE_OPERAND (arg, 0)));
3224 if (!integer_onep (TREE_OPERAND (arg, 1)))
3226 return build2 (EQ_EXPR, type, arg,
3227 build_int_cst (type, 0));
3230 return build1 (TRUTH_NOT_EXPR, type, arg);
3232 case CLEANUP_POINT_EXPR:
3233 return build1 (CLEANUP_POINT_EXPR, type,
3234 invert_truthvalue (TREE_OPERAND (arg, 0)));
3243 /* Return a simplified tree node for the truth-negation of ARG. This
3244 never alters ARG itself. We assume that ARG is an operation that
3245 returns a truth value (0 or 1).
3247 FIXME: one would think we would fold the result, but it causes
3248 problems with the dominator optimizer. */
3251 invert_truthvalue (tree arg)
3255 if (TREE_CODE (arg) == ERROR_MARK)
3258 tem = fold_truth_not_expr (arg);
3260 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3265 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3266 operands are another bit-wise operation with a common input. If so,
3267 distribute the bit operations to save an operation and possibly two if
3268 constants are involved. For example, convert
3269 (A | B) & (A | C) into A | (B & C)
3270 Further simplification will occur if B and C are constants.
3272 If this optimization cannot be done, 0 will be returned. */
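/* The same routine handles the dual form, e.g. (A & B) | (A & C) into
   A & (B | C), and the common operand may appear in either position of
   either operand, e.g. (B | A) & (A | C).  */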
3275 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3280 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3281 || TREE_CODE (arg0) == code
3282 || (TREE_CODE (arg0) != BIT_AND_EXPR
3283 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3286 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3288 common = TREE_OPERAND (arg0, 0);
3289 left = TREE_OPERAND (arg0, 1);
3290 right = TREE_OPERAND (arg1, 1);
3292 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3294 common = TREE_OPERAND (arg0, 0);
3295 left = TREE_OPERAND (arg0, 1);
3296 right = TREE_OPERAND (arg1, 0);
3298 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3300 common = TREE_OPERAND (arg0, 1);
3301 left = TREE_OPERAND (arg0, 0);
3302 right = TREE_OPERAND (arg1, 1);
3304 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3306 common = TREE_OPERAND (arg0, 1);
3307 left = TREE_OPERAND (arg0, 0);
3308 right = TREE_OPERAND (arg1, 0);
3313 return fold_build2 (TREE_CODE (arg0), type, common,
3314 fold_build2 (code, type, left, right));
3317 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3318 with code CODE. This optimization is unsafe. */
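/* It is unsafe because floating-point division does not distribute
   exactly: (A + B) / C need not round to the same value as A/C + B/C, so
   callers are expected to allow this only when unsafe math optimizations
   are enabled.  */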
3320 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3322 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3323 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3325 /* (A / C) +- (B / C) -> (A +- B) / C. */
3327 && operand_equal_p (TREE_OPERAND (arg0, 1),
3328 TREE_OPERAND (arg1, 1), 0))
3329 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3330 fold_build2 (code, type,
3331 TREE_OPERAND (arg0, 0),
3332 TREE_OPERAND (arg1, 0)),
3333 TREE_OPERAND (arg0, 1));
3335 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3336 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3337 TREE_OPERAND (arg1, 0), 0)
3338 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3339 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3341 REAL_VALUE_TYPE r0, r1;
3342 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3343 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3345 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3347 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3348 real_arithmetic (&r0, code, &r0, &r1);
3349 return fold_build2 (MULT_EXPR, type,
3350 TREE_OPERAND (arg0, 0),
3351 build_real (type, r0));
3357 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3358 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3361 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3368 tree size = TYPE_SIZE (TREE_TYPE (inner));
3369 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3370 || POINTER_TYPE_P (TREE_TYPE (inner)))
3371 && host_integerp (size, 0)
3372 && tree_low_cst (size, 0) == bitsize)
3373 return fold_convert (type, inner);
3376 result = build3 (BIT_FIELD_REF, type, inner,
3377 size_int (bitsize), bitsize_int (bitpos));
3379 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3384 /* Optimize a bit-field compare.
3386 There are two cases: First is a compare against a constant and the
3387 second is a comparison of two items where the fields are at the same
3388 bit position relative to the start of a chunk (byte, halfword, word)
3389 large enough to contain it. In these cases we can avoid the shift
3390 implicit in bitfield extractions.
3392 For constants, we emit a compare of the shifted constant with the
3393 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3394 compared. For two fields at the same position, we do the ANDs with the
3395 similar mask and compare the result of the ANDs.
3397 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3398 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3399 are the left and right operands of the comparison, respectively.
3401 If the optimization described above can be done, we return the resulting
3402 tree. Otherwise we return zero. */
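/* For example, for "struct S { unsigned a : 3; unsigned b : 3; } s;" the
   test "s.b == 5" can (with a typical little-endian bit layout) become a
   compare of (word containing s.b) & 0x38 against 5 << 3, avoiding the
   shift that extracting s.b would otherwise require.  */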
3405 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3408 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3409 tree type = TREE_TYPE (lhs);
3410 tree signed_type, unsigned_type;
3411 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3412 enum machine_mode lmode, rmode, nmode;
3413 int lunsignedp, runsignedp;
3414 int lvolatilep = 0, rvolatilep = 0;
3415 tree linner, rinner = NULL_TREE;
3419 /* Get all the information about the extractions being done. If the bit size
3420 is the same as the size of the underlying object, we aren't doing an
3421 extraction at all and so can do nothing. We also don't want to
3422 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3423 then will no longer be able to replace it. */
3424 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3425 &lunsignedp, &lvolatilep, false);
3426 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3427 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3432 /* If this is not a constant, we can only do something if bit positions,
3433 sizes, and signedness are the same. */
3434 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3435 &runsignedp, &rvolatilep, false);
3437 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3438 || lunsignedp != runsignedp || offset != 0
3439 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3443 /* See if we can find a mode to refer to this field. We should be able to,
3444 but fail if we can't. */
3445 nmode = get_best_mode (lbitsize, lbitpos,
3446 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3447 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3448 TYPE_ALIGN (TREE_TYPE (rinner))),
3449 word_mode, lvolatilep || rvolatilep);
3450 if (nmode == VOIDmode)
3453 /* Set signed and unsigned types of the precision of this mode for the
3455 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3456 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3458 /* Compute the bit position and size for the new reference and our offset
3459 within it. If the new reference is the same size as the original, we
3460 won't optimize anything, so return zero. */
3461 nbitsize = GET_MODE_BITSIZE (nmode);
3462 nbitpos = lbitpos & ~ (nbitsize - 1);
3464 if (nbitsize == lbitsize)
3467 if (BYTES_BIG_ENDIAN)
3468 lbitpos = nbitsize - lbitsize - lbitpos;
3470 /* Make the mask to be used against the extracted field. */
3471 mask = build_int_cst_type (unsigned_type, -1);
3472 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3473 mask = const_binop (RSHIFT_EXPR, mask,
3474 size_int (nbitsize - lbitsize - lbitpos), 0);
3477 /* If not comparing with constant, just rework the comparison
3479 return fold_build2 (code, compare_type,
3480 fold_build2 (BIT_AND_EXPR, unsigned_type,
3481 make_bit_field_ref (linner,
3486 fold_build2 (BIT_AND_EXPR, unsigned_type,
3487 make_bit_field_ref (rinner,
3493 /* Otherwise, we are handling the constant case. See if the constant is too
3494 big for the field. Warn and return a tree for 0 (false) if so. We do
3495 this not only for its own sake, but to avoid having to test for this
3496 error case below. If we didn't, we might generate wrong code.
3498 For unsigned fields, the constant shifted right by the field length should
3499 be all zero. For signed fields, the high-order bits should agree with
3504 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3505 fold_convert (unsigned_type, rhs),
3506 size_int (lbitsize), 0)))
3508 warning (0, "comparison is always %d due to width of bit-field",
3510 return constant_boolean_node (code == NE_EXPR, compare_type);
3515 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3516 size_int (lbitsize - 1), 0);
3517 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3519 warning (0, "comparison is always %d due to width of bit-field",
3521 return constant_boolean_node (code == NE_EXPR, compare_type);
3525 /* Single-bit compares should always be against zero. */
3526 if (lbitsize == 1 && ! integer_zerop (rhs))
3528 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3529 rhs = build_int_cst (type, 0);
3532 /* Make a new bitfield reference, shift the constant over the
3533 appropriate number of bits and mask it with the computed mask
3534 (in case this was a signed field). If we changed it, make a new one. */
3535 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3538 TREE_SIDE_EFFECTS (lhs) = 1;
3539 TREE_THIS_VOLATILE (lhs) = 1;
3542 rhs = const_binop (BIT_AND_EXPR,
3543 const_binop (LSHIFT_EXPR,
3544 fold_convert (unsigned_type, rhs),
3545 size_int (lbitpos), 0),
3548 return build2 (code, compare_type,
3549 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3553 /* Subroutine for fold_truthop: decode a field reference.
3555 If EXP is a comparison reference, we return the innermost reference.
3557 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3558 set to the starting bit number.
3560 If the innermost field can be completely contained in a mode-sized
3561 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3563 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3564 otherwise it is not changed.
3566 *PUNSIGNEDP is set to the signedness of the field.
3568 *PMASK is set to the mask used. This is either contained in a
3569 BIT_AND_EXPR or derived from the width of the field.
3571 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3573 Return 0 if this is not a component reference or is one that we can't
3574 do anything with. */
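/* For example, given EXP of the form "s.f & 3" where f is an integral
   bit-field, this returns the object containing f, sets *PBITSIZE and
   *PBITPOS from the field, sets *PAND_MASK to 3, and stores in *PMASK the
   field mask merged with that constant.  */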
3577 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3578 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3579 int *punsignedp, int *pvolatilep,
3580 tree *pmask, tree *pand_mask)
3582 tree outer_type = 0;
3584 tree mask, inner, offset;
3586 unsigned int precision;
3588 /* All the optimizations using this function assume integer fields.
3589 There are problems with FP fields since the type_for_size call
3590 below can fail for, e.g., XFmode. */
3591 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3594 /* We are interested in the bare arrangement of bits, so strip everything
3595 that doesn't affect the machine mode. However, record the type of the
3596 outermost expression if it may matter below. */
3597 if (TREE_CODE (exp) == NOP_EXPR
3598 || TREE_CODE (exp) == CONVERT_EXPR
3599 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3600 outer_type = TREE_TYPE (exp);
3603 if (TREE_CODE (exp) == BIT_AND_EXPR)
3605 and_mask = TREE_OPERAND (exp, 1);
3606 exp = TREE_OPERAND (exp, 0);
3607 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3608 if (TREE_CODE (and_mask) != INTEGER_CST)
3612 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3613 punsignedp, pvolatilep, false);
3614 if ((inner == exp && and_mask == 0)
3615 || *pbitsize < 0 || offset != 0
3616 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3619 /* If the number of bits in the reference is the same as the bitsize of
3620 the outer type, then the outer type gives the signedness. Otherwise
3621 (in case of a small bitfield) the signedness is unchanged. */
3622 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3623 *punsignedp = TYPE_UNSIGNED (outer_type);
3625 /* Compute the mask to access the bitfield. */
3626 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3627 precision = TYPE_PRECISION (unsigned_type);
3629 mask = build_int_cst_type (unsigned_type, -1);
3631 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3632 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3634 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3636 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3637 fold_convert (unsigned_type, and_mask), mask);
3640 *pand_mask = and_mask;
3644 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3648 all_ones_mask_p (tree mask, int size)
3650 tree type = TREE_TYPE (mask);
3651 unsigned int precision = TYPE_PRECISION (type);
3654 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3657 tree_int_cst_equal (mask,
3658 const_binop (RSHIFT_EXPR,
3659 const_binop (LSHIFT_EXPR, tmask,
3660 size_int (precision - size),
3662 size_int (precision - size), 0));
3665 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3666 represents the sign bit of EXP's type. If EXP represents a sign
3667 or zero extension, also test VAL against the unextended type.
3668 The return value is the (sub)expression whose sign bit is VAL,
3669 or NULL_TREE otherwise. */
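/* For example, with a 32-bit int EXP (and HOST_WIDE_INT of at least 32
   bits), a VAL of (int) 0x80000000 is recognized as the sign bit and EXP
   itself is returned.  */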
3672 sign_bit_p (tree exp, tree val)
3674 unsigned HOST_WIDE_INT mask_lo, lo;
3675 HOST_WIDE_INT mask_hi, hi;
3679 /* Tree EXP must have an integral type. */
3680 t = TREE_TYPE (exp);
3681 if (! INTEGRAL_TYPE_P (t))
3684 /* Tree VAL must be an integer constant. */
3685 if (TREE_CODE (val) != INTEGER_CST
3686 || TREE_OVERFLOW (val))
3689 width = TYPE_PRECISION (t);
3690 if (width > HOST_BITS_PER_WIDE_INT)
3692 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3695 mask_hi = ((unsigned HOST_WIDE_INT) -1
3696 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3702 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3705 mask_lo = ((unsigned HOST_WIDE_INT) -1
3706 >> (HOST_BITS_PER_WIDE_INT - width));
3709 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3710 treat VAL as if it were unsigned. */
3711 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3712 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3715 /* Handle extension from a narrower type. */
3716 if (TREE_CODE (exp) == NOP_EXPR
3717 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3718 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3723 /* Subroutine for fold_truthop: determine if an operand is simple enough
3724 to be evaluated unconditionally. */
3727 simple_operand_p (tree exp)
3729 /* Strip any conversions that don't change the machine mode. */
3732 return (CONSTANT_CLASS_P (exp)
3733 || TREE_CODE (exp) == SSA_NAME
3735 && ! TREE_ADDRESSABLE (exp)
3736 && ! TREE_THIS_VOLATILE (exp)
3737 && ! DECL_NONLOCAL (exp)
3738 /* Don't regard global variables as simple. They may be
3739 allocated in ways unknown to the compiler (shared memory,
3740 #pragma weak, etc). */
3741 && ! TREE_PUBLIC (exp)
3742 && ! DECL_EXTERNAL (exp)
3743 /* Loading a static variable is unduly expensive, but global
3744 registers aren't expensive. */
3745 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3748 /* The following functions are subroutines to fold_range_test and allow it to
3749 try to change a logical combination of comparisons into a range test.
3752 X == 2 || X == 3 || X == 4 || X == 5
3756 (unsigned) (X - 2) <= 3
3758 We describe each set of comparisons as being either inside or outside
3759 a range, using a variable named like IN_P, and then describe the
3760 range with a lower and upper bound. If one of the bounds is omitted,
3761 it represents either the highest or lowest value of the type.
3763 In the comments below, we represent a range by two numbers in brackets
3764 preceded by a "+" to designate being inside that range, or a "-" to
3765 designate being outside that range, so the condition can be inverted by
3766 flipping the prefix. An omitted bound is represented by a "-". For
3767 example, "- [-, 10]" means being outside the range starting at the lowest
3768 possible value and ending at 10, in other words, being greater than 10.
3769 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3772 We set up things so that the missing bounds are handled in a consistent
3773 manner so neither a missing bound nor "true" and "false" need to be
3774 handled using a special case. */
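/* Continuing the example above: "X == 2" is the range "+ [2, 2]",
   "X >= 2 && X <= 5" combines "+ [2, -]" and "+ [-, 5]" into "+ [2, 5]",
   and inverting that gives "- [2, 5]", i.e. X < 2 || X > 5.  */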
3776 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3777 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3778 and UPPER1_P are nonzero if the respective argument is an upper bound
3779 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3780 must be specified for a comparison. ARG1 will be converted to ARG0's
3781 type if both are specified. */
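/* For example, range_binop (LT_EXPR, integer_type_node, high0, 1, low1, 0)
   asks whether the upper bound HIGH0 lies below the lower bound LOW1, with
   a missing HIGH0 acting as +infinity and a missing LOW1 as -infinity.  */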
3784 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3785 tree arg1, int upper1_p)
3791 /* If neither arg represents infinity, do the normal operation.
3792 Else, if not a comparison, return infinity. Else handle the special
3793 comparison rules. Note that most of the cases below won't occur, but
3794 are handled for consistency. */
3796 if (arg0 != 0 && arg1 != 0)
3798 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3799 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3801 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3804 if (TREE_CODE_CLASS (code) != tcc_comparison)
3807 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3808 for neither. In real mathematics, we cannot assume open-ended ranges are
3809 the same. But, this is computer arithmetic, where numbers are finite.
3810 We can therefore replace a missing lower bound with a value below every
3811 representable number, and a missing upper bound with one above. This permits
3812 us to treat unbounded ranges as equal. */
3813 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3814 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3818 result = sgn0 == sgn1;
3821 result = sgn0 != sgn1;
3824 result = sgn0 < sgn1;
3827 result = sgn0 <= sgn1;
3830 result = sgn0 > sgn1;
3833 result = sgn0 >= sgn1;
3839 return constant_boolean_node (result, type);
3842 /* Given EXP, a logical expression, set the range it is testing into
3843 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3844 actually being tested. *PLOW and *PHIGH will be made of the same type
3845 as the returned expression. If EXP is not a comparison, we will most
3846 likely not be returning a useful value and range. */
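/* For example, for EXP of the form "x > 5" this returns x with *PIN_P == 0
   and the range [-, 5] (x lies outside [minimum, 5]); for an unsigned
   variable u, "u <= 5" returns u with *PIN_P == 1 and the range [0, 5].  */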
3849 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3851 enum tree_code code;
3852 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3853 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3855 tree low, high, n_low, n_high;
3857 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3858 and see if we can refine the range. Some of the cases below may not
3859 happen, but it doesn't seem worth worrying about this. We "continue"
3860 the outer loop when we've changed something; otherwise we "break"
3861 the switch, which will "break" the while. */
3864 low = high = build_int_cst (TREE_TYPE (exp), 0);
3868 code = TREE_CODE (exp);
3869 exp_type = TREE_TYPE (exp);
3871 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3873 if (TREE_CODE_LENGTH (code) > 0)
3874 arg0 = TREE_OPERAND (exp, 0);
3875 if (TREE_CODE_CLASS (code) == tcc_comparison
3876 || TREE_CODE_CLASS (code) == tcc_unary
3877 || TREE_CODE_CLASS (code) == tcc_binary)
3878 arg0_type = TREE_TYPE (arg0);
3879 if (TREE_CODE_CLASS (code) == tcc_binary
3880 || TREE_CODE_CLASS (code) == tcc_comparison
3881 || (TREE_CODE_CLASS (code) == tcc_expression
3882 && TREE_CODE_LENGTH (code) > 1))
3883 arg1 = TREE_OPERAND (exp, 1);
3888 case TRUTH_NOT_EXPR:
3889 in_p = ! in_p, exp = arg0;
3892 case EQ_EXPR: case NE_EXPR:
3893 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3894 /* We can only do something if the range is testing for zero
3895 and if the second operand is an integer constant. Note that
3896 saying something is "in" the range we make is done by
3897 complementing IN_P since it will be set in the initial case of
3898 being not equal to zero; "out" is leaving it alone. */
3899 if (low == 0 || high == 0
3900 || ! integer_zerop (low) || ! integer_zerop (high)
3901 || TREE_CODE (arg1) != INTEGER_CST)
3906 case NE_EXPR: /* - [c, c] */
3909 case EQ_EXPR: /* + [c, c] */
3910 in_p = ! in_p, low = high = arg1;
3912 case GT_EXPR: /* - [-, c] */
3913 low = 0, high = arg1;
3915 case GE_EXPR: /* + [c, -] */
3916 in_p = ! in_p, low = arg1, high = 0;
3918 case LT_EXPR: /* - [c, -] */
3919 low = arg1, high = 0;
3921 case LE_EXPR: /* + [-, c] */
3922 in_p = ! in_p, low = 0, high = arg1;
3928 /* If this is an unsigned comparison, we also know that EXP is
3929 greater than or equal to zero. We base the range tests we make
3930 on that fact, so we record it here so we can parse existing
3931 range tests. We test arg0_type since often the return type
3932 of, e.g. EQ_EXPR, is boolean. */
3933 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3935 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3937 build_int_cst (arg0_type, 0),
3941 in_p = n_in_p, low = n_low, high = n_high;
3943 /* If the high bound is missing, but we have a nonzero low
3944 bound, reverse the range so it goes from zero to the low bound
3946 if (high == 0 && low && ! integer_zerop (low))
3949 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3950 integer_one_node, 0);
3951 low = build_int_cst (arg0_type, 0);
3959 /* (-x) IN [a,b] -> x in [-b, -a] */
3960 n_low = range_binop (MINUS_EXPR, exp_type,
3961 build_int_cst (exp_type, 0),
3963 n_high = range_binop (MINUS_EXPR, exp_type,
3964 build_int_cst (exp_type, 0),
3966 low = n_low, high = n_high;
3972 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3973 build_int_cst (exp_type, 1));
3976 case PLUS_EXPR: case MINUS_EXPR:
3977 if (TREE_CODE (arg1) != INTEGER_CST)
3980 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3981 move a constant to the other side. */
3982 if (!TYPE_UNSIGNED (arg0_type)
3983 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3986 /* If EXP is signed, any overflow in the computation is undefined,
3987 so we don't worry about it so long as our computations on
3988 the bounds don't overflow. For unsigned, overflow is defined
3989 and this is exactly the right thing. */
3990 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3991 arg0_type, low, 0, arg1, 0);
3992 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3993 arg0_type, high, 1, arg1, 0);
3994 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3995 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3998 /* Check for an unsigned range which has wrapped around the maximum
3999 value thus making n_high < n_low, and normalize it. */
4000 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4002 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4003 integer_one_node, 0);
4004 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4005 integer_one_node, 0);
4007 /* If the range is of the form +/- [ x+1, x ], we won't
4008 be able to normalize it. But then, it represents the
4009 whole range or the empty set, so make it
4011 if (tree_int_cst_equal (n_low, low)
4012 && tree_int_cst_equal (n_high, high))
4018 low = n_low, high = n_high;
4023 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4024 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4027 if (! INTEGRAL_TYPE_P (arg0_type)
4028 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4029 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4032 n_low = low, n_high = high;
4035 n_low = fold_convert (arg0_type, n_low);
4038 n_high = fold_convert (arg0_type, n_high);
4041 /* If we're converting arg0 from an unsigned type, to exp,
4042 a signed type, we will be doing the comparison as unsigned.
4043 The tests above have already verified that LOW and HIGH
4046 So we have to ensure that we will handle large unsigned
4047 values the same way that the current signed bounds treat
4050 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4053 tree equiv_type = lang_hooks.types.type_for_mode
4054 (TYPE_MODE (arg0_type), 1);
4056 /* A range without an upper bound is, naturally, unbounded.
4057 Since convert would have cropped a very large value, use
4058 the max value for the destination type. */
4060 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4061 : TYPE_MAX_VALUE (arg0_type);
4063 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4064 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4065 fold_convert (arg0_type,
4067 build_int_cst (arg0_type, 1));
4069 /* If the low bound is specified, "and" the range with the
4070 range for which the original unsigned value will be
4074 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4075 1, n_low, n_high, 1,
4076 fold_convert (arg0_type,
4081 in_p = (n_in_p == in_p);
4085 /* Otherwise, "or" the range with the range of the input
4086 that will be interpreted as negative. */
4087 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4088 0, n_low, n_high, 1,
4089 fold_convert (arg0_type,
4094 in_p = (in_p != n_in_p);
4099 low = n_low, high = n_high;
4109 /* If EXP is a constant, we can evaluate whether this is true or false. */
4110 if (TREE_CODE (exp) == INTEGER_CST)
4112 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4114 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4120 *pin_p = in_p, *plow = low, *phigh = high;
4124 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4125 type, TYPE, return an expression to test if EXP is in (or out of, depending
4126 on IN_P) the range. Return 0 if the test couldn't be created. */
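/* For example, a test of EXP in [2, 5] for integral EXP is built as
   (unsigned type) (EXP - 2) <= 3, and a test of EXP in [3, 3] simply as
   EXP == 3.  */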
4129 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4131 tree etype = TREE_TYPE (exp);
4134 #ifdef HAVE_canonicalize_funcptr_for_compare
4135 /* Disable this optimization for function pointer expressions
4136 on targets that require function pointer canonicalization. */
4137 if (HAVE_canonicalize_funcptr_for_compare
4138 && TREE_CODE (etype) == POINTER_TYPE
4139 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4145 value = build_range_check (type, exp, 1, low, high);
4147 return invert_truthvalue (value);
4152 if (low == 0 && high == 0)
4153 return build_int_cst (type, 1);
4156 return fold_build2 (LE_EXPR, type, exp,
4157 fold_convert (etype, high));
4160 return fold_build2 (GE_EXPR, type, exp,
4161 fold_convert (etype, low));
4163 if (operand_equal_p (low, high, 0))
4164 return fold_build2 (EQ_EXPR, type, exp,
4165 fold_convert (etype, low));
4167 if (integer_zerop (low))
4169 if (! TYPE_UNSIGNED (etype))
4171 etype = lang_hooks.types.unsigned_type (etype);
4172 high = fold_convert (etype, high);
4173 exp = fold_convert (etype, exp);
4175 return build_range_check (type, exp, 1, 0, high);
4178 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4179 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4181 unsigned HOST_WIDE_INT lo;
4185 prec = TYPE_PRECISION (etype);
4186 if (prec <= HOST_BITS_PER_WIDE_INT)
4189 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4193 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4194 lo = (unsigned HOST_WIDE_INT) -1;
4197 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4199 if (TYPE_UNSIGNED (etype))
4201 etype = lang_hooks.types.signed_type (etype);
4202 exp = fold_convert (etype, exp);
4204 return fold_build2 (GT_EXPR, type, exp,
4205 build_int_cst (etype, 0));
4209 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4210 This requires wrap-around arithmetic for the type of the expression. */
4211 switch (TREE_CODE (etype))
4214 /* There is no requirement that LOW be within the range of ETYPE
4215 if the latter is a subtype. It must, however, be within the base
4216 type of ETYPE. So be sure we do the subtraction in that type. */
4217 if (TREE_TYPE (etype))
4218 etype = TREE_TYPE (etype);
4223 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4224 TYPE_UNSIGNED (etype));
4231 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4232 if (TREE_CODE (etype) == INTEGER_TYPE
4233 && !TYPE_OVERFLOW_WRAPS (etype))
4235 tree utype, minv, maxv;
4237 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4238 for the type in question, as we rely on this here. */
4239 utype = lang_hooks.types.unsigned_type (etype);
4240 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4241 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4242 integer_one_node, 1);
4243 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4245 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4252 high = fold_convert (etype, high);
4253 low = fold_convert (etype, low);
4254 exp = fold_convert (etype, exp);
4256 value = const_binop (MINUS_EXPR, high, low, 0);
4258 if (value != 0 && !TREE_OVERFLOW (value))
4259 return build_range_check (type,
4260 fold_build2 (MINUS_EXPR, etype, exp, low),
4261 1, build_int_cst (etype, 0), value);
4266 /* Return the predecessor of VAL in its type, handling the infinite case. */
4269 range_predecessor (tree val)
4271 tree type = TREE_TYPE (val);
4273 if (INTEGRAL_TYPE_P (type)
4274 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4277 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4280 /* Return the successor of VAL in its type, handling the infinite case. */
4283 range_successor (tree val)
4285 tree type = TREE_TYPE (val);
4287 if (INTEGRAL_TYPE_P (type)
4288 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4291 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4294 /* Given two ranges, see if we can merge them into one. Return 1 if we
4295 can, 0 if we can't. Set the output range into the specified parameters. */
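/* The merge is effectively an intersection of the two conditions: for
   instance, combining + [2, 10] with + [5, 20] yields + [5, 10], while
   combining - [2, 2] with - [3, 3] yields the adjacent union - [2, 3].  */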
4298 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4299 tree high0, int in1_p, tree low1, tree high1)
4307 int lowequal = ((low0 == 0 && low1 == 0)
4308 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4309 low0, 0, low1, 0)));
4310 int highequal = ((high0 == 0 && high1 == 0)
4311 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4312 high0, 1, high1, 1)));
4314 /* Make range 0 be the range that starts first, or ends last if they
4315 start at the same value. Swap them if that is not already the case. */
4316 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4319 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4320 high1, 1, high0, 1))))
4322 temp = in0_p, in0_p = in1_p, in1_p = temp;
4323 tem = low0, low0 = low1, low1 = tem;
4324 tem = high0, high0 = high1, high1 = tem;
4327 /* Now flag two cases, whether the ranges are disjoint or whether the
4328 second range is totally subsumed in the first. Note that the tests
4329 below are simplified by the ones above. */
4330 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4331 high0, 1, low1, 0));
4332 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4333 high1, 1, high0, 1));
4335 /* We now have four cases, depending on whether we are including or
4336 excluding the two ranges. */
4339 /* If they don't overlap, the result is false. If the second range
4340 is a subset it is the result. Otherwise, the range is from the start
4341 of the second to the end of the first. */
4343 in_p = 0, low = high = 0;
4345 in_p = 1, low = low1, high = high1;
4347 in_p = 1, low = low1, high = high0;
4350 else if (in0_p && ! in1_p)
4352 /* If they don't overlap, the result is the first range. If they are
4353 equal, the result is false. If the second range is a subset of the
4354 first, and the ranges begin at the same place, we go from just after
4355 the end of the second range to the end of the first. If the second
4356 range is not a subset of the first, or if it is a subset and both
4357 ranges end at the same place, the range starts at the start of the
4358 first range and ends just before the second range.
4359 Otherwise, we can't describe this as a single range. */
4361 in_p = 1, low = low0, high = high0;
4362 else if (lowequal && highequal)
4363 in_p = 0, low = high = 0;
4364 else if (subset && lowequal)
4366 low = range_successor (high1);
4370 else if (! subset || highequal)
4373 high = range_predecessor (low1);
4380 else if (! in0_p && in1_p)
4382 /* If they don't overlap, the result is the second range. If the second
4383 is a subset of the first, the result is false. Otherwise,
4384 the range starts just after the first range and ends at the
4385 end of the second. */
4387 in_p = 1, low = low1, high = high1;
4388 else if (subset || highequal)
4389 in_p = 0, low = high = 0;
4392 low = range_successor (high0);
4400 /* The case where we are excluding both ranges. Here the complex case
4401 is if they don't overlap. In that case, the only time we have a
4402 range is if they are adjacent. If the second is a subset of the
4403 first, the result is the first. Otherwise, the range to exclude
4404 starts at the beginning of the first range and ends at the end of the
4408 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4409 range_successor (high0),
4411 in_p = 0, low = low0, high = high1;
4414 /* Canonicalize - [min, x] into - [-, x]. */
4415 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4416 switch (TREE_CODE (TREE_TYPE (low0)))
4419 if (TYPE_PRECISION (TREE_TYPE (low0))
4420 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4424 if (tree_int_cst_equal (low0,
4425 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4429 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4430 && integer_zerop (low0))
4437 /* Canonicalize - [x, max] into - [x, -]. */
4438 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4439 switch (TREE_CODE (TREE_TYPE (high1)))
4442 if (TYPE_PRECISION (TREE_TYPE (high1))
4443 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4447 if (tree_int_cst_equal (high1,
4448 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4452 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4453 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4455 integer_one_node, 1)))
4462 /* The ranges might also be adjacent between the maximum and
4463 minimum values of the given type. For
4464 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4465 return + [x + 1, y - 1]. */
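/* For instance, for an unsigned char operand, - [-, 9] together with
   - [20, -] excludes everything except 10 through 19, so the merged
   result is + [10, 19].  */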
4466 if (low0 == 0 && high1 == 0)
4468 low = range_successor (high0);
4469 high = range_predecessor (low1);
4470 if (low == 0 || high == 0)
4480 in_p = 0, low = low0, high = high0;
4482 in_p = 0, low = low0, high = high1;
4485 *pin_p = in_p, *plow = low, *phigh = high;
4490 /* Subroutine of fold, looking inside expressions of the form
4491 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4492 of the COND_EXPR. This function is being used also to optimize
4493 A op B ? C : A, by reversing the comparison first.
4495 Return a folded expression whose code is not a COND_EXPR
4496 anymore, or NULL_TREE if no folding opportunity is found. */
4499 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4501 enum tree_code comp_code = TREE_CODE (arg0);
4502 tree arg00 = TREE_OPERAND (arg0, 0);
4503 tree arg01 = TREE_OPERAND (arg0, 1);
4504 tree arg1_type = TREE_TYPE (arg1);
4510 /* If we have A op 0 ? A : -A, consider applying the following
4513 A == 0? A : -A same as -A
4514 A != 0? A : -A same as A
4515 A >= 0? A : -A same as abs (A)
4516 A > 0? A : -A same as abs (A)
4517 A <= 0? A : -A same as -abs (A)
4518 A < 0? A : -A same as -abs (A)
4520 None of these transformations work for modes with signed
4521 zeros. If A is +/-0, the first two transformations will
4522 change the sign of the result (from +0 to -0, or vice
4523 versa). The last four will fix the sign of the result,
4524 even though the original expressions could be positive or
4525 negative, depending on the sign of A.
4527 Note that all these transformations are correct if A is
4528 NaN, since the two alternatives (A and -A) are also NaNs. */
4529 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4530 ? real_zerop (arg01)
4531 : integer_zerop (arg01))
4532 && ((TREE_CODE (arg2) == NEGATE_EXPR
4533 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4534 /* In the case that A is of the form X-Y, '-A' (arg2) may
4535 have already been folded to Y-X, check for that. */
4536 || (TREE_CODE (arg1) == MINUS_EXPR
4537 && TREE_CODE (arg2) == MINUS_EXPR
4538 && operand_equal_p (TREE_OPERAND (arg1, 0),
4539 TREE_OPERAND (arg2, 1), 0)
4540 && operand_equal_p (TREE_OPERAND (arg1, 1),
4541 TREE_OPERAND (arg2, 0), 0))))
4546 tem = fold_convert (arg1_type, arg1);
4547 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4550 return pedantic_non_lvalue (fold_convert (type, arg1));
4553 if (flag_trapping_math)
4558 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4559 arg1 = fold_convert (lang_hooks.types.signed_type
4560 (TREE_TYPE (arg1)), arg1);
4561 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4562 return pedantic_non_lvalue (fold_convert (type, tem));
4565 if (flag_trapping_math)
4569 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4570 arg1 = fold_convert (lang_hooks.types.signed_type
4571 (TREE_TYPE (arg1)), arg1);
4572 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4573 return negate_expr (fold_convert (type, tem));
4575 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4579 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4580 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4581 both transformations are correct when A is NaN: A != 0
4582 is then true, and A == 0 is false. */
4584 if (integer_zerop (arg01) && integer_zerop (arg2))
4586 if (comp_code == NE_EXPR)
4587 return pedantic_non_lvalue (fold_convert (type, arg1));
4588 else if (comp_code == EQ_EXPR)
4589 return build_int_cst (type, 0);
4592 /* Try some transformations of A op B ? A : B.
4594 A == B? A : B same as B
4595 A != B? A : B same as A
4596 A >= B? A : B same as max (A, B)
4597 A > B? A : B same as max (B, A)
4598 A <= B? A : B same as min (A, B)
4599 A < B? A : B same as min (B, A)
4601 As above, these transformations don't work in the presence
4602 of signed zeros. For example, if A and B are zeros of
4603 opposite sign, the first two transformations will change
4604 the sign of the result. In the last four, the original
4605 expressions give different results for (A=+0, B=-0) and
4606 (A=-0, B=+0), but the transformed expressions do not.
4608 The first two transformations are correct if either A or B
4609 is a NaN. In the first transformation, the condition will
4610 be false, and B will indeed be chosen. In the case of the
4611 second transformation, the condition A != B will be true,
4612 and A will be chosen.
4614 The conversions to max() and min() are not correct if B is
4615 a number and A is not. The conditions in the original
4616 expressions will be false, so all four give B. The min()
4617 and max() versions would give a NaN instead. */
4618 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4619 /* Avoid these transformations if the COND_EXPR may be used
4620 as an lvalue in the C++ front-end. PR c++/19199. */
4622 || (strcmp (lang_hooks.name, "GNU C++") != 0
4623 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4624 || ! maybe_lvalue_p (arg1)
4625 || ! maybe_lvalue_p (arg2)))
4627 tree comp_op0 = arg00;
4628 tree comp_op1 = arg01;
4629 tree comp_type = TREE_TYPE (comp_op0);
4631 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4632 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4642 return pedantic_non_lvalue (fold_convert (type, arg2));
4644 return pedantic_non_lvalue (fold_convert (type, arg1));
4649 /* In C++ a ?: expression can be an lvalue, so put the
4650 operand which will be used if they are equal first
4651 so that we can convert this back to the
4652 corresponding COND_EXPR. */
4653 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4655 comp_op0 = fold_convert (comp_type, comp_op0);
4656 comp_op1 = fold_convert (comp_type, comp_op1);
4657 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4658 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4659 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4660 return pedantic_non_lvalue (fold_convert (type, tem));
4667 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4669 comp_op0 = fold_convert (comp_type, comp_op0);
4670 comp_op1 = fold_convert (comp_type, comp_op1);
4671 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4672 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4673 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4674 return pedantic_non_lvalue (fold_convert (type, tem));
4678 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4679 return pedantic_non_lvalue (fold_convert (type, arg2));
4682 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4683 return pedantic_non_lvalue (fold_convert (type, arg1));
4686 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4691 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4692 we might still be able to simplify this. For example,
4693 if C1 is one less or one more than C2, this might have started
4694 out as a MIN or MAX and been transformed by this function.
4695 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
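/* For instance, x > 4 ? x : 5 has C1 == C2 - 1 and folds to MAX_EXPR (x, 5),
   while x < 5 ? x : 4 has C1 == C2 + 1 and folds to MIN_EXPR (x, 4).  */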
4697 if (INTEGRAL_TYPE_P (type)
4698 && TREE_CODE (arg01) == INTEGER_CST
4699 && TREE_CODE (arg2) == INTEGER_CST)
4703 /* We can replace A with C1 in this case. */
4704 arg1 = fold_convert (type, arg01);
4705 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4708 /* If C1 is C2 + 1, this is min(A, C2). */
4709 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4711 && operand_equal_p (arg01,
4712 const_binop (PLUS_EXPR, arg2,
4713 build_int_cst (type, 1), 0),
4715 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4720 /* If C1 is C2 - 1, this is min(A, C2). */
4721 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4723 && operand_equal_p (arg01,
4724 const_binop (MINUS_EXPR, arg2,
4725 build_int_cst (type, 1), 0),
4727 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4732 /* If C1 is C2 - 1, this is max(A, C2). */
4733 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4735 && operand_equal_p (arg01,
4736 const_binop (MINUS_EXPR, arg2,
4737 build_int_cst (type, 1), 0),
4739 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4744 /* If C1 is C2 + 1, this is max(A, C2). */
4745 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4747 && operand_equal_p (arg01,
4748 const_binop (PLUS_EXPR, arg2,
4749 build_int_cst (type, 1), 0),
4751 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4765 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4766 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4769 /* EXP is some logical combination of boolean tests. See if we can
4770 merge it into some range test. Return the new tree if so. */
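/* For instance, ch >= '0' && ch <= '9' can be merged into a single range
   check, typically of the form (unsigned) (ch - '0') <= 9.  */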
4773 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4775 int or_op = (code == TRUTH_ORIF_EXPR
4776 || code == TRUTH_OR_EXPR);
4777 int in0_p, in1_p, in_p;
4778 tree low0, low1, low, high0, high1, high;
4779 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4780 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4783 /* If this is an OR operation, invert both sides; we will invert
4784 again at the end. */
4786 in0_p = ! in0_p, in1_p = ! in1_p;
4788 /* If both expressions are the same, if we can merge the ranges, and we
4789 can build the range test, return it or it inverted. If one of the
4790 ranges is always true or always false, consider it to be the same
4791 expression as the other. */
4792 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4793 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4795 && 0 != (tem = (build_range_check (type,
4797 : rhs != 0 ? rhs : integer_zero_node,
4799 return or_op ? invert_truthvalue (tem) : tem;
4801 /* On machines where the branch cost is expensive, if this is a
4802 short-circuited branch and the underlying object on both sides
4803 is the same, make a non-short-circuit operation. */
4804 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4805 && lhs != 0 && rhs != 0
4806 && (code == TRUTH_ANDIF_EXPR
4807 || code == TRUTH_ORIF_EXPR)
4808 && operand_equal_p (lhs, rhs, 0))
4810 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4811 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4812 which cases we can't do this. */
4813 if (simple_operand_p (lhs))
4814 return build2 (code == TRUTH_ANDIF_EXPR
4815 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4818 else if (lang_hooks.decls.global_bindings_p () == 0
4819 && ! CONTAINS_PLACEHOLDER_P (lhs))
4821 tree common = save_expr (lhs);
4823 if (0 != (lhs = build_range_check (type, common,
4824 or_op ? ! in0_p : in0_p,
4826 && (0 != (rhs = build_range_check (type, common,
4827 or_op ? ! in1_p : in1_p,
4829 return build2 (code == TRUTH_ANDIF_EXPR
4830 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4838 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4839 bit value. Arrange things so the extra bits will be set to zero if and
4840 only if C is sign-extended to its full width. If MASK is nonzero,
4841 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4844 unextend (tree c, int p, int unsignedp, tree mask)
4846 tree type = TREE_TYPE (c);
4847 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4850 if (p == modesize || unsignedp)
4853 /* We work by getting just the sign bit into the low-order bit, then
4854 into the high-order bit, then sign-extend. We then XOR that value with C. */
4856 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4857 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4859 /* We must use a signed type in order to get an arithmetic right shift.
4860 However, we must also avoid introducing accidental overflows, so that
4861 a subsequent call to integer_zerop will work. Hence we must
4862 do the type conversion here. At this point, the constant is either
4863 zero or one, and the conversion to a signed type can never overflow.
4864 We could get an overflow if this conversion is done anywhere else. */
4865 if (TYPE_UNSIGNED (type))
4866 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4868 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4869 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4871 temp = const_binop (BIT_AND_EXPR, temp,
4872 fold_convert (TREE_TYPE (c), mask), 0);
4873 /* If necessary, convert the type back to match the type of C. */
4874 if (TYPE_UNSIGNED (type))
4875 temp = fold_convert (type, temp);
4877 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4880 /* Find ways of folding logical expressions of LHS and RHS:
4881 Try to merge two comparisons to the same innermost item.
4882 Look for range tests like "ch >= '0' && ch <= '9'".
4883 Look for combinations of simple terms on machines with expensive branches
4884 and evaluate the RHS unconditionally.
4886 For example, if we have p->a == 2 && p->b == 4 and we can make an
4887 object large enough to span both A and B, we can do this with a comparison
4888 against the object ANDed with a mask.
4890 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4891 operations to do this with one comparison.
4893 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4894 function and the one above.
4896 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4897 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4899 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
4902 We return the simplified tree or 0 if no optimization is possible. */
4905 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4907 /* If this is the "or" of two comparisons, we can do something if
4908 the comparisons are NE_EXPR. If this is the "and", we can do something
4909 if the comparisons are EQ_EXPR. I.e.,
4910 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4912 WANTED_CODE is this operation code. For single bit fields, we can
4913 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4914 comparison for one-bit fields. */
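/* For instance, with two adjacent byte-sized fields b and c, the pair of
   tests a->b == 2 && a->c == 4 can become a single wider load that is
   masked and compared against one constant holding 2 and 4 in the
   positions of b and c; the exact mask and constant depend on the field
   layout and on byte order.  */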
4916 enum tree_code wanted_code;
4917 enum tree_code lcode, rcode;
4918 tree ll_arg, lr_arg, rl_arg, rr_arg;
4919 tree ll_inner, lr_inner, rl_inner, rr_inner;
4920 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4921 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4922 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4923 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4924 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4925 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4926 enum machine_mode lnmode, rnmode;
4927 tree ll_mask, lr_mask, rl_mask, rr_mask;
4928 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4929 tree l_const, r_const;
4930 tree lntype, rntype, result;
4931 int first_bit, end_bit;
4933 tree orig_lhs = lhs, orig_rhs = rhs;
4934 enum tree_code orig_code = code;
4936 /* Start by getting the comparison codes. Fail if anything is volatile.
4937 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4938 it were surrounded with a NE_EXPR. */
4940 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4943 lcode = TREE_CODE (lhs);
4944 rcode = TREE_CODE (rhs);
4946 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4948 lhs = build2 (NE_EXPR, truth_type, lhs,
4949 build_int_cst (TREE_TYPE (lhs), 0));
4953 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4955 rhs = build2 (NE_EXPR, truth_type, rhs,
4956 build_int_cst (TREE_TYPE (rhs), 0));
4960 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4961 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4964 ll_arg = TREE_OPERAND (lhs, 0);
4965 lr_arg = TREE_OPERAND (lhs, 1);
4966 rl_arg = TREE_OPERAND (rhs, 0);
4967 rr_arg = TREE_OPERAND (rhs, 1);
4969 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4970 if (simple_operand_p (ll_arg)
4971 && simple_operand_p (lr_arg))
4974 if (operand_equal_p (ll_arg, rl_arg, 0)
4975 && operand_equal_p (lr_arg, rr_arg, 0))
4977 result = combine_comparisons (code, lcode, rcode,
4978 truth_type, ll_arg, lr_arg);
4982 else if (operand_equal_p (ll_arg, rr_arg, 0)
4983 && operand_equal_p (lr_arg, rl_arg, 0))
4985 result = combine_comparisons (code, lcode,
4986 swap_tree_comparison (rcode),
4987 truth_type, ll_arg, lr_arg);
4993 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4994 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4996 /* If the RHS can be evaluated unconditionally and its operands are
4997 simple, it wins to evaluate the RHS unconditionally on machines
4998 with expensive branches. In this case, this isn't a comparison
4999 that can be merged. Avoid doing this if the RHS is a floating-point
5000 comparison since those can trap. */
5002 if (BRANCH_COST >= 2
5003 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5004 && simple_operand_p (rl_arg)
5005 && simple_operand_p (rr_arg))
5007 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5008 if (code == TRUTH_OR_EXPR
5009 && lcode == NE_EXPR && integer_zerop (lr_arg)
5010 && rcode == NE_EXPR && integer_zerop (rr_arg)
5011 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5012 return build2 (NE_EXPR, truth_type,
5013 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5015 build_int_cst (TREE_TYPE (ll_arg), 0));
5017 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5018 if (code == TRUTH_AND_EXPR
5019 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5020 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5021 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5022 return build2 (EQ_EXPR, truth_type,
5023 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5025 build_int_cst (TREE_TYPE (ll_arg), 0));
5027 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5029 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5030 return build2 (code, truth_type, lhs, rhs);
5035 /* See if the comparisons can be merged. Then get all the parameters for each side. */
5038 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5039 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5043 ll_inner = decode_field_reference (ll_arg,
5044 &ll_bitsize, &ll_bitpos, &ll_mode,
5045 &ll_unsignedp, &volatilep, &ll_mask,
5047 lr_inner = decode_field_reference (lr_arg,
5048 &lr_bitsize, &lr_bitpos, &lr_mode,
5049 &lr_unsignedp, &volatilep, &lr_mask,
5051 rl_inner = decode_field_reference (rl_arg,
5052 &rl_bitsize, &rl_bitpos, &rl_mode,
5053 &rl_unsignedp, &volatilep, &rl_mask,
5055 rr_inner = decode_field_reference (rr_arg,
5056 &rr_bitsize, &rr_bitpos, &rr_mode,
5057 &rr_unsignedp, &volatilep, &rr_mask,
5060 /* It must be true that the inner operation on the lhs of each
5061 comparison must be the same if we are to be able to do anything.
5062 Then see if we have constants. If not, the same must be true for the rhs. */
5064 if (volatilep || ll_inner == 0 || rl_inner == 0
5065 || ! operand_equal_p (ll_inner, rl_inner, 0))
5068 if (TREE_CODE (lr_arg) == INTEGER_CST
5069 && TREE_CODE (rr_arg) == INTEGER_CST)
5070 l_const = lr_arg, r_const = rr_arg;
5071 else if (lr_inner == 0 || rr_inner == 0
5072 || ! operand_equal_p (lr_inner, rr_inner, 0))
5075 l_const = r_const = 0;
5077 /* If either comparison code is not correct for our logical operation,
5078 fail. However, we can convert a one-bit comparison against zero into
5079 the opposite comparison against that bit being set in the field. */
5081 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5082 if (lcode != wanted_code)
5084 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5086 /* Make the left operand unsigned, since we are only interested
5087 in the value of one bit. Otherwise we are doing the wrong thing below. */
5096 /* This is analogous to the code for l_const above. */
5097 if (rcode != wanted_code)
5099 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5108 /* See if we can find a mode that contains both fields being compared on
5109 the left. If we can't, fail. Otherwise, update all constants and masks
5110 to be relative to a field of that size. */
5111 first_bit = MIN (ll_bitpos, rl_bitpos);
5112 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5113 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5114 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5116 if (lnmode == VOIDmode)
5119 lnbitsize = GET_MODE_BITSIZE (lnmode);
5120 lnbitpos = first_bit & ~ (lnbitsize - 1);
5121 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5122 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5124 if (BYTES_BIG_ENDIAN)
5126 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5127 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5130 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5131 size_int (xll_bitpos), 0);
5132 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5133 size_int (xrl_bitpos), 0);
5137 l_const = fold_convert (lntype, l_const);
5138 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5139 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5140 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5141 fold_build1 (BIT_NOT_EXPR,
5145 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5147 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5152 r_const = fold_convert (lntype, r_const);
5153 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5154 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5155 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5156 fold_build1 (BIT_NOT_EXPR,
5160 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5162 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5166 /* If the right sides are not constant, do the same for it. Also,
5167 disallow this optimization if a size or signedness mismatch occurs
5168 between the left and right sides. */
5171 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5172 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5173 /* Make sure the two fields on the right
5174 correspond to the left without being swapped. */
5175 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5178 first_bit = MIN (lr_bitpos, rr_bitpos);
5179 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5180 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5181 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5183 if (rnmode == VOIDmode)
5186 rnbitsize = GET_MODE_BITSIZE (rnmode);
5187 rnbitpos = first_bit & ~ (rnbitsize - 1);
5188 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5189 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5191 if (BYTES_BIG_ENDIAN)
5193 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5194 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5197 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5198 size_int (xlr_bitpos), 0);
5199 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5200 size_int (xrr_bitpos), 0);
5202 /* Make a mask that corresponds to both fields being compared.
5203 Do this for both items being compared. If the operands are the
5204 same size and the bits being compared are in the same position
5205 then we can do this by masking both and comparing the masked results. */
5207 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5208 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5209 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5211 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5212 ll_unsignedp || rl_unsignedp);
5213 if (! all_ones_mask_p (ll_mask, lnbitsize))
5214 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5216 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5217 lr_unsignedp || rr_unsignedp);
5218 if (! all_ones_mask_p (lr_mask, rnbitsize))
5219 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5221 return build2 (wanted_code, truth_type, lhs, rhs);
5224 /* There is still another way we can do something: If both pairs of
5225 fields being compared are adjacent, we may be able to make a wider
5226 field containing them both.
5228 Note that we still must mask the lhs/rhs expressions. Furthermore,
5229 the mask must be shifted to account for the shift done by
5230 make_bit_field_ref. */
5231 if ((ll_bitsize + ll_bitpos == rl_bitpos
5232 && lr_bitsize + lr_bitpos == rr_bitpos)
5233 || (ll_bitpos == rl_bitpos + rl_bitsize
5234 && lr_bitpos == rr_bitpos + rr_bitsize))
5238 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5239 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5240 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5241 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5243 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5244 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5245 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5246 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5248 /* Convert to the smaller type before masking out unwanted bits. */
5250 if (lntype != rntype)
5252 if (lnbitsize > rnbitsize)
5254 lhs = fold_convert (rntype, lhs);
5255 ll_mask = fold_convert (rntype, ll_mask);
5258 else if (lnbitsize < rnbitsize)
5260 rhs = fold_convert (lntype, rhs);
5261 lr_mask = fold_convert (lntype, lr_mask);
5266 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5267 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5269 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5270 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5272 return build2 (wanted_code, truth_type, lhs, rhs);
5278 /* Handle the case of comparisons with constants. If there is something in
5279 common between the masks, those bits of the constants must be the same.
5280 If not, the condition is always false. Test for this to avoid generating
5281 incorrect code below. */
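/* For instance, (x & 3) == 1 && (x & 1) == 0 tests the common bit 0
   against conflicting values, so the whole expression is always false.  */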
5282 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5283 if (! integer_zerop (result)
5284 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5285 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5287 if (wanted_code == NE_EXPR)
5289 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5290 return constant_boolean_node (true, truth_type);
5294 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5295 return constant_boolean_node (false, truth_type);
5299 /* Construct the expression we will return. First get the component
5300 reference we will make. Unless the mask is all ones the width of
5301 that field, perform the mask operation. Then compare with the merged constant. */
5303 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5304 ll_unsignedp || rl_unsignedp);
5306 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5307 if (! all_ones_mask_p (ll_mask, lnbitsize))
5308 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5310 return build2 (wanted_code, truth_type, result,
5311 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5314 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant. */
5318 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5321 enum tree_code op_code;
5322 tree comp_const = op1;
5324 int consts_equal, consts_lt;
5327 STRIP_SIGN_NOPS (arg0);
5329 op_code = TREE_CODE (arg0);
5330 minmax_const = TREE_OPERAND (arg0, 1);
5331 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5332 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5333 inner = TREE_OPERAND (arg0, 0);
5335 /* If something does not permit us to optimize, return the original tree. */
5336 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5337 || TREE_CODE (comp_const) != INTEGER_CST
5338 || TREE_OVERFLOW (comp_const)
5339 || TREE_CODE (minmax_const) != INTEGER_CST
5340 || TREE_OVERFLOW (minmax_const))
5343 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5344 and GT_EXPR, doing the rest with recursive calls using logical simplifications. */
5348 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5350 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5353 return invert_truthvalue (tem);
5359 fold_build2 (TRUTH_ORIF_EXPR, type,
5360 optimize_minmax_comparison
5361 (EQ_EXPR, type, arg0, comp_const),
5362 optimize_minmax_comparison
5363 (GT_EXPR, type, arg0, comp_const));
5366 if (op_code == MAX_EXPR && consts_equal)
5367 /* MAX (X, 0) == 0 -> X <= 0 */
5368 return fold_build2 (LE_EXPR, type, inner, comp_const);
5370 else if (op_code == MAX_EXPR && consts_lt)
5371 /* MAX (X, 0) == 5 -> X == 5 */
5372 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5374 else if (op_code == MAX_EXPR)
5375 /* MAX (X, 0) == -1 -> false */
5376 return omit_one_operand (type, integer_zero_node, inner);
5378 else if (consts_equal)
5379 /* MIN (X, 0) == 0 -> X >= 0 */
5380 return fold_build2 (GE_EXPR, type, inner, comp_const);
5383 /* MIN (X, 0) == 5 -> false */
5384 return omit_one_operand (type, integer_zero_node, inner);
5387 /* MIN (X, 0) == -1 -> X == -1 */
5388 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5391 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5392 /* MAX (X, 0) > 0 -> X > 0
5393 MAX (X, 0) > 5 -> X > 5 */
5394 return fold_build2 (GT_EXPR, type, inner, comp_const);
5396 else if (op_code == MAX_EXPR)
5397 /* MAX (X, 0) > -1 -> true */
5398 return omit_one_operand (type, integer_one_node, inner);
5400 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5401 /* MIN (X, 0) > 0 -> false
5402 MIN (X, 0) > 5 -> false */
5403 return omit_one_operand (type, integer_zero_node, inner);
5406 /* MIN (X, 0) > -1 -> X > -1 */
5407 return fold_build2 (GT_EXPR, type, inner, comp_const);
5414 /* T is an integer expression that is being multiplied, divided, or taken a
5415 modulus (CODE says which and what kind of divide or modulus) by a
5416 constant C. See if we can eliminate that operation by folding it with
5417 other operations already in T. WIDE_TYPE, if non-null, is a type that
5418 should be used for the computation if wider than our type.
5420 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5421 (X * 2) + (Y * 4). We must, however, be assured that either the original
5422 expression would not overflow or that overflow is undefined for the type
5423 in the language in question.
5425 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5426 the machine has a multiply-accumulate insn or that this is part of an
5427 addressing calculation.
5429 If we return a non-null expression, it is an equivalent form of the
5430 original computation, but need not be in the original type. */
5433 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5435 /* To avoid exponential search depth, refuse to allow recursion past
5436 three levels. Beyond that (1) it's highly unlikely that we'll find
5437 something interesting and (2) we've probably processed it before
5438 when we built the inner expression. */
5447 ret = extract_muldiv_1 (t, c, code, wide_type);
5454 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5456 tree type = TREE_TYPE (t);
5457 enum tree_code tcode = TREE_CODE (t);
5458 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5459 > GET_MODE_SIZE (TYPE_MODE (type)))
5460 ? wide_type : type);
5462 int same_p = tcode == code;
5463 tree op0 = NULL_TREE, op1 = NULL_TREE;
5465 /* Don't deal with constants of zero here; they confuse the code below. */
5466 if (integer_zerop (c))
5469 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5470 op0 = TREE_OPERAND (t, 0);
5472 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5473 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5475 /* Note that we need not handle conditional operations here since fold
5476 already handles those cases. So just do arithmetic here. */
5480 /* For a constant, we can always simplify if we are a multiply
5481 or (for divide and modulus) if it is a multiple of our constant. */
5482 if (code == MULT_EXPR
5483 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5484 return const_binop (code, fold_convert (ctype, t),
5485 fold_convert (ctype, c), 0);
5488 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5489 /* If op0 is an expression ... */
5490 if ((COMPARISON_CLASS_P (op0)
5491 || UNARY_CLASS_P (op0)
5492 || BINARY_CLASS_P (op0)
5493 || EXPRESSION_CLASS_P (op0))
5494 /* ... and is unsigned, and its type is smaller than ctype,
5495 then we cannot pass through as widening. */
5496 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5497 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5498 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5499 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5500 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5501 /* ... or this is a truncation (t is narrower than op0),
5502 then we cannot pass through this narrowing. */
5503 || (GET_MODE_SIZE (TYPE_MODE (type))
5504 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5505 /* ... or signedness changes for division or modulus,
5506 then we cannot pass through this conversion. */
5507 || (code != MULT_EXPR
5508 && (TYPE_UNSIGNED (ctype)
5509 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5512 /* Pass the constant down and see if we can make a simplification. If
5513 we can, replace this expression with the inner simplification for
5514 possible later conversion to our or some other type. */
5515 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5516 && TREE_CODE (t2) == INTEGER_CST
5517 && !TREE_OVERFLOW (t2)
5518 && (0 != (t1 = extract_muldiv (op0, t2, code,
5520 ? ctype : NULL_TREE))))
5525 /* If widening the type changes it from signed to unsigned, then we
5526 must avoid building ABS_EXPR itself as unsigned. */
5527 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5529 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5530 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5532 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5533 return fold_convert (ctype, t1);
5539 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5540 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5543 case MIN_EXPR: case MAX_EXPR:
5544 /* If widening the type changes the signedness, then we can't perform
5545 this optimization as that changes the result. */
5546 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5549 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5550 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5551 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5553 if (tree_int_cst_sgn (c) < 0)
5554 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5556 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5557 fold_convert (ctype, t2));
5561 case LSHIFT_EXPR: case RSHIFT_EXPR:
5562 /* If the second operand is constant, this is a multiplication
5563 or floor division, by a power of two, so we can treat it that
5564 way unless the multiplier or divisor overflows. Signed
5565 left-shift overflow is implementation-defined rather than
5566 undefined in C90, so do not convert signed left shift into multiplication. */
5568 if (TREE_CODE (op1) == INTEGER_CST
5569 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5570 /* const_binop may not detect overflow correctly,
5571 so check for it explicitly here. */
5572 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5573 && TREE_INT_CST_HIGH (op1) == 0
5574 && 0 != (t1 = fold_convert (ctype,
5575 const_binop (LSHIFT_EXPR,
5578 && !TREE_OVERFLOW (t1))
5579 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5580 ? MULT_EXPR : FLOOR_DIV_EXPR,
5581 ctype, fold_convert (ctype, op0), t1),
5582 c, code, wide_type);
5585 case PLUS_EXPR: case MINUS_EXPR:
5586 /* See if we can eliminate the operation on both sides. If we can, we
5587 can return a new PLUS or MINUS. If we can't, the only remaining
5588 cases where we can do anything are if the second operand is a constant. */
5590 t1 = extract_muldiv (op0, c, code, wide_type);
5591 t2 = extract_muldiv (op1, c, code, wide_type);
5592 if (t1 != 0 && t2 != 0
5593 && (code == MULT_EXPR
5594 /* If not multiplication, we can only do this if both operands
5595 are divisible by c. */
5596 || (multiple_of_p (ctype, op0, c)
5597 && multiple_of_p (ctype, op1, c))))
5598 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5599 fold_convert (ctype, t2));
5601 /* If this was a subtraction, negate OP1 and set it to be an addition.
5602 This simplifies the logic below. */
5603 if (tcode == MINUS_EXPR)
5604 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5606 if (TREE_CODE (op1) != INTEGER_CST)
5609 /* If either OP1 or C are negative, this optimization is not safe for
5610 some of the division and remainder types while for others we need
5611 to change the code. */
5612 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5614 if (code == CEIL_DIV_EXPR)
5615 code = FLOOR_DIV_EXPR;
5616 else if (code == FLOOR_DIV_EXPR)
5617 code = CEIL_DIV_EXPR;
5618 else if (code != MULT_EXPR
5619 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5623 /* If it's a multiply or a division/modulus operation of a multiple
5624 of our constant, do the operation and verify it doesn't overflow. */
5625 if (code == MULT_EXPR
5626 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5628 op1 = const_binop (code, fold_convert (ctype, op1),
5629 fold_convert (ctype, c), 0);
5630 /* We allow the constant to overflow with wrapping semantics. */
5632 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5638 /* If we have an unsigned type that is not a sizetype, we cannot widen
5639 the operation since it will change the result if the original
5640 computation overflowed. */
5641 if (TYPE_UNSIGNED (ctype)
5642 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5646 /* If we were able to eliminate our operation from the first side,
5647 apply our operation to the second side and reform the PLUS. */
5648 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5649 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5651 /* The last case is if we are a multiply. In that case, we can
5652 apply the distributive law to commute the multiply and addition
5653 if the multiplication of the constants doesn't overflow. */
5654 if (code == MULT_EXPR)
5655 return fold_build2 (tcode, ctype,
5656 fold_build2 (code, ctype,
5657 fold_convert (ctype, op0),
5658 fold_convert (ctype, c)),
5664 /* We have a special case here if we are doing something like
5665 (C * 8) % 4 since we know that's zero. */
5666 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5667 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5668 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5669 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5670 return omit_one_operand (type, integer_zero_node, op0);
5672 /* ... fall through ... */
5674 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5675 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5676 /* If we can extract our operation from the LHS, do so and return a
5677 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5678 do something only if the second operand is a constant. */
5680 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5681 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5682 fold_convert (ctype, op1));
5683 else if (tcode == MULT_EXPR && code == MULT_EXPR
5684 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5685 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5686 fold_convert (ctype, t1));
5687 else if (TREE_CODE (op1) != INTEGER_CST)
5690 /* If these are the same operation types, we can associate them
5691 assuming no overflow. */
5693 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5694 fold_convert (ctype, c), 0))
5695 && !TREE_OVERFLOW (t1))
5696 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5698 /* If these operations "cancel" each other, we have the main
5699 optimizations of this pass, which occur when either constant is a
5700 multiple of the other, in which case we replace this with either an
5701 operation of CODE or TCODE.
5703 If we have an unsigned type that is not a sizetype, we cannot do
5705 this since it will change the result if the original computation overflowed. */
5706 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5707 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5708 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5709 || (tcode == MULT_EXPR
5710 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5711 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5713 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5714 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5715 fold_convert (ctype,
5716 const_binop (TRUNC_DIV_EXPR,
5718 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5719 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5720 fold_convert (ctype,
5721 const_binop (TRUNC_DIV_EXPR,
5733 /* Return a node which has the indicated constant VALUE (either 0 or
5734 1), and is of the indicated TYPE. */
5737 constant_boolean_node (int value, tree type)
5739 if (type == integer_type_node)
5740 return value ? integer_one_node : integer_zero_node;
5741 else if (type == boolean_type_node)
5742 return value ? boolean_true_node : boolean_false_node;
5744 return build_int_cst (type, value);
5748 /* Return true if expr looks like an ARRAY_REF and set base and
5749 offset to the appropriate trees. If there is no offset,
5750 offset is set to NULL_TREE. Base will be canonicalized to
5751 something you can get the element type from using
5752 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5753 in bytes to the base. */
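/* For instance, &a[i] yields base a and offset i * sizeof (a[0]), while a
   plain pointer variable p yields base p with a NULL_TREE offset.  */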
5756 extract_array_ref (tree expr, tree *base, tree *offset)
5758 /* One canonical form is a PLUS_EXPR with the first
5759 argument being an ADDR_EXPR with a possible NOP_EXPR attached. */
5761 if (TREE_CODE (expr) == PLUS_EXPR)
5763 tree op0 = TREE_OPERAND (expr, 0);
5764 tree inner_base, dummy1;
5765 /* Strip NOP_EXPRs here because the C frontends and/or
5766 folders may present us with (int *)&x.a + 4B. */
5768 if (extract_array_ref (op0, &inner_base, &dummy1))
5771 if (dummy1 == NULL_TREE)
5772 *offset = TREE_OPERAND (expr, 1);
5774 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5775 dummy1, TREE_OPERAND (expr, 1));
5779 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5780 which we transform into an ADDR_EXPR with appropriate
5781 offset. For other arguments to the ADDR_EXPR we assume
5782 zero offset and as such do not care about the ADDR_EXPR
5783 type and strip possible nops from it. */
5784 else if (TREE_CODE (expr) == ADDR_EXPR)
5786 tree op0 = TREE_OPERAND (expr, 0);
5787 if (TREE_CODE (op0) == ARRAY_REF)
5789 tree idx = TREE_OPERAND (op0, 1);
5790 *base = TREE_OPERAND (op0, 0);
5791 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5792 array_ref_element_size (op0));
5796 /* Handle array-to-pointer decay as &a. */
5797 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5798 *base = TREE_OPERAND (expr, 0);
5801 *offset = NULL_TREE;
5805 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5806 else if (SSA_VAR_P (expr)
5807 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5810 *offset = NULL_TREE;
5818 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5819 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5820 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5821 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5822 COND is the first argument to CODE; otherwise (as in the example
5823 given here), it is the second argument. TYPE is the type of the
5824 original expression. Return NULL_TREE if no simplification is possible. */
5828 fold_binary_op_with_conditional_arg (enum tree_code code,
5829 tree type, tree op0, tree op1,
5830 tree cond, tree arg, int cond_first_p)
5832 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5833 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5834 tree test, true_value, false_value;
5835 tree lhs = NULL_TREE;
5836 tree rhs = NULL_TREE;
5838 /* This transformation is only worthwhile if we don't have to wrap
5839 arg in a SAVE_EXPR, and the operation can be simplified on at least
5840 one of the branches once it's pushed inside the COND_EXPR. */
5841 if (!TREE_CONSTANT (arg))
5844 if (TREE_CODE (cond) == COND_EXPR)
5846 test = TREE_OPERAND (cond, 0);
5847 true_value = TREE_OPERAND (cond, 1);
5848 false_value = TREE_OPERAND (cond, 2);
5849 /* If this operand throws an expression, then it does not make
5850 sense to try to perform a logical or arithmetic operation involving it. */
5852 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5854 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5859 tree testtype = TREE_TYPE (cond);
5861 true_value = constant_boolean_node (true, testtype);
5862 false_value = constant_boolean_node (false, testtype);
5865 arg = fold_convert (arg_type, arg);
5868 true_value = fold_convert (cond_type, true_value);
5870 lhs = fold_build2 (code, type, true_value, arg);
5872 lhs = fold_build2 (code, type, arg, true_value);
5876 false_value = fold_convert (cond_type, false_value);
5878 rhs = fold_build2 (code, type, false_value, arg);
5880 rhs = fold_build2 (code, type, arg, false_value);
5883 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5884 return fold_convert (type, test);
5888 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5890 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5891 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5892 ADDEND is the same as X.
5894 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5895 and finite. The problematic cases are when X is zero, and its mode
5896 has signed zeros. In the case of rounding towards -infinity,
5897 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5898 modes, X + 0 is not the same as X because -0 + 0 is +0. */
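/* For instance, when X is -0.0, X + 0.0 yields +0.0 except when rounding
   towards -infinity, so the addition cannot simply be dropped; X - 0.0
   leaves -0.0 unchanged and is only a problem when rounding towards
   -infinity, where +0.0 - 0.0 yields -0.0.  */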
5901 fold_real_zero_addition_p (tree type, tree addend, int negate)
5903 if (!real_zerop (addend))
5906 /* Don't allow the fold with -fsignaling-nans. */
5907 if (HONOR_SNANS (TYPE_MODE (type)))
5910 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5911 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5914 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5915 if (TREE_CODE (addend) == REAL_CST
5916 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5919 /* The mode has signed zeros, and we have to honor their sign.
5920 In this situation, there is only one case we can return true for.
5921 X - 0 is the same as X unless rounding towards -infinity is in effect. */
5923 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5926 /* Subroutine of fold() that checks comparisons of built-in math
5927 functions against real constants.
5929 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5930 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5931 is the type of the result and ARG0 and ARG1 are the operands of the
5932 comparison. ARG1 must be a TREE_REAL_CST.
5934 The function returns the constant folded tree if a simplification
5935 can be made, and NULL_TREE otherwise. */
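/* For instance, sqrt (x) > 2.0 can be folded to x > 4.0, and
   sqrt (x) < 3.0 to x < 9.0 when NaNs need not be honored, or to
   x >= 0.0 && x < 9.0 otherwise.  */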
5938 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5939 tree type, tree arg0, tree arg1)
5943 if (BUILTIN_SQRT_P (fcode))
5945 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5946 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5948 c = TREE_REAL_CST (arg1);
5949 if (REAL_VALUE_NEGATIVE (c))
5951 /* sqrt(x) < y is always false, if y is negative. */
5952 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5953 return omit_one_operand (type, integer_zero_node, arg);
5955 /* sqrt(x) > y is always true, if y is negative and we
5956 don't care about NaNs, i.e. negative values of x. */
5957 if (code == NE_EXPR || !HONOR_NANS (mode))
5958 return omit_one_operand (type, integer_one_node, arg);
5960 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5961 return fold_build2 (GE_EXPR, type, arg,
5962 build_real (TREE_TYPE (arg), dconst0));
5964 else if (code == GT_EXPR || code == GE_EXPR)
5968 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5969 real_convert (&c2, mode, &c2);
5971 if (REAL_VALUE_ISINF (c2))
5973 /* sqrt(x) > y is x == +Inf, when y is very large. */
5974 if (HONOR_INFINITIES (mode))
5975 return fold_build2 (EQ_EXPR, type, arg,
5976 build_real (TREE_TYPE (arg), c2));
5978 /* sqrt(x) > y is always false, when y is very large
5979 and we don't care about infinities. */
5980 return omit_one_operand (type, integer_zero_node, arg);
5983 /* sqrt(x) > c is the same as x > c*c. */
5984 return fold_build2 (code, type, arg,
5985 build_real (TREE_TYPE (arg), c2));
5987 else if (code == LT_EXPR || code == LE_EXPR)
5991 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5992 real_convert (&c2, mode, &c2);
5994 if (REAL_VALUE_ISINF (c2))
5996 /* sqrt(x) < y is always true, when y is a very large
5997 value and we don't care about NaNs or Infinities. */
5998 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5999 return omit_one_operand (type, integer_one_node, arg);
6001 /* sqrt(x) < y is x != +Inf when y is very large and we
6002 don't care about NaNs. */
6003 if (! HONOR_NANS (mode))
6004 return fold_build2 (NE_EXPR, type, arg,
6005 build_real (TREE_TYPE (arg), c2));
6007 /* sqrt(x) < y is x >= 0 when y is very large and we
6008 don't care about Infinities. */
6009 if (! HONOR_INFINITIES (mode))
6010 return fold_build2 (GE_EXPR, type, arg,
6011 build_real (TREE_TYPE (arg), dconst0));
6013 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6014 if (lang_hooks.decls.global_bindings_p () != 0
6015 || CONTAINS_PLACEHOLDER_P (arg))
6018 arg = save_expr (arg);
6019 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6020 fold_build2 (GE_EXPR, type, arg,
6021 build_real (TREE_TYPE (arg),
6023 fold_build2 (NE_EXPR, type, arg,
6024 build_real (TREE_TYPE (arg),
6028 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6029 if (! HONOR_NANS (mode))
6030 return fold_build2 (code, type, arg,
6031 build_real (TREE_TYPE (arg), c2));
6033 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6034 if (lang_hooks.decls.global_bindings_p () == 0
6035 && ! CONTAINS_PLACEHOLDER_P (arg))
6037 arg = save_expr (arg);
6038 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6039 fold_build2 (GE_EXPR, type, arg,
6040 build_real (TREE_TYPE (arg),
6042 fold_build2 (code, type, arg,
6043 build_real (TREE_TYPE (arg),
6052 /* Subroutine of fold() that optimizes comparisons against Infinities,
6053 either +Inf or -Inf.
6055 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6056 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6057 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6059 The function returns the constant folded tree if a simplification
6060 can be made, and NULL_TREE otherwise. */
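/* For instance, for a double operand, x < +Inf becomes x <= DBL_MAX and
   x >= +Inf becomes x > DBL_MAX.  */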
6063 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6065 enum machine_mode mode;
6066 REAL_VALUE_TYPE max;
6070 mode = TYPE_MODE (TREE_TYPE (arg0));
6072 /* For negative infinity swap the sense of the comparison. */
6073 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6075 code = swap_tree_comparison (code);
6080 /* x > +Inf is always false, if we ignore sNaNs. */
6081 if (HONOR_SNANS (mode))
6083 return omit_one_operand (type, integer_zero_node, arg0);
6086 /* x <= +Inf is always true, if we don't care about NaNs. */
6087 if (! HONOR_NANS (mode))
6088 return omit_one_operand (type, integer_one_node, arg0);
6090 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6091 if (lang_hooks.decls.global_bindings_p () == 0
6092 && ! CONTAINS_PLACEHOLDER_P (arg0))
6094 arg0 = save_expr (arg0);
6095 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6101 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6102 real_maxval (&max, neg, mode);
6103 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6104 arg0, build_real (TREE_TYPE (arg0), max));
6107 /* x < +Inf is always equal to x <= DBL_MAX. */
6108 real_maxval (&max, neg, mode);
6109 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6110 arg0, build_real (TREE_TYPE (arg0), max));
6113 /* x != +Inf is always equal to !(x > DBL_MAX). */
6114 real_maxval (&max, neg, mode);
6115 if (! HONOR_NANS (mode))
6116 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6117 arg0, build_real (TREE_TYPE (arg0), max));
6119 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6120 arg0, build_real (TREE_TYPE (arg0), max));
6121 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6130 /* Subroutine of fold() that optimizes comparisons of a division by
6131 a nonzero integer constant against an integer constant, i.e. X/C1 op C2.
6134 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6135 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6136 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6138 The function returns the constant folded tree if a simplification
6139 can be made, and NULL_TREE otherwise. */
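/* For instance, with truncating signed division, x / 4 == 2 holds exactly
   when 8 <= x && x <= 11, so the comparison becomes that range check and
   x / 4 != 2 becomes the inverted check.  */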
6142 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6144 tree prod, tmp, hi, lo;
6145 tree arg00 = TREE_OPERAND (arg0, 0);
6146 tree arg01 = TREE_OPERAND (arg0, 1);
6147 unsigned HOST_WIDE_INT lpart;
6148 HOST_WIDE_INT hpart;
6149 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6153 /* We have to do this the hard way to detect unsigned overflow.
6154 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6155 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6156 TREE_INT_CST_HIGH (arg01),
6157 TREE_INT_CST_LOW (arg1),
6158 TREE_INT_CST_HIGH (arg1),
6159 &lpart, &hpart, unsigned_p);
6160 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6162 neg_overflow = false;
6166 tmp = int_const_binop (MINUS_EXPR, arg01,
6167 build_int_cst (TREE_TYPE (arg01), 1), 0);
6170 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6171 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6172 TREE_INT_CST_HIGH (prod),
6173 TREE_INT_CST_LOW (tmp),
6174 TREE_INT_CST_HIGH (tmp),
6175 &lpart, &hpart, unsigned_p);
6176 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6177 -1, overflow | TREE_OVERFLOW (prod));
6179 else if (tree_int_cst_sgn (arg01) >= 0)
6181 tmp = int_const_binop (MINUS_EXPR, arg01,
6182 build_int_cst (TREE_TYPE (arg01), 1), 0);
6183 switch (tree_int_cst_sgn (arg1))
6186 neg_overflow = true;
6187 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6192 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6197 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6207 /* A negative divisor reverses the relational operators. */
6208 code = swap_tree_comparison (code);
6210 tmp = int_const_binop (PLUS_EXPR, arg01,
6211 build_int_cst (TREE_TYPE (arg01), 1), 0);
6212 switch (tree_int_cst_sgn (arg1))
6215 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6220 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6225 neg_overflow = true;
6226 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6238 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6239 return omit_one_operand (type, integer_zero_node, arg00);
6240 if (TREE_OVERFLOW (hi))
6241 return fold_build2 (GE_EXPR, type, arg00, lo);
6242 if (TREE_OVERFLOW (lo))
6243 return fold_build2 (LE_EXPR, type, arg00, hi);
6244 return build_range_check (type, arg00, 1, lo, hi);
6247 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6248 return omit_one_operand (type, integer_one_node, arg00);
6249 if (TREE_OVERFLOW (hi))
6250 return fold_build2 (LT_EXPR, type, arg00, lo);
6251 if (TREE_OVERFLOW (lo))
6252 return fold_build2 (GT_EXPR, type, arg00, hi);
6253 return build_range_check (type, arg00, 0, lo, hi);
6256 if (TREE_OVERFLOW (lo))
6258 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6259 return omit_one_operand (type, tmp, arg00);
6261 return fold_build2 (LT_EXPR, type, arg00, lo);
6264 if (TREE_OVERFLOW (hi))
6266 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6267 return omit_one_operand (type, tmp, arg00);
6269 return fold_build2 (LE_EXPR, type, arg00, hi);
6272 if (TREE_OVERFLOW (hi))
6274 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6275 return omit_one_operand (type, tmp, arg00);
6277 return fold_build2 (GT_EXPR, type, arg00, hi);
6280 if (TREE_OVERFLOW (lo))
6282 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6283 return omit_one_operand (type, tmp, arg00);
6285 return fold_build2 (GE_EXPR, type, arg00, lo);
6295 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6296 equality/inequality test, then return a simplified form of the test
6297 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
6301 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6304 /* If this is testing a single bit, we can optimize the test. */
6305 if ((code == NE_EXPR || code == EQ_EXPR)
6306 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6307 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6309 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6310 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
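/* Illustrative example (assumes a 32-bit signed A): (A & 0x80000000) != 0
   folds to A < 0, and (A & 0x80000000) == 0 folds to A >= 0.  */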
6311 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6313 if (arg00 != NULL_TREE
6314 /* This is only a win if casting to a signed type is cheap,
6315 i.e. when arg00's type is not a partial mode. */
6316 && TYPE_PRECISION (TREE_TYPE (arg00))
6317 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6319 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6320 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6321 result_type, fold_convert (stype, arg00),
6322 build_int_cst (stype, 0));
6329 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6330 equality/inequality test, then return a simplified form of
6331 the test using shifts and logical operations. Otherwise return
6332 NULL. TYPE is the desired result type. */
6335 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6338 /* If this is testing a single bit, we can optimize the test. */
6339 if ((code == NE_EXPR || code == EQ_EXPR)
6340 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6341 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6343 tree inner = TREE_OPERAND (arg0, 0);
6344 tree type = TREE_TYPE (arg0);
6345 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6346 enum machine_mode operand_mode = TYPE_MODE (type);
6348 tree signed_type, unsigned_type, intermediate_type;
6351 /* First, see if we can fold the single bit test into a sign-bit
6353 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6358 /* Otherwise we have (A & C) != 0 where C is a single bit,
6359 convert that into ((A >> C2) & 1), where C2 = log2(C).
6360 Similarly for (A & C) == 0. */
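/* Illustrative example (not part of the original comment): (A & 8) != 0
   becomes (A >> 3) & 1, and (A & 8) == 0 becomes ((A >> 3) ^ 1) & 1, with
   the shift and mask carried out in the intermediate type chosen below.  */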
6362 /* If INNER is a right shift of a constant and it plus BITNUM does
6363 not overflow, adjust BITNUM and INNER. */
6364 if (TREE_CODE (inner) == RSHIFT_EXPR
6365 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6366 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6367 && bitnum < TYPE_PRECISION (type)
6368 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6369 bitnum - TYPE_PRECISION (type)))
6371 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6372 inner = TREE_OPERAND (inner, 0);
6375 /* If we are going to be able to omit the AND below, we must do our
6376 operations as unsigned. If we must use the AND, we have a choice.
6377 Normally unsigned is faster, but for some machines signed is. */
6378 #ifdef LOAD_EXTEND_OP
6379 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6380 && !flag_syntax_only) ? 0 : 1;
6385 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6386 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6387 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6388 inner = fold_convert (intermediate_type, inner);
6391 inner = build2 (RSHIFT_EXPR, intermediate_type,
6392 inner, size_int (bitnum));
6394 one = build_int_cst (intermediate_type, 1);
6396 if (code == EQ_EXPR)
6397 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6399 /* Put the AND last so it can combine with more things. */
6400 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6402 /* Make sure to return the proper type. */
6403 inner = fold_convert (result_type, inner);
6410 /* Check whether we are allowed to reorder operands arg0 and arg1,
6411 such that the evaluation of arg1 occurs before arg0. */
6414 reorder_operands_p (tree arg0, tree arg1)
6416 if (! flag_evaluation_order)
6418 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6420 return ! TREE_SIDE_EFFECTS (arg0)
6421 && ! TREE_SIDE_EFFECTS (arg1);
6424 /* Test whether it is preferable to swap two operands, ARG0 and
6425 ARG1, for example because ARG0 is an integer constant and ARG1
6426 isn't. If REORDER is true, only recommend swapping if we can
6427 evaluate the operands in reverse order. */
6430 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6432 STRIP_SIGN_NOPS (arg0);
6433 STRIP_SIGN_NOPS (arg1);
6435 if (TREE_CODE (arg1) == INTEGER_CST)
6437 if (TREE_CODE (arg0) == INTEGER_CST)
6440 if (TREE_CODE (arg1) == REAL_CST)
6442 if (TREE_CODE (arg0) == REAL_CST)
6445 if (TREE_CODE (arg1) == COMPLEX_CST)
6447 if (TREE_CODE (arg0) == COMPLEX_CST)
6450 if (TREE_CONSTANT (arg1))
6452 if (TREE_CONSTANT (arg0))
6458 if (reorder && flag_evaluation_order
6459 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6467 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6468 for commutative and comparison operators. Ensuring a canonical
6469 form allows the optimizers to find additional redundancies without
6470 having to explicitly check for both orderings. */
6471 if (TREE_CODE (arg0) == SSA_NAME
6472 && TREE_CODE (arg1) == SSA_NAME
6473 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6479 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6480 ARG0 is extended to a wider type. */
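/* Illustrative example (assumes 32-bit int and 8-bit unsigned char): for an
   unsigned char C, (int) C == 42 folds to C == 42 in the narrower type,
   while (int) C == 1000 folds to constant false, keeping C only for its
   side effects.  */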
6483 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6485 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6487 tree shorter_type, outer_type;
6491 if (arg0_unw == arg0)
6493 shorter_type = TREE_TYPE (arg0_unw);
6495 #ifdef HAVE_canonicalize_funcptr_for_compare
6496 /* Disable this optimization if we're casting a function pointer
6497 type on targets that require function pointer canonicalization. */
6498 if (HAVE_canonicalize_funcptr_for_compare
6499 && TREE_CODE (shorter_type) == POINTER_TYPE
6500 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6504 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6507 arg1_unw = get_unwidened (arg1, shorter_type);
6509 /* If possible, express the comparison in the shorter mode. */
6510 if ((code == EQ_EXPR || code == NE_EXPR
6511 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6512 && (TREE_TYPE (arg1_unw) == shorter_type
6513 || (TREE_CODE (arg1_unw) == INTEGER_CST
6514 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6515 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6516 && int_fits_type_p (arg1_unw, shorter_type))))
6517 return fold_build2 (code, type, arg0_unw,
6518 fold_convert (shorter_type, arg1_unw));
6520 if (TREE_CODE (arg1_unw) != INTEGER_CST
6521 || TREE_CODE (shorter_type) != INTEGER_TYPE
6522 || !int_fits_type_p (arg1_unw, shorter_type))
6525 /* If we are comparing with an integer that does not fit into the range
6526 of the shorter type, the result is known. */
6527 outer_type = TREE_TYPE (arg1_unw);
6528 min = lower_bound_in_type (outer_type, shorter_type);
6529 max = upper_bound_in_type (outer_type, shorter_type);
6531 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6533 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6540 return omit_one_operand (type, integer_zero_node, arg0);
6545 return omit_one_operand (type, integer_one_node, arg0);
6551 return omit_one_operand (type, integer_one_node, arg0);
6553 return omit_one_operand (type, integer_zero_node, arg0);
6558 return omit_one_operand (type, integer_zero_node, arg0);
6560 return omit_one_operand (type, integer_one_node, arg0);
6569 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6570 ARG0 just the signedness is changed. */
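/* Illustrative example: for a signed int I, (unsigned int) I == 5u folds to
   I == 5, since only the signedness of the cast differs and the constant is
   re-expressed in the inner type.  */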
6573 fold_sign_changed_comparison (enum tree_code code, tree type,
6574 tree arg0, tree arg1)
6577 tree inner_type, outer_type;
6579 if (TREE_CODE (arg0) != NOP_EXPR
6580 && TREE_CODE (arg0) != CONVERT_EXPR)
6583 outer_type = TREE_TYPE (arg0);
6584 arg0_inner = TREE_OPERAND (arg0, 0);
6585 inner_type = TREE_TYPE (arg0_inner);
6587 #ifdef HAVE_canonicalize_funcptr_for_compare
6588 /* Disable this optimization if we're casting a function pointer
6589 type on targets that require function pointer canonicalization. */
6590 if (HAVE_canonicalize_funcptr_for_compare
6591 && TREE_CODE (inner_type) == POINTER_TYPE
6592 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6596 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6599 if (TREE_CODE (arg1) != INTEGER_CST
6600 && !((TREE_CODE (arg1) == NOP_EXPR
6601 || TREE_CODE (arg1) == CONVERT_EXPR)
6602 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6605 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6610 if (TREE_CODE (arg1) == INTEGER_CST)
6611 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6612 TREE_INT_CST_HIGH (arg1), 0,
6613 TREE_OVERFLOW (arg1));
6615 arg1 = fold_convert (inner_type, arg1);
6617 return fold_build2 (code, type, arg0_inner, arg1);
6620 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6621 the step of the array. Reconstructs s and delta in the case of s * delta
6622 being an integer constant (and thus already folded).
6623 ADDR is the address. MULT is the multiplicative expression.
6624 If the function succeeds, the new address expression is returned. Otherwise
6625 NULL_TREE is returned. */
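/* Illustrative example (assumes 4-byte int elements): for int a[N],
   &a[i] + j * 4 can be rewritten as &a[i + j], because 4 is the step of
   the array.  */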
6628 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6630 tree s, delta, step;
6631 tree ref = TREE_OPERAND (addr, 0), pref;
6635 /* Canonicalize op1 into a possibly non-constant delta
6636 and an INTEGER_CST s. */
6637 if (TREE_CODE (op1) == MULT_EXPR)
6639 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6644 if (TREE_CODE (arg0) == INTEGER_CST)
6649 else if (TREE_CODE (arg1) == INTEGER_CST)
6657 else if (TREE_CODE (op1) == INTEGER_CST)
6664 /* Treat op1 as delta * 1. */
6666 s = integer_one_node;
6669 for (;; ref = TREE_OPERAND (ref, 0))
6671 if (TREE_CODE (ref) == ARRAY_REF)
6673 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6677 step = array_ref_element_size (ref);
6678 if (TREE_CODE (step) != INTEGER_CST)
6683 if (! tree_int_cst_equal (step, s))
6688 /* Check whether delta is a multiple of step. */
6689 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6698 if (!handled_component_p (ref))
6702 /* We found a suitable array reference. So copy everything up to it,
6703 and replace the index. */
6705 pref = TREE_OPERAND (addr, 0);
6706 ret = copy_node (pref);
6711 pref = TREE_OPERAND (pref, 0);
6712 TREE_OPERAND (pos, 0) = copy_node (pref);
6713 pos = TREE_OPERAND (pos, 0);
6716 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6717 fold_convert (itype,
6718 TREE_OPERAND (pos, 1)),
6719 fold_convert (itype, delta));
6721 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6725 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6726 means A >= Y && A != MAX, but in this case we know that
6727 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
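/* Illustrative example: i < n && i + 1 > j becomes i < n && i >= j; the
   bound i < n guarantees that i is not the maximum value of its type, so
   i + 1 cannot wrap.  */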
6730 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6732 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6734 if (TREE_CODE (bound) == LT_EXPR)
6735 a = TREE_OPERAND (bound, 0);
6736 else if (TREE_CODE (bound) == GT_EXPR)
6737 a = TREE_OPERAND (bound, 1);
6741 typea = TREE_TYPE (a);
6742 if (!INTEGRAL_TYPE_P (typea)
6743 && !POINTER_TYPE_P (typea))
6746 if (TREE_CODE (ineq) == LT_EXPR)
6748 a1 = TREE_OPERAND (ineq, 1);
6749 y = TREE_OPERAND (ineq, 0);
6751 else if (TREE_CODE (ineq) == GT_EXPR)
6753 a1 = TREE_OPERAND (ineq, 0);
6754 y = TREE_OPERAND (ineq, 1);
6759 if (TREE_TYPE (a1) != typea)
6762 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6763 if (!integer_onep (diff))
6766 return fold_build2 (GE_EXPR, type, a, y);
6769 /* Fold a sum or difference of at least one multiplication.
6770 Returns the folded tree or NULL if no simplification could be made. */
6773 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6775 tree arg00, arg01, arg10, arg11;
6776 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6778 /* (A * C) +- (B * C) -> (A+-B) * C.
6779 (A * C) +- A -> A * (C+-1).
6780 We are most concerned about the case where C is a constant,
6781 but other combinations show up during loop reduction. Since
6782 it is not difficult, try all four possibilities. */
6784 if (TREE_CODE (arg0) == MULT_EXPR)
6786 arg00 = TREE_OPERAND (arg0, 0);
6787 arg01 = TREE_OPERAND (arg0, 1);
6792 arg01 = build_one_cst (type);
6794 if (TREE_CODE (arg1) == MULT_EXPR)
6796 arg10 = TREE_OPERAND (arg1, 0);
6797 arg11 = TREE_OPERAND (arg1, 1);
6802 arg11 = build_one_cst (type);
6806 if (operand_equal_p (arg01, arg11, 0))
6807 same = arg01, alt0 = arg00, alt1 = arg10;
6808 else if (operand_equal_p (arg00, arg10, 0))
6809 same = arg00, alt0 = arg01, alt1 = arg11;
6810 else if (operand_equal_p (arg00, arg11, 0))
6811 same = arg00, alt0 = arg01, alt1 = arg10;
6812 else if (operand_equal_p (arg01, arg10, 0))
6813 same = arg01, alt0 = arg00, alt1 = arg11;
6815 /* No identical multiplicands; see if we can find a common
6816 power-of-two factor in non-power-of-two multiplies. This
6817 can help in multi-dimensional array access. */
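/* Illustrative example: i * 12 + j * 4 can be rewritten as
   (i * 3 + j) * 4, factoring out the common power-of-two factor 4.  */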
6818 else if (host_integerp (arg01, 0)
6819 && host_integerp (arg11, 0))
6821 HOST_WIDE_INT int01, int11, tmp;
6824 int01 = TREE_INT_CST_LOW (arg01);
6825 int11 = TREE_INT_CST_LOW (arg11);
6827 /* Move min of absolute values to int11. */
6828 if ((int01 >= 0 ? int01 : -int01)
6829 < (int11 >= 0 ? int11 : -int11))
6831 tmp = int01, int01 = int11, int11 = tmp;
6832 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6839 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6841 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6842 build_int_cst (TREE_TYPE (arg00),
6847 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6852 return fold_build2 (MULT_EXPR, type,
6853 fold_build2 (code, type,
6854 fold_convert (type, alt0),
6855 fold_convert (type, alt1)),
6856 fold_convert (type, same));
6861 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6862 specified by EXPR into the buffer PTR of length LEN bytes.
6863 Return the number of bytes placed in the buffer, or zero upon failure. */
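/* Worked example (illustrative; assumes a 4-byte integer type): the value
   0x01020304 is written as the bytes 04 03 02 01 on a little-endian target
   and as 01 02 03 04 on a big-endian one.  */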
6867 native_encode_int (tree expr, unsigned char *ptr, int len)
6869 tree type = TREE_TYPE (expr);
6870 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6871 int byte, offset, word, words;
6872 unsigned char value;
6874 if (total_bytes > len)
6876 words = total_bytes / UNITS_PER_WORD;
6878 for (byte = 0; byte < total_bytes; byte++)
6880 int bitpos = byte * BITS_PER_UNIT;
6881 if (bitpos < HOST_BITS_PER_WIDE_INT)
6882 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6884 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6885 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6887 if (total_bytes > UNITS_PER_WORD)
6889 word = byte / UNITS_PER_WORD;
6890 if (WORDS_BIG_ENDIAN)
6891 word = (words - 1) - word;
6892 offset = word * UNITS_PER_WORD;
6893 if (BYTES_BIG_ENDIAN)
6894 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6896 offset += byte % UNITS_PER_WORD;
6899 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6900 ptr[offset] = value;
6906 /* Subroutine of native_encode_expr. Encode the REAL_CST
6907 specified by EXPR into the buffer PTR of length LEN bytes.
6908 Return the number of bytes placed in the buffer, or zero upon failure. */
6912 native_encode_real (tree expr, unsigned char *ptr, int len)
6914 tree type = TREE_TYPE (expr);
6915 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6916 int byte, offset, word, words;
6917 unsigned char value;
6919 /* There are always 32 bits in each long, no matter the size of
6920 the host's long. We handle floating point representations with up to 192 bits. */
6924 if (total_bytes > len)
6926 words = total_bytes / UNITS_PER_WORD;
6928 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6930 for (byte = 0; byte < total_bytes; byte++)
6932 int bitpos = byte * BITS_PER_UNIT;
6933 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6935 if (total_bytes > UNITS_PER_WORD)
6937 word = byte / UNITS_PER_WORD;
6938 if (FLOAT_WORDS_BIG_ENDIAN)
6939 word = (words - 1) - word;
6940 offset = word * UNITS_PER_WORD;
6941 if (BYTES_BIG_ENDIAN)
6942 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6944 offset += byte % UNITS_PER_WORD;
6947 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6948 ptr[offset] = value;
6953 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6954 specified by EXPR into the buffer PTR of length LEN bytes.
6955 Return the number of bytes placed in the buffer, or zero upon failure. */
6959 native_encode_complex (tree expr, unsigned char *ptr, int len)
6964 part = TREE_REALPART (expr);
6965 rsize = native_encode_expr (part, ptr, len);
6968 part = TREE_IMAGPART (expr);
6969 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6972 return rsize + isize;
6976 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6977 specified by EXPR into the buffer PTR of length LEN bytes.
6978 Return the number of bytes placed in the buffer, or zero upon failure. */
6982 native_encode_vector (tree expr, unsigned char *ptr, int len)
6984 int i, size, offset, count;
6985 tree itype, elem, elements;
6988 elements = TREE_VECTOR_CST_ELTS (expr);
6989 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6990 itype = TREE_TYPE (TREE_TYPE (expr));
6991 size = GET_MODE_SIZE (TYPE_MODE (itype));
6992 for (i = 0; i < count; i++)
6996 elem = TREE_VALUE (elements);
6997 elements = TREE_CHAIN (elements);
7004 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7009 if (offset + size > len)
7011 memset (ptr+offset, 0, size);
7019 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7020 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7021 buffer PTR of length LEN bytes. Return the number of bytes
7022 placed in the buffer, or zero upon failure. */
7025 native_encode_expr (tree expr, unsigned char *ptr, int len)
7027 switch (TREE_CODE (expr))
7030 return native_encode_int (expr, ptr, len);
7033 return native_encode_real (expr, ptr, len);
7036 return native_encode_complex (expr, ptr, len);
7039 return native_encode_vector (expr, ptr, len);
7047 /* Subroutine of native_interpret_expr. Interpret the contents of
7048 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7049 If the buffer cannot be interpreted, return NULL_TREE. */
7052 native_interpret_int (tree type, unsigned char *ptr, int len)
7054 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7055 int byte, offset, word, words;
7056 unsigned char value;
7057 unsigned HOST_WIDE_INT lo = 0;
7058 HOST_WIDE_INT hi = 0;
7060 if (total_bytes > len)
7062 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7064 words = total_bytes / UNITS_PER_WORD;
7066 for (byte = 0; byte < total_bytes; byte++)
7068 int bitpos = byte * BITS_PER_UNIT;
7069 if (total_bytes > UNITS_PER_WORD)
7071 word = byte / UNITS_PER_WORD;
7072 if (WORDS_BIG_ENDIAN)
7073 word = (words - 1) - word;
7074 offset = word * UNITS_PER_WORD;
7075 if (BYTES_BIG_ENDIAN)
7076 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7078 offset += byte % UNITS_PER_WORD;
7081 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7082 value = ptr[offset];
7084 if (bitpos < HOST_BITS_PER_WIDE_INT)
7085 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7087 hi |= (unsigned HOST_WIDE_INT) value
7088 << (bitpos - HOST_BITS_PER_WIDE_INT);
7091 return build_int_cst_wide_type (type, lo, hi);
7095 /* Subroutine of native_interpret_expr. Interpret the contents of
7096 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7097 If the buffer cannot be interpreted, return NULL_TREE. */
7100 native_interpret_real (tree type, unsigned char *ptr, int len)
7102 enum machine_mode mode = TYPE_MODE (type);
7103 int total_bytes = GET_MODE_SIZE (mode);
7104 int byte, offset, word, words;
7105 unsigned char value;
7106 /* There are always 32 bits in each long, no matter the size of
7107 the host's long. We handle floating point representations with up to 192 bits. */
7112 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7113 if (total_bytes > len || total_bytes > 24)
7115 words = total_bytes / UNITS_PER_WORD;
7117 memset (tmp, 0, sizeof (tmp));
7118 for (byte = 0; byte < total_bytes; byte++)
7120 int bitpos = byte * BITS_PER_UNIT;
7121 if (total_bytes > UNITS_PER_WORD)
7123 word = byte / UNITS_PER_WORD;
7124 if (FLOAT_WORDS_BIG_ENDIAN)
7125 word = (words - 1) - word;
7126 offset = word * UNITS_PER_WORD;
7127 if (BYTES_BIG_ENDIAN)
7128 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7130 offset += byte % UNITS_PER_WORD;
7133 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7134 value = ptr[offset];
7136 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7139 real_from_target (&r, tmp, mode);
7140 return build_real (type, r);
7144 /* Subroutine of native_interpret_expr. Interpret the contents of
7145 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7146 If the buffer cannot be interpreted, return NULL_TREE. */
7149 native_interpret_complex (tree type, unsigned char *ptr, int len)
7151 tree etype, rpart, ipart;
7154 etype = TREE_TYPE (type);
7155 size = GET_MODE_SIZE (TYPE_MODE (etype));
7158 rpart = native_interpret_expr (etype, ptr, size);
7161 ipart = native_interpret_expr (etype, ptr+size, size);
7164 return build_complex (type, rpart, ipart);
7168 /* Subroutine of native_interpret_expr. Interpret the contents of
7169 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7170 If the buffer cannot be interpreted, return NULL_TREE. */
7173 native_interpret_vector (tree type, unsigned char *ptr, int len)
7175 tree etype, elem, elements;
7178 etype = TREE_TYPE (type);
7179 size = GET_MODE_SIZE (TYPE_MODE (etype));
7180 count = TYPE_VECTOR_SUBPARTS (type);
7181 if (size * count > len)
7184 elements = NULL_TREE;
7185 for (i = count - 1; i >= 0; i--)
7187 elem = native_interpret_expr (etype, ptr+(i*size), size);
7190 elements = tree_cons (NULL_TREE, elem, elements);
7192 return build_vector (type, elements);
7196 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7197 the buffer PTR of length LEN as a constant of type TYPE. For
7198 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7199 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7200 return NULL_TREE. */
7203 native_interpret_expr (tree type, unsigned char *ptr, int len)
7205 switch (TREE_CODE (type))
7210 return native_interpret_int (type, ptr, len);
7213 return native_interpret_real (type, ptr, len);
7216 return native_interpret_complex (type, ptr, len);
7219 return native_interpret_vector (type, ptr, len);
7227 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7228 TYPE at compile-time. If we're unable to perform the conversion
7229 return NULL_TREE. */
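/* Illustrative example (assumes IEEE single precision floats): a
   VIEW_CONVERT_EXPR from the float constant 1.0f to a 32-bit integer type
   folds to 0x3f800000, obtained by encoding the REAL_CST into the buffer
   and reinterpreting the bytes as an INTEGER_CST.  */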
7232 fold_view_convert_expr (tree type, tree expr)
7234 /* We support up to 512-bit values (for V8DFmode). */
7235 unsigned char buffer[64];
7238 /* Check that the host and target are sane. */
7239 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7242 len = native_encode_expr (expr, buffer, sizeof (buffer));
7246 return native_interpret_expr (type, buffer, len);
7250 /* Fold a unary expression of code CODE and type TYPE with operand
7251 OP0. Return the folded expression if folding is successful.
7252 Otherwise, return NULL_TREE. */
7255 fold_unary (enum tree_code code, tree type, tree op0)
7259 enum tree_code_class kind = TREE_CODE_CLASS (code);
7261 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7262 && TREE_CODE_LENGTH (code) == 1);
7267 if (code == NOP_EXPR || code == CONVERT_EXPR
7268 || code == FLOAT_EXPR || code == ABS_EXPR)
7270 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
7272 STRIP_SIGN_NOPS (arg0);
7276 /* Strip any conversions that don't change the mode. This
7277 is safe for every expression, except for a comparison
7278 expression because its signedness is derived from its operands. In that case, only strip conversions that do not change the signedness.
7281 Note that this is done as an internal manipulation within
7282 the constant folder, in order to find the simplest
7283 representation of the arguments so that their form can be
7284 studied. In any case, the appropriate type conversions
7285 should be put back in the tree that will get out of the constant folder. */
7291 if (TREE_CODE_CLASS (code) == tcc_unary)
7293 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7294 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7295 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7296 else if (TREE_CODE (arg0) == COND_EXPR)
7298 tree arg01 = TREE_OPERAND (arg0, 1);
7299 tree arg02 = TREE_OPERAND (arg0, 2);
7300 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7301 arg01 = fold_build1 (code, type, arg01);
7302 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7303 arg02 = fold_build1 (code, type, arg02);
7304 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7307 /* If this was a conversion, and all we did was to move it
7308 inside the COND_EXPR, bring it back out. But leave it if
7309 it is a conversion from integer to integer and the
7310 result precision is no wider than a word since such a
7311 conversion is cheap and may be optimized away by combine,
7312 while it couldn't if it were outside the COND_EXPR. Then return
7313 so we don't get into an infinite recursion loop taking the
7314 conversion out and then back in. */
7316 if ((code == NOP_EXPR || code == CONVERT_EXPR
7317 || code == NON_LVALUE_EXPR)
7318 && TREE_CODE (tem) == COND_EXPR
7319 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7320 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7321 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7322 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7323 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7324 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7325 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7327 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7328 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7329 || flag_syntax_only))
7330 tem = build1 (code, type,
7332 TREE_TYPE (TREE_OPERAND
7333 (TREE_OPERAND (tem, 1), 0)),
7334 TREE_OPERAND (tem, 0),
7335 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7336 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7339 else if (COMPARISON_CLASS_P (arg0))
7341 if (TREE_CODE (type) == BOOLEAN_TYPE)
7343 arg0 = copy_node (arg0);
7344 TREE_TYPE (arg0) = type;
7347 else if (TREE_CODE (type) != INTEGER_TYPE)
7348 return fold_build3 (COND_EXPR, type, arg0,
7349 fold_build1 (code, type,
7351 fold_build1 (code, type,
7352 integer_zero_node));
7361 case FIX_TRUNC_EXPR:
7362 if (TREE_TYPE (op0) == type)
7365 /* If we have (type) (a CMP b) and type is an integral type, return
7366 a new expression involving the new type. */
7367 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7368 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7369 TREE_OPERAND (op0, 1));
7371 /* Handle cases of two conversions in a row. */
7372 if (TREE_CODE (op0) == NOP_EXPR
7373 || TREE_CODE (op0) == CONVERT_EXPR)
7375 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7376 tree inter_type = TREE_TYPE (op0);
7377 int inside_int = INTEGRAL_TYPE_P (inside_type);
7378 int inside_ptr = POINTER_TYPE_P (inside_type);
7379 int inside_float = FLOAT_TYPE_P (inside_type);
7380 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7381 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7382 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7383 int inter_int = INTEGRAL_TYPE_P (inter_type);
7384 int inter_ptr = POINTER_TYPE_P (inter_type);
7385 int inter_float = FLOAT_TYPE_P (inter_type);
7386 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7387 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7388 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7389 int final_int = INTEGRAL_TYPE_P (type);
7390 int final_ptr = POINTER_TYPE_P (type);
7391 int final_float = FLOAT_TYPE_P (type);
7392 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7393 unsigned int final_prec = TYPE_PRECISION (type);
7394 int final_unsignedp = TYPE_UNSIGNED (type);
7396 /* In addition to the cases of two conversions in a row
7397 handled below, if we are converting something to its own
7398 type via an object of identical or wider precision, neither
7399 conversion is needed. */
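/* Illustrative example (assumes long is at least as wide as int): for an
   int I, (int) (long) I collapses to plain I by this rule.  */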
7400 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7401 && (((inter_int || inter_ptr) && final_int)
7402 || (inter_float && final_float))
7403 && inter_prec >= final_prec)
7404 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7406 /* Likewise, if the intermediate and final types are either both
7407 float or both integer, we don't need the middle conversion if
7408 it is wider than the final type and doesn't change the signedness
7409 (for integers). Avoid this if the final type is a pointer
7410 since then we sometimes need the inner conversion. Likewise if
7411 the outer has a precision not equal to the size of its mode. */
7412 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7413 || (inter_float && inside_float)
7414 || (inter_vec && inside_vec))
7415 && inter_prec >= inside_prec
7416 && (inter_float || inter_vec
7417 || inter_unsignedp == inside_unsignedp)
7418 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7419 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7421 && (! final_vec || inter_prec == inside_prec))
7422 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7424 /* If we have a sign-extension of a zero-extended value, we can
7425 replace that by a single zero-extension. */
7426 if (inside_int && inter_int && final_int
7427 && inside_prec < inter_prec && inter_prec < final_prec
7428 && inside_unsignedp && !inter_unsignedp)
7429 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7431 /* Two conversions in a row are not needed unless:
7432 - some conversion is floating-point (overstrict for now), or
7433 - some conversion is a vector (overstrict for now), or
7434 - the intermediate type is narrower than both the initial and final types, or
7436 - the intermediate type and innermost type differ in signedness,
7437 and the outermost type is wider than the intermediate, or
7438 - the initial type is a pointer type and the precisions of the
7439 intermediate and final types differ, or
7440 - the final type is a pointer type and the precisions of the
7441 initial and intermediate types differ.
7442 - the final type is a pointer type and the initial type not
7444 - the initial type is a pointer to an array and the final type is not. */
7445 if (! inside_float && ! inter_float && ! final_float
7446 && ! inside_vec && ! inter_vec && ! final_vec
7447 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7448 && ! (inside_int && inter_int
7449 && inter_unsignedp != inside_unsignedp
7450 && inter_prec < final_prec)
7451 && ((inter_unsignedp && inter_prec > inside_prec)
7452 == (final_unsignedp && final_prec > inter_prec))
7453 && ! (inside_ptr && inter_prec != final_prec)
7454 && ! (final_ptr && inside_prec != inter_prec)
7455 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7456 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7457 && final_ptr == inside_ptr
7459 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7460 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7461 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7464 /* Handle (T *)&A.B.C for A being of type T and B and C
7465 living at offset zero. This occurs frequently in
7466 C++ upcasting and then accessing the base. */
7467 if (TREE_CODE (op0) == ADDR_EXPR
7468 && POINTER_TYPE_P (type)
7469 && handled_component_p (TREE_OPERAND (op0, 0)))
7471 HOST_WIDE_INT bitsize, bitpos;
7473 enum machine_mode mode;
7474 int unsignedp, volatilep;
7475 tree base = TREE_OPERAND (op0, 0);
7476 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7477 &mode, &unsignedp, &volatilep, false);
7478 /* If the reference was to a (constant) zero offset, we can use
7479 the address of the base if it has the same base type
7480 as the result type. */
7481 if (! offset && bitpos == 0
7482 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7483 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7484 return fold_convert (type, build_fold_addr_expr (base));
7487 if ((TREE_CODE (op0) == MODIFY_EXPR
7488 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7489 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7490 /* Detect assigning a bitfield. */
7491 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7493 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7495 /* Don't leave an assignment inside a conversion
7496 unless assigning a bitfield. */
7497 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7498 /* First do the assignment, then return converted constant. */
7499 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7500 TREE_NO_WARNING (tem) = 1;
7501 TREE_USED (tem) = 1;
7505 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7506 constant (if x has signed type, the sign bit cannot be set
7507 in c). This folds extension into the BIT_AND_EXPR. */
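/* Illustrative example: for an unsigned int X, (unsigned long) (X & 0xff)
   becomes (unsigned long) X & 0xff, so the widening conversion is applied
   to X alone.  */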
7508 if (INTEGRAL_TYPE_P (type)
7509 && TREE_CODE (type) != BOOLEAN_TYPE
7510 && TREE_CODE (op0) == BIT_AND_EXPR
7511 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7514 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7517 if (TYPE_UNSIGNED (TREE_TYPE (and))
7518 || (TYPE_PRECISION (type)
7519 <= TYPE_PRECISION (TREE_TYPE (and))))
7521 else if (TYPE_PRECISION (TREE_TYPE (and1))
7522 <= HOST_BITS_PER_WIDE_INT
7523 && host_integerp (and1, 1))
7525 unsigned HOST_WIDE_INT cst;
7527 cst = tree_low_cst (and1, 1);
7528 cst &= (HOST_WIDE_INT) -1
7529 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7530 change = (cst == 0);
7531 #ifdef LOAD_EXTEND_OP
7533 && !flag_syntax_only
7534 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7537 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7538 and0 = fold_convert (uns, and0);
7539 and1 = fold_convert (uns, and1);
7545 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7546 TREE_INT_CST_HIGH (and1), 0,
7547 TREE_OVERFLOW (and1));
7548 return fold_build2 (BIT_AND_EXPR, type,
7549 fold_convert (type, and0), tem);
7553 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7554 T2 being pointers to types of the same size. */
7555 if (POINTER_TYPE_P (type)
7556 && BINARY_CLASS_P (arg0)
7557 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7558 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7560 tree arg00 = TREE_OPERAND (arg0, 0);
7562 tree t1 = TREE_TYPE (arg00);
7563 tree tt0 = TREE_TYPE (t0);
7564 tree tt1 = TREE_TYPE (t1);
7565 tree s0 = TYPE_SIZE (tt0);
7566 tree s1 = TYPE_SIZE (tt1);
7568 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7569 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7570 TREE_OPERAND (arg0, 1));
7573 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7574 of the same precision, and X is an integer type not narrower than
7575 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7576 if (INTEGRAL_TYPE_P (type)
7577 && TREE_CODE (op0) == BIT_NOT_EXPR
7578 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7579 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7580 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7581 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7583 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7584 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7585 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7586 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7589 tem = fold_convert_const (code, type, arg0);
7590 return tem ? tem : NULL_TREE;
7592 case VIEW_CONVERT_EXPR:
7593 if (TREE_TYPE (op0) == type)
7595 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7596 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7597 return fold_view_convert_expr (type, op0);
7600 tem = fold_negate_expr (arg0);
7602 return fold_convert (type, tem);
7606 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7607 return fold_abs_const (arg0, type);
7608 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7609 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7610 /* Convert fabs((double)float) into (double)fabsf(float). */
7611 else if (TREE_CODE (arg0) == NOP_EXPR
7612 && TREE_CODE (type) == REAL_TYPE)
7614 tree targ0 = strip_float_extensions (arg0);
7616 return fold_convert (type, fold_build1 (ABS_EXPR,
7620 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7621 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7624 /* Strip sign ops from argument. */
7625 if (TREE_CODE (type) == REAL_TYPE)
7627 tem = fold_strip_sign_ops (arg0);
7629 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7634 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7635 return fold_convert (type, arg0);
7636 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7638 tree itype = TREE_TYPE (type);
7639 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7640 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7641 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7643 if (TREE_CODE (arg0) == COMPLEX_CST)
7645 tree itype = TREE_TYPE (type);
7646 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7647 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7648 return build_complex (type, rpart, negate_expr (ipart));
7650 if (TREE_CODE (arg0) == CONJ_EXPR)
7651 return fold_convert (type, TREE_OPERAND (arg0, 0));
7655 if (TREE_CODE (arg0) == INTEGER_CST)
7656 return fold_not_const (arg0, type);
7657 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7658 return TREE_OPERAND (arg0, 0);
7659 /* Convert ~ (-A) to A - 1. */
7660 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7661 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7662 build_int_cst (type, 1));
7663 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7664 else if (INTEGRAL_TYPE_P (type)
7665 && ((TREE_CODE (arg0) == MINUS_EXPR
7666 && integer_onep (TREE_OPERAND (arg0, 1)))
7667 || (TREE_CODE (arg0) == PLUS_EXPR
7668 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7669 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7670 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7671 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7672 && (tem = fold_unary (BIT_NOT_EXPR, type,
7674 TREE_OPERAND (arg0, 0)))))
7675 return fold_build2 (BIT_XOR_EXPR, type, tem,
7676 fold_convert (type, TREE_OPERAND (arg0, 1)));
7677 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7678 && (tem = fold_unary (BIT_NOT_EXPR, type,
7680 TREE_OPERAND (arg0, 1)))))
7681 return fold_build2 (BIT_XOR_EXPR, type,
7682 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7686 case TRUTH_NOT_EXPR:
7687 /* The argument to invert_truthvalue must have Boolean type. */
7688 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7689 arg0 = fold_convert (boolean_type_node, arg0);
7691 /* Note that the operand of this must be an int
7692 and its values must be 0 or 1.
7693 ("true" is a fixed value perhaps depending on the language,
7694 but we don't handle values other than 1 correctly yet.) */
7695 tem = fold_truth_not_expr (arg0);
7698 return fold_convert (type, tem);
7701 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7702 return fold_convert (type, arg0);
7703 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7704 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7705 TREE_OPERAND (arg0, 1));
7706 if (TREE_CODE (arg0) == COMPLEX_CST)
7707 return fold_convert (type, TREE_REALPART (arg0));
7708 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7710 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7711 tem = fold_build2 (TREE_CODE (arg0), itype,
7712 fold_build1 (REALPART_EXPR, itype,
7713 TREE_OPERAND (arg0, 0)),
7714 fold_build1 (REALPART_EXPR, itype,
7715 TREE_OPERAND (arg0, 1)));
7716 return fold_convert (type, tem);
7718 if (TREE_CODE (arg0) == CONJ_EXPR)
7720 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7721 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7722 return fold_convert (type, tem);
7724 if (TREE_CODE (arg0) == CALL_EXPR)
7726 tree fn = get_callee_fndecl (arg0);
7727 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7728 switch (DECL_FUNCTION_CODE (fn))
7730 CASE_FLT_FN (BUILT_IN_CEXPI):
7731 fn = mathfn_built_in (type, BUILT_IN_COS);
7732 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7740 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7741 return fold_convert (type, integer_zero_node);
7742 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7743 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7744 TREE_OPERAND (arg0, 0));
7745 if (TREE_CODE (arg0) == COMPLEX_CST)
7746 return fold_convert (type, TREE_IMAGPART (arg0));
7747 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7749 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7750 tem = fold_build2 (TREE_CODE (arg0), itype,
7751 fold_build1 (IMAGPART_EXPR, itype,
7752 TREE_OPERAND (arg0, 0)),
7753 fold_build1 (IMAGPART_EXPR, itype,
7754 TREE_OPERAND (arg0, 1)));
7755 return fold_convert (type, tem);
7757 if (TREE_CODE (arg0) == CONJ_EXPR)
7759 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7760 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7761 return fold_convert (type, negate_expr (tem));
7763 if (TREE_CODE (arg0) == CALL_EXPR)
7765 tree fn = get_callee_fndecl (arg0);
7766 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7767 switch (DECL_FUNCTION_CODE (fn))
7769 CASE_FLT_FN (BUILT_IN_CEXPI):
7770 fn = mathfn_built_in (type, BUILT_IN_SIN);
7771 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7780 } /* switch (code) */
7783 /* Fold a binary expression of code CODE and type TYPE with operands
7784 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7785 Return the folded expression if folding is successful. Otherwise,
7786 return NULL_TREE. */
7789 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7791 enum tree_code compl_code;
7793 if (code == MIN_EXPR)
7794 compl_code = MAX_EXPR;
7795 else if (code == MAX_EXPR)
7796 compl_code = MIN_EXPR;
7800 /* MIN (MAX (a, b), b) == b. */
7801 if (TREE_CODE (op0) == compl_code
7802 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7803 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7805 /* MIN (MAX (b, a), b) == b. */
7806 if (TREE_CODE (op0) == compl_code
7807 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7808 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7809 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7811 /* MIN (a, MAX (a, b)) == a. */
7812 if (TREE_CODE (op1) == compl_code
7813 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7814 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7815 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7817 /* MIN (a, MAX (b, a)) == a. */
7818 if (TREE_CODE (op1) == compl_code
7819 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7820 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7821 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7826 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7827 by changing CODE to reduce the magnitude of constants involved in
7828 ARG0 of the comparison.
7829 Returns a canonicalized comparison tree if a simplification was
7830 possible, otherwise returns NULL_TREE. */
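/* Illustrative example (signed operands, overflow undefined): A + 2 <= B
   can be canonicalized to A + 1 < B, reducing the magnitude of the constant
   added to A.  */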
7833 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7834 tree arg0, tree arg1)
7836 enum tree_code code0 = TREE_CODE (arg0);
7837 tree t, cst0 = NULL_TREE;
7841 /* Match A +- CST code arg1 and CST code arg1. */
7842 if (!(((code0 == MINUS_EXPR
7843 || code0 == PLUS_EXPR)
7844 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7845 || code0 == INTEGER_CST))
7848 /* Identify the constant in arg0 and its sign. */
7849 if (code0 == INTEGER_CST)
7852 cst0 = TREE_OPERAND (arg0, 1);
7853 sgn0 = tree_int_cst_sgn (cst0);
7855 /* Overflowed constants and zero will cause problems. */
7856 if (integer_zerop (cst0)
7857 || TREE_OVERFLOW (cst0))
7860 /* See if we can reduce the magnitude of the constant in
7861 arg0 by changing the comparison code. */
7862 if (code0 == INTEGER_CST)
7864 /* CST <= arg1 -> CST-1 < arg1. */
7865 if (code == LE_EXPR && sgn0 == 1)
7867 /* -CST < arg1 -> -CST-1 <= arg1. */
7868 else if (code == LT_EXPR && sgn0 == -1)
7870 /* CST > arg1 -> CST-1 >= arg1. */
7871 else if (code == GT_EXPR && sgn0 == 1)
7873 /* -CST >= arg1 -> -CST-1 > arg1. */
7874 else if (code == GE_EXPR && sgn0 == -1)
7878 /* arg1 code' CST' might be more canonical. */
7883 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7885 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7887 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7888 else if (code == GT_EXPR
7889 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7891 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7892 else if (code == LE_EXPR
7893 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7895 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7896 else if (code == GE_EXPR
7897 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7903 /* Now build the constant reduced in magnitude. */
7904 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7905 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7906 if (code0 != INTEGER_CST)
7907 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7909 /* If swapping might yield a more canonical form, do so. */
7911 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7913 return fold_build2 (code, type, t, arg1);
7916 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7917 overflow further. Try to decrease the magnitude of constants involved
7918 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7919 and put sole constants at the second argument position.
7920 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7923 maybe_canonicalize_comparison (enum tree_code code, tree type,
7924 tree arg0, tree arg1)
7928 /* In principle pointers also have undefined overflow behavior,
7929 but that causes problems elsewhere. */
7930 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
7931 || POINTER_TYPE_P (TREE_TYPE (arg0)))
7934 /* Try canonicalization by simplifying arg0. */
7935 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7939 /* Try canonicalization by simplifying arg1 using the swapped comparison code. */
7941 code = swap_tree_comparison (code);
7942 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7945 /* Subroutine of fold_binary. This routine performs all of the
7946 transformations that are common to the equality/inequality
7947 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7948 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7949 fold_binary should go through fold_binary rather than calling this function directly. Fold a comparison with
7950 tree code CODE and type TYPE with operands OP0 and OP1. Return
7951 the folded comparison or NULL_TREE. */
7954 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7956 tree arg0, arg1, tem;
7961 STRIP_SIGN_NOPS (arg0);
7962 STRIP_SIGN_NOPS (arg1);
7964 tem = fold_relational_const (code, type, arg0, arg1);
7965 if (tem != NULL_TREE)
7968 /* If one arg is a real or integer constant, put it last. */
7969 if (tree_swap_operands_p (arg0, arg1, true))
7970 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7972 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7973 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7974 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7975 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7976 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
7977 && (TREE_CODE (arg1) == INTEGER_CST
7978 && !TREE_OVERFLOW (arg1)))
7980 tree const1 = TREE_OPERAND (arg0, 1);
7982 tree variable = TREE_OPERAND (arg0, 0);
7985 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7987 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7988 TREE_TYPE (arg1), const2, const1);
7989 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7990 && (TREE_CODE (lhs) != INTEGER_CST
7991 || !TREE_OVERFLOW (lhs)))
7992 return fold_build2 (code, type, variable, lhs);
7995 /* For comparisons of pointers we can decompose it to a compile time
7996 comparison of the base objects and the offsets into the object.
7997 This requires at least one operand being an ADDR_EXPR to do more
7998 than the operand_equal_p test below. */
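/* Illustrative example: for struct S { int a; int b; } s, the comparison
   &s.a == &s.b folds to constant false because the two fields lie at
   different bit positions within the same base object.  */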
7999 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8000 && (TREE_CODE (arg0) == ADDR_EXPR
8001 || TREE_CODE (arg1) == ADDR_EXPR))
8003 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8004 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8005 enum machine_mode mode;
8006 int volatilep, unsignedp;
8007 bool indirect_base0 = false;
8009 /* Get base and offset for the access. Strip ADDR_EXPR for
8010 get_inner_reference, but put it back by stripping INDIRECT_REF
8011 off the base object if possible. */
8013 if (TREE_CODE (arg0) == ADDR_EXPR)
8015 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8016 &bitsize, &bitpos0, &offset0, &mode,
8017 &unsignedp, &volatilep, false);
8018 if (TREE_CODE (base0) == INDIRECT_REF)
8019 base0 = TREE_OPERAND (base0, 0);
8021 indirect_base0 = true;
8025 if (TREE_CODE (arg1) == ADDR_EXPR)
8027 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8028 &bitsize, &bitpos1, &offset1, &mode,
8029 &unsignedp, &volatilep, false);
8030 /* We have to make sure to have an indirect/non-indirect base1
8031 just the same as we did for base0. */
8032 if (TREE_CODE (base1) == INDIRECT_REF
8034 base1 = TREE_OPERAND (base1, 0);
8035 else if (!indirect_base0)
8038 else if (indirect_base0)
8041 /* If we have equivalent bases we might be able to simplify. */
8043 && operand_equal_p (base0, base1, 0))
8045 /* We can fold this expression to a constant if the non-constant
8046 offset parts are equal. */
8047 if (offset0 == offset1
8048 || (offset0 && offset1
8049 && operand_equal_p (offset0, offset1, 0)))
8054 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8056 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8058 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8060 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8062 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8064 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8068 /* We can simplify the comparison to a comparison of the variable
8069 offset parts if the constant offset parts are equal.
8070 Be careful to use signed size type here because otherwise we
8071 mess with array offsets in the wrong way. This is possible
8072 because pointer arithmetic is restricted to remain within an
8073 object and overflow on pointer differences is undefined as of
8074 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8075 else if (bitpos0 == bitpos1)
8077 tree signed_size_type_node;
8078 signed_size_type_node = signed_type_for (size_type_node);
8080 /* By converting to signed size type we cover middle-end pointer
8081 arithmetic which operates on unsigned pointer types of size
8082 type size and ARRAY_REF offsets which are properly sign or
8083 zero extended from their type in case it is narrower than the size type. */
8085 if (offset0 == NULL_TREE)
8086 offset0 = build_int_cst (signed_size_type_node, 0);
8088 offset0 = fold_convert (signed_size_type_node, offset0);
8089 if (offset1 == NULL_TREE)
8090 offset1 = build_int_cst (signed_size_type_node, 0);
8092 offset1 = fold_convert (signed_size_type_node, offset1);
8094 return fold_build2 (code, type, offset0, offset1);
8099 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8100 same object, then we can fold this to a comparison of the two offsets in
8101 signed size type. This is possible because pointer arithmetic is
8102 restricted to remain within an object and overflow on pointer differences
8103 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8105 We check flag_wrapv directly because pointer types are unsigned,
8106 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8107 normally what we want, to avoid certain odd overflow cases, but not here. */
8109 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8111 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8113 tree base0, offset0, base1, offset1;
8115 if (extract_array_ref (arg0, &base0, &offset0)
8116 && extract_array_ref (arg1, &base1, &offset1)
8117 && operand_equal_p (base0, base1, 0))
8119 tree signed_size_type_node;
8120 signed_size_type_node = signed_type_for (size_type_node);
8122 /* By converting to signed size type we cover middle-end pointer
8123 arithmetic which operates on unsigned pointer types of size
8124 type size and ARRAY_REF offsets which are properly sign or
8125 zero extended from their type in case it is narrower than the size type. */
8127 if (offset0 == NULL_TREE)
8128 offset0 = build_int_cst (signed_size_type_node, 0);
8130 offset0 = fold_convert (signed_size_type_node, offset0);
8131 if (offset1 == NULL_TREE)
8132 offset1 = build_int_cst (signed_size_type_node, 0);
8134 offset1 = fold_convert (signed_size_type_node, offset1);
8136 return fold_build2 (code, type, offset0, offset1);
8140 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8141 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8142 the resulting offset is smaller in absolute value than the original one. */
8144 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8145 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8146 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8147 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8148 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8149 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8150 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8152 tree const1 = TREE_OPERAND (arg0, 1);
8153 tree const2 = TREE_OPERAND (arg1, 1);
8154 tree variable1 = TREE_OPERAND (arg0, 0);
8155 tree variable2 = TREE_OPERAND (arg1, 0);
8158 /* Put the constant on the side where it doesn't overflow and is
8159 of lower absolute value than before. */
8160 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8161 ? MINUS_EXPR : PLUS_EXPR,
8163 if (!TREE_OVERFLOW (cst)
8164 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8165 return fold_build2 (code, type,
8167 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8170 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8171 ? MINUS_EXPR : PLUS_EXPR,
8173 if (!TREE_OVERFLOW (cst)
8174 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8175 return fold_build2 (code, type,
8176 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8181 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8182 signed arithmetic case. That form is created by the compiler
8183 often enough for folding it to be of value. One example is in
8184 computing loop trip counts after Operator Strength Reduction. */
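/* Illustrative example (signed X, overflow undefined): X * 4 > 0 folds to
   X > 0, and X * -4 > 0 folds to X < 0, the comparison being swapped for a
   negative multiplier.  */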
8185 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8186 && TREE_CODE (arg0) == MULT_EXPR
8187 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8188 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8189 && integer_zerop (arg1))
8191 tree const1 = TREE_OPERAND (arg0, 1);
8192 tree const2 = arg1; /* zero */
8193 tree variable1 = TREE_OPERAND (arg0, 0);
8194 enum tree_code cmp_code = code;
8196 gcc_assert (!integer_zerop (const1));
8198 /* If const1 is negative we swap the sense of the comparison. */
8199 if (tree_int_cst_sgn (const1) < 0)
8200 cmp_code = swap_tree_comparison (cmp_code);
8202 return fold_build2 (cmp_code, type, variable1, const2);
8205 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8209 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8211 tree targ0 = strip_float_extensions (arg0);
8212 tree targ1 = strip_float_extensions (arg1);
8213 tree newtype = TREE_TYPE (targ0);
8215 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8216 newtype = TREE_TYPE (targ1);
8218 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8219 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8220 return fold_build2 (code, type, fold_convert (newtype, targ0),
8221 fold_convert (newtype, targ1));
8223 /* (-a) CMP (-b) -> b CMP a */
8224 if (TREE_CODE (arg0) == NEGATE_EXPR
8225 && TREE_CODE (arg1) == NEGATE_EXPR)
8226 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8227 TREE_OPERAND (arg0, 0));
8229 if (TREE_CODE (arg1) == REAL_CST)
8231 REAL_VALUE_TYPE cst;
8232 cst = TREE_REAL_CST (arg1);
8234 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8235 if (TREE_CODE (arg0) == NEGATE_EXPR)
8236 return fold_build2 (swap_tree_comparison (code), type,
8237 TREE_OPERAND (arg0, 0),
8238 build_real (TREE_TYPE (arg1),
8239 REAL_VALUE_NEGATE (cst)));
8241 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8242 /* a CMP (-0) -> a CMP 0 */
8243 if (REAL_VALUE_MINUS_ZERO (cst))
8244 return fold_build2 (code, type, arg0,
8245 build_real (TREE_TYPE (arg1), dconst0));
8247 /* x != NaN is always true, other ops are always false. */
8248 if (REAL_VALUE_ISNAN (cst)
8249 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8251 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8252 return omit_one_operand (type, tem, arg0);
8255 /* Fold comparisons against infinity. */
8256 if (REAL_VALUE_ISINF (cst))
8258 tem = fold_inf_compare (code, type, arg0, arg1);
8259 if (tem != NULL_TREE)
8264 /* If this is a comparison of a real constant with a PLUS_EXPR
8265 or a MINUS_EXPR of a real constant, we can convert it into a
8266 comparison with a revised real constant as long as no overflow
8267 occurs when unsafe_math_optimizations are enabled. */
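/* Illustrative example (hypothetical double x, -funsafe-math-optimizations):
   x + 1.5 < 3.0 is rewritten as x < 1.5 when the folded constant does
   not overflow.  */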
8268 if (flag_unsafe_math_optimizations
8269 && TREE_CODE (arg1) == REAL_CST
8270 && (TREE_CODE (arg0) == PLUS_EXPR
8271 || TREE_CODE (arg0) == MINUS_EXPR)
8272 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8273 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8274 ? MINUS_EXPR : PLUS_EXPR,
8275 arg1, TREE_OPERAND (arg0, 1), 0))
8276 && !TREE_OVERFLOW (tem))
8277 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8279 /* Likewise, we can simplify a comparison of a real constant with
8280 a MINUS_EXPR whose first operand is also a real constant, i.e.
8281 (c1 - x) < c2 becomes x > c1-c2. */
8282 if (flag_unsafe_math_optimizations
8283 && TREE_CODE (arg1) == REAL_CST
8284 && TREE_CODE (arg0) == MINUS_EXPR
8285 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8286 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8288 && !TREE_OVERFLOW (tem))
8289 return fold_build2 (swap_tree_comparison (code), type,
8290 TREE_OPERAND (arg0, 1), tem);
8292 /* Fold comparisons against built-in math functions. */
8293 if (TREE_CODE (arg1) == REAL_CST
8294 && flag_unsafe_math_optimizations
8295 && ! flag_errno_math)
8297 enum built_in_function fcode = builtin_mathfn_code (arg0);
8299 if (fcode != END_BUILTINS)
8301 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8302 if (tem != NULL_TREE)
8308 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
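/* Illustrative example (hypothetical int i): i++ == 5 becomes ++i == 6,
   which is valid here only for equality tests or pointer types, as the
   comment below explains.  */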
8309 if (TREE_CONSTANT (arg1)
8310 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8311 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8312 /* This optimization is invalid for ordered comparisons
8313 if CONST+INCR overflows or if foo+incr might overflow.
8314 This optimization is invalid for floating point due to rounding.
8315 For pointer types we assume overflow doesn't happen. */
8316 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8317 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8318 && (code == EQ_EXPR || code == NE_EXPR))))
8320 tree varop, newconst;
8322 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8324 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8325 arg1, TREE_OPERAND (arg0, 1));
8326 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8327 TREE_OPERAND (arg0, 0),
8328 TREE_OPERAND (arg0, 1));
8332 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8333 arg1, TREE_OPERAND (arg0, 1));
8334 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8335 TREE_OPERAND (arg0, 0),
8336 TREE_OPERAND (arg0, 1));
8340 /* If VAROP is a reference to a bitfield, we must mask
8341 the constant by the width of the field. */
8342 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8343 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8344 && host_integerp (DECL_SIZE (TREE_OPERAND
8345 (TREE_OPERAND (varop, 0), 1)), 1))
8347 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8348 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8349 tree folded_compare, shift;
8351 /* First check whether the comparison would come out
8352 always the same. If we don't do that we would
8353 change the meaning with the masking. */
8354 folded_compare = fold_build2 (code, type,
8355 TREE_OPERAND (varop, 0), arg1);
8356 if (TREE_CODE (folded_compare) == INTEGER_CST)
8357 return omit_one_operand (type, folded_compare, varop);
8359 shift = build_int_cst (NULL_TREE,
8360 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8361 shift = fold_convert (TREE_TYPE (varop), shift);
8362 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8364 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8368 return fold_build2 (code, type, varop, newconst);
8371 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8372 && (TREE_CODE (arg0) == NOP_EXPR
8373 || TREE_CODE (arg0) == CONVERT_EXPR))
8375 /* If we are widening one operand of an integer comparison,
8376 see if the other operand is similarly being widened. Perhaps we
8377 can do the comparison in the narrower type. */
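/* Illustrative example (hypothetical unsigned char c1, c2):
   (int) c1 == (int) c2 can be done as c1 == c2 in the narrower type;
   fold_sign_changed_comparison handles the analogous case where only
   the signedness of the conversion differs.  */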
8378 tem = fold_widened_comparison (code, type, arg0, arg1);
8382 /* Or if we are changing signedness. */
8383 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8388 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8389 constant, we can simplify it. */
8390 if (TREE_CODE (arg1) == INTEGER_CST
8391 && (TREE_CODE (arg0) == MIN_EXPR
8392 || TREE_CODE (arg0) == MAX_EXPR)
8393 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8395 tem = optimize_minmax_comparison (code, type, op0, op1);
8400 /* Simplify comparison of something with itself. (For IEEE
8401 floating-point, we can only do some of these simplifications.) */
8402 if (operand_equal_p (arg0, arg1, 0))
8407 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8408 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8409 return constant_boolean_node (1, type);
8414 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8415 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8416 return constant_boolean_node (1, type);
8417 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8420 /* For NE, we can only do this simplification if integer
8421 or we don't honor IEEE floating point NaNs. */
8422 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8423 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8425 /* ... fall through ... */
8428 return constant_boolean_node (0, type);
8434 /* If we are comparing an expression that just has comparisons
8435 of two integer values, arithmetic expressions of those comparisons,
8436 and constants, we can simplify it. There are only three cases
8437 to check: the two values can either be equal, the first can be
8438 greater, or the second can be greater. Fold the expression for
8439 those three values. Since each value must be 0 or 1, we have
8440 eight possibilities, each of which corresponds to the constant 0
8441 or 1 or one of the six possible comparisons.
8443 This handles common cases like (a > b) == 0 but also handles
8444 expressions like ((x > y) - (y > x)) > 0, which supposedly
8445 occur in macroized code. */
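/* Illustrative examples (hypothetical ints a, b, x, y): (a > b) == 0
   folds to a <= b, and ((x > y) - (y > x)) > 0 folds to x > y.  */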
8447 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8449 tree cval1 = 0, cval2 = 0;
8452 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8453 /* Don't handle degenerate cases here; they should already
8454 have been handled anyway. */
8455 && cval1 != 0 && cval2 != 0
8456 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8457 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8458 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8459 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8460 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8461 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8462 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8464 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8465 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8467 /* We can't just pass T to eval_subst in case cval1 or cval2
8468 was the same as ARG1. */
8471 = fold_build2 (code, type,
8472 eval_subst (arg0, cval1, maxval,
8476 = fold_build2 (code, type,
8477 eval_subst (arg0, cval1, maxval,
8481 = fold_build2 (code, type,
8482 eval_subst (arg0, cval1, minval,
8486 /* All three of these results should be 0 or 1. Confirm they are.
8487 Then use those values to select the proper code to use. */
8489 if (TREE_CODE (high_result) == INTEGER_CST
8490 && TREE_CODE (equal_result) == INTEGER_CST
8491 && TREE_CODE (low_result) == INTEGER_CST)
8493 /* Make a 3-bit mask with the high-order bit being the
8494 value for `>', the next for '=', and the low for '<'. */
8495 switch ((integer_onep (high_result) * 4)
8496 + (integer_onep (equal_result) * 2)
8497 + integer_onep (low_result))
8501 return omit_one_operand (type, integer_zero_node, arg0);
8522 return omit_one_operand (type, integer_one_node, arg0);
8526 return save_expr (build2 (code, type, cval1, cval2));
8527 return fold_build2 (code, type, cval1, cval2);
8532 /* Fold a comparison of the address of COMPONENT_REFs with the same
8533 type and component to a comparison of the address of the base
8534 object. In short, &x->a OP &y->a to x OP y and
8535 &x->a OP &y.a to x OP &y */
8536 if (TREE_CODE (arg0) == ADDR_EXPR
8537 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8538 && TREE_CODE (arg1) == ADDR_EXPR
8539 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8541 tree cref0 = TREE_OPERAND (arg0, 0);
8542 tree cref1 = TREE_OPERAND (arg1, 0);
8543 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8545 tree op0 = TREE_OPERAND (cref0, 0);
8546 tree op1 = TREE_OPERAND (cref1, 0);
8547 return fold_build2 (code, type,
8548 build_fold_addr_expr (op0),
8549 build_fold_addr_expr (op1));
8553 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8554 into a single range test. */
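/* Illustrative example (hypothetical unsigned int x): x / 4 == 2 is
   equivalent to 8 <= x && x <= 11, which fold_div_compare expresses as
   a single range test.  */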
8555 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8556 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8557 && TREE_CODE (arg1) == INTEGER_CST
8558 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8559 && !integer_zerop (TREE_OPERAND (arg0, 1))
8560 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8561 && !TREE_OVERFLOW (arg1))
8563 tem = fold_div_compare (code, type, arg0, arg1);
8564 if (tem != NULL_TREE)
8568 /* Fold ~X op ~Y as Y op X. */
8569 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8570 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8571 return fold_build2 (code, type,
8572 TREE_OPERAND (arg1, 0),
8573 TREE_OPERAND (arg0, 0));
8575 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8576 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8577 && TREE_CODE (arg1) == INTEGER_CST)
8578 return fold_build2 (swap_tree_comparison (code), type,
8579 TREE_OPERAND (arg0, 0),
8580 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8586 /* Subroutine of fold_binary. Optimize complex multiplications of the
8587 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8588 argument EXPR represents the expression "z" of type TYPE. */
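/* Illustrative example (hypothetical complex value z = a + b*i):
   z * conj(z) is built here as a*a + b*b with a zero imaginary part.  */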
8591 fold_mult_zconjz (tree type, tree expr)
8593 tree itype = TREE_TYPE (type);
8594 tree rpart, ipart, tem;
8596 if (TREE_CODE (expr) == COMPLEX_EXPR)
8598 rpart = TREE_OPERAND (expr, 0);
8599 ipart = TREE_OPERAND (expr, 1);
8601 else if (TREE_CODE (expr) == COMPLEX_CST)
8603 rpart = TREE_REALPART (expr);
8604 ipart = TREE_IMAGPART (expr);
8608 expr = save_expr (expr);
8609 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8610 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8613 rpart = save_expr (rpart);
8614 ipart = save_expr (ipart);
8615 tem = fold_build2 (PLUS_EXPR, itype,
8616 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8617 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8618 return fold_build2 (COMPLEX_EXPR, type, tem,
8619 fold_convert (itype, integer_zero_node));
8623 /* Fold a binary expression of code CODE and type TYPE with operands
8624 OP0 and OP1. Return the folded expression if folding is
8625 successful. Otherwise, return NULL_TREE. */
8628 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8630 enum tree_code_class kind = TREE_CODE_CLASS (code);
8631 tree arg0, arg1, tem;
8632 tree t1 = NULL_TREE;
8634 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8635 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8636 && TREE_CODE_LENGTH (code) == 2
8638 && op1 != NULL_TREE);
8643 /* Strip any conversions that don't change the mode. This is
8644 safe for every expression, except for a comparison expression
8645 because its signedness is derived from its operands. So, in
8646 the latter case, only strip conversions that don't change the
8649 Note that this is done as an internal manipulation within the
8650 constant folder, in order to find the simplest representation
8651 of the arguments so that their form can be studied. In any
8652 case, the appropriate type conversions should be put back in
8653 the tree that will get out of the constant folder. */
8655 if (kind == tcc_comparison)
8657 STRIP_SIGN_NOPS (arg0);
8658 STRIP_SIGN_NOPS (arg1);
8666 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8667 constant but we can't do arithmetic on them. */
8668 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8669 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8670 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8671 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8673 if (kind == tcc_binary)
8674 tem = const_binop (code, arg0, arg1, 0);
8675 else if (kind == tcc_comparison)
8676 tem = fold_relational_const (code, type, arg0, arg1);
8680 if (tem != NULL_TREE)
8682 if (TREE_TYPE (tem) != type)
8683 tem = fold_convert (type, tem);
8688 /* If this is a commutative operation, and ARG0 is a constant, move it
8689 to ARG1 to reduce the number of tests below. */
8690 if (commutative_tree_code (code)
8691 && tree_swap_operands_p (arg0, arg1, true))
8692 return fold_build2 (code, type, op1, op0);
8694 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8696 First check for cases where an arithmetic operation is applied to a
8697 compound, conditional, or comparison operation. Push the arithmetic
8698 operation inside the compound or conditional to see if any folding
8699 can then be done. Convert comparison to conditional for this purpose.
8700 This also optimizes non-constant cases that used to be done in
8703 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8704 one of the operands is a comparison and the other is a comparison, a
8705 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8706 code below would make the expression more complex. Change it to a
8707 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8708 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
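/* Illustrative example (hypothetical comparisons): (a < b) & (c < d) is
   turned into the TRUTH_AND_EXPR (a < b) && (c < d); likewise
   (a < b) == (c < d) on truth values becomes the inversion of a
   TRUTH_XOR_EXPR.  */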
8710 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8711 || code == EQ_EXPR || code == NE_EXPR)
8712 && ((truth_value_p (TREE_CODE (arg0))
8713 && (truth_value_p (TREE_CODE (arg1))
8714 || (TREE_CODE (arg1) == BIT_AND_EXPR
8715 && integer_onep (TREE_OPERAND (arg1, 1)))))
8716 || (truth_value_p (TREE_CODE (arg1))
8717 && (truth_value_p (TREE_CODE (arg0))
8718 || (TREE_CODE (arg0) == BIT_AND_EXPR
8719 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8721 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8722 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8725 fold_convert (boolean_type_node, arg0),
8726 fold_convert (boolean_type_node, arg1));
8728 if (code == EQ_EXPR)
8729 tem = invert_truthvalue (tem);
8731 return fold_convert (type, tem);
8734 if (TREE_CODE_CLASS (code) == tcc_binary
8735 || TREE_CODE_CLASS (code) == tcc_comparison)
8737 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8738 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8739 fold_build2 (code, type,
8740 TREE_OPERAND (arg0, 1), op1));
8741 if (TREE_CODE (arg1) == COMPOUND_EXPR
8742 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8743 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8744 fold_build2 (code, type,
8745 op0, TREE_OPERAND (arg1, 1)));
8747 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8749 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8751 /*cond_first_p=*/1);
8752 if (tem != NULL_TREE)
8756 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8758 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8760 /*cond_first_p=*/0);
8761 if (tem != NULL_TREE)
8769 /* A + (-B) -> A - B */
8770 if (TREE_CODE (arg1) == NEGATE_EXPR)
8771 return fold_build2 (MINUS_EXPR, type,
8772 fold_convert (type, arg0),
8773 fold_convert (type, TREE_OPERAND (arg1, 0)));
8774 /* (-A) + B -> B - A */
8775 if (TREE_CODE (arg0) == NEGATE_EXPR
8776 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8777 return fold_build2 (MINUS_EXPR, type,
8778 fold_convert (type, arg1),
8779 fold_convert (type, TREE_OPERAND (arg0, 0)));
8780 /* Convert ~A + 1 to -A. */
8781 if (INTEGRAL_TYPE_P (type)
8782 && TREE_CODE (arg0) == BIT_NOT_EXPR
8783 && integer_onep (arg1))
8784 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8786 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8788 if ((TREE_CODE (arg0) == MULT_EXPR
8789 || TREE_CODE (arg1) == MULT_EXPR)
8790 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8792 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8797 if (! FLOAT_TYPE_P (type))
8799 if (integer_zerop (arg1))
8800 return non_lvalue (fold_convert (type, arg0));
8803 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8804 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8805 && !TYPE_OVERFLOW_TRAPS (type))
8807 t1 = build_int_cst_type (type, -1);
8808 return omit_one_operand (type, t1, arg1);
8812 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8813 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8814 && !TYPE_OVERFLOW_TRAPS (type))
8816 t1 = build_int_cst_type (type, -1);
8817 return omit_one_operand (type, t1, arg0);
8820 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8821 with a constant, and the two constants have no bits in common,
8822 we should treat this as a BIT_IOR_EXPR since this may produce more
8824 if (TREE_CODE (arg0) == BIT_AND_EXPR
8825 && TREE_CODE (arg1) == BIT_AND_EXPR
8826 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8827 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8828 && integer_zerop (const_binop (BIT_AND_EXPR,
8829 TREE_OPERAND (arg0, 1),
8830 TREE_OPERAND (arg1, 1), 0)))
8832 code = BIT_IOR_EXPR;
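/* Illustrative example (hypothetical ints x, y): (x & 8) + (y & 3) is
   handled as (x & 8) | (y & 3), since the two masks share no bits.  */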
8836 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8837 (plus (plus (mult) (mult)) (foo)) so that we can
8838 take advantage of the factoring cases below. */
8839 if (((TREE_CODE (arg0) == PLUS_EXPR
8840 || TREE_CODE (arg0) == MINUS_EXPR)
8841 && TREE_CODE (arg1) == MULT_EXPR)
8842 || ((TREE_CODE (arg1) == PLUS_EXPR
8843 || TREE_CODE (arg1) == MINUS_EXPR)
8844 && TREE_CODE (arg0) == MULT_EXPR))
8846 tree parg0, parg1, parg, marg;
8847 enum tree_code pcode;
8849 if (TREE_CODE (arg1) == MULT_EXPR)
8850 parg = arg0, marg = arg1;
8852 parg = arg1, marg = arg0;
8853 pcode = TREE_CODE (parg);
8854 parg0 = TREE_OPERAND (parg, 0);
8855 parg1 = TREE_OPERAND (parg, 1);
8859 if (TREE_CODE (parg0) == MULT_EXPR
8860 && TREE_CODE (parg1) != MULT_EXPR)
8861 return fold_build2 (pcode, type,
8862 fold_build2 (PLUS_EXPR, type,
8863 fold_convert (type, parg0),
8864 fold_convert (type, marg)),
8865 fold_convert (type, parg1));
8866 if (TREE_CODE (parg0) != MULT_EXPR
8867 && TREE_CODE (parg1) == MULT_EXPR)
8868 return fold_build2 (PLUS_EXPR, type,
8869 fold_convert (type, parg0),
8870 fold_build2 (pcode, type,
8871 fold_convert (type, marg),
8876 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8877 of the array. The loop optimizer sometimes produces this type of
8879 if (TREE_CODE (arg0) == ADDR_EXPR)
8881 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8883 return fold_convert (type, tem);
8885 else if (TREE_CODE (arg1) == ADDR_EXPR)
8887 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8889 return fold_convert (type, tem);
8894 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8895 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8896 return non_lvalue (fold_convert (type, arg0));
8898 /* Likewise if the operands are reversed. */
8899 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8900 return non_lvalue (fold_convert (type, arg1));
8902 /* Convert X + -C into X - C. */
8903 if (TREE_CODE (arg1) == REAL_CST
8904 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8906 tem = fold_negate_const (arg1, type);
8907 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8908 return fold_build2 (MINUS_EXPR, type,
8909 fold_convert (type, arg0),
8910 fold_convert (type, tem));
8913 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
8914 to __complex__ ( x, y ). This is not the same for SNaNs or
8915 if signed zeros are involved. */
8916 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8917 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8918 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
8920 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
8921 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
8922 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
8923 bool arg0rz = false, arg0iz = false;
8924 if ((arg0r && (arg0rz = real_zerop (arg0r)))
8925 || (arg0i && (arg0iz = real_zerop (arg0i))))
8927 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
8928 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
8929 if (arg0rz && arg1i && real_zerop (arg1i))
8931 tree rp = arg1r ? arg1r
8932 : build1 (REALPART_EXPR, rtype, arg1);
8933 tree ip = arg0i ? arg0i
8934 : build1 (IMAGPART_EXPR, rtype, arg0);
8935 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8937 else if (arg0iz && arg1r && real_zerop (arg1r))
8939 tree rp = arg0r ? arg0r
8940 : build1 (REALPART_EXPR, rtype, arg0);
8941 tree ip = arg1i ? arg1i
8942 : build1 (IMAGPART_EXPR, rtype, arg1);
8943 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8948 if (flag_unsafe_math_optimizations
8949 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8950 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8951 && (tem = distribute_real_division (code, type, arg0, arg1)))
8954 /* Convert x+x into x*2.0. */
8955 if (operand_equal_p (arg0, arg1, 0)
8956 && SCALAR_FLOAT_TYPE_P (type))
8957 return fold_build2 (MULT_EXPR, type, arg0,
8958 build_real (type, dconst2));
8960 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8961 if (flag_unsafe_math_optimizations
8962 && TREE_CODE (arg1) == PLUS_EXPR
8963 && TREE_CODE (arg0) != MULT_EXPR)
8965 tree tree10 = TREE_OPERAND (arg1, 0);
8966 tree tree11 = TREE_OPERAND (arg1, 1);
8967 if (TREE_CODE (tree11) == MULT_EXPR
8968 && TREE_CODE (tree10) == MULT_EXPR)
8971 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8972 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8975 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8976 if (flag_unsafe_math_optimizations
8977 && TREE_CODE (arg0) == PLUS_EXPR
8978 && TREE_CODE (arg1) != MULT_EXPR)
8980 tree tree00 = TREE_OPERAND (arg0, 0);
8981 tree tree01 = TREE_OPERAND (arg0, 1);
8982 if (TREE_CODE (tree01) == MULT_EXPR
8983 && TREE_CODE (tree00) == MULT_EXPR)
8986 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8987 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8993 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8994 is a rotate of A by C1 bits. */
8995 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8996 is a rotate of A by B bits. */
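/* Illustrative example (hypothetical 32-bit unsigned int x):
   (x << 5) + (x >> 27) is recognized as a left rotate of x by 5, and
   (x << n) + (x >> (32 - n)) as a rotate by n.  */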
8998 enum tree_code code0, code1;
8999 code0 = TREE_CODE (arg0);
9000 code1 = TREE_CODE (arg1);
9001 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9002 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9003 && operand_equal_p (TREE_OPERAND (arg0, 0),
9004 TREE_OPERAND (arg1, 0), 0)
9005 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9007 tree tree01, tree11;
9008 enum tree_code code01, code11;
9010 tree01 = TREE_OPERAND (arg0, 1);
9011 tree11 = TREE_OPERAND (arg1, 1);
9012 STRIP_NOPS (tree01);
9013 STRIP_NOPS (tree11);
9014 code01 = TREE_CODE (tree01);
9015 code11 = TREE_CODE (tree11);
9016 if (code01 == INTEGER_CST
9017 && code11 == INTEGER_CST
9018 && TREE_INT_CST_HIGH (tree01) == 0
9019 && TREE_INT_CST_HIGH (tree11) == 0
9020 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9021 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9022 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9023 code0 == LSHIFT_EXPR ? tree01 : tree11);
9024 else if (code11 == MINUS_EXPR)
9026 tree tree110, tree111;
9027 tree110 = TREE_OPERAND (tree11, 0);
9028 tree111 = TREE_OPERAND (tree11, 1);
9029 STRIP_NOPS (tree110);
9030 STRIP_NOPS (tree111);
9031 if (TREE_CODE (tree110) == INTEGER_CST
9032 && 0 == compare_tree_int (tree110,
9034 (TREE_TYPE (TREE_OPERAND
9036 && operand_equal_p (tree01, tree111, 0))
9037 return build2 ((code0 == LSHIFT_EXPR
9040 type, TREE_OPERAND (arg0, 0), tree01);
9042 else if (code01 == MINUS_EXPR)
9044 tree tree010, tree011;
9045 tree010 = TREE_OPERAND (tree01, 0);
9046 tree011 = TREE_OPERAND (tree01, 1);
9047 STRIP_NOPS (tree010);
9048 STRIP_NOPS (tree011);
9049 if (TREE_CODE (tree010) == INTEGER_CST
9050 && 0 == compare_tree_int (tree010,
9052 (TREE_TYPE (TREE_OPERAND
9054 && operand_equal_p (tree11, tree011, 0))
9055 return build2 ((code0 != LSHIFT_EXPR
9058 type, TREE_OPERAND (arg0, 0), tree11);
9064 /* In most languages, we can't associate operations on floats through
9065 parentheses. Rather than remember where the parentheses were, we
9066 don't associate floats at all, unless the user has specified
9067 -funsafe-math-optimizations. */
9069 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9071 tree var0, con0, lit0, minus_lit0;
9072 tree var1, con1, lit1, minus_lit1;
9074 /* Split both trees into variables, constants, and literals. Then
9075 associate each group together, the constants with literals,
9076 then the result with variables. This increases the chances of
9077 literals being recombined later and of generating relocatable
9078 expressions for the sum of a constant and literal. */
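/* Illustrative example (hypothetical ints x, y, overflow assumed not to
   matter): (x + 3) - (y - 5) splits into the variables x and -y and the
   literals 3 and 5, and reassociates to (x - y) + 8.  */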
9079 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9080 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9081 code == MINUS_EXPR);
9083 /* Only do something if we found more than two objects. Otherwise,
9084 nothing has changed and we risk infinite recursion. */
9085 if (2 < ((var0 != 0) + (var1 != 0)
9086 + (con0 != 0) + (con1 != 0)
9087 + (lit0 != 0) + (lit1 != 0)
9088 + (minus_lit0 != 0) + (minus_lit1 != 0)))
9090 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9091 if (code == MINUS_EXPR)
9094 var0 = associate_trees (var0, var1, code, type);
9095 con0 = associate_trees (con0, con1, code, type);
9096 lit0 = associate_trees (lit0, lit1, code, type);
9097 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9099 /* Preserve the MINUS_EXPR if the negative part of the literal is
9100 greater than the positive part. Otherwise, the multiplicative
9101 folding code (i.e. extract_muldiv) may be fooled in case
9102 unsigned constants are subtracted, like in the following
9103 example: ((X*2 + 4) - 8U)/2. */
9104 if (minus_lit0 && lit0)
9106 if (TREE_CODE (lit0) == INTEGER_CST
9107 && TREE_CODE (minus_lit0) == INTEGER_CST
9108 && tree_int_cst_lt (lit0, minus_lit0))
9110 minus_lit0 = associate_trees (minus_lit0, lit0,
9116 lit0 = associate_trees (lit0, minus_lit0,
9124 return fold_convert (type,
9125 associate_trees (var0, minus_lit0,
9129 con0 = associate_trees (con0, minus_lit0,
9131 return fold_convert (type,
9132 associate_trees (var0, con0,
9137 con0 = associate_trees (con0, lit0, code, type);
9138 return fold_convert (type, associate_trees (var0, con0,
9146 /* A - (-B) -> A + B */
9147 if (TREE_CODE (arg1) == NEGATE_EXPR)
9148 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9149 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9150 if (TREE_CODE (arg0) == NEGATE_EXPR
9151 && (FLOAT_TYPE_P (type)
9152 || INTEGRAL_TYPE_P (type))
9153 && negate_expr_p (arg1)
9154 && reorder_operands_p (arg0, arg1))
9155 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9156 TREE_OPERAND (arg0, 0));
9157 /* Convert -A - 1 to ~A. */
9158 if (INTEGRAL_TYPE_P (type)
9159 && TREE_CODE (arg0) == NEGATE_EXPR
9160 && integer_onep (arg1)
9161 && !TYPE_OVERFLOW_TRAPS (type))
9162 return fold_build1 (BIT_NOT_EXPR, type,
9163 fold_convert (type, TREE_OPERAND (arg0, 0)));
9165 /* Convert -1 - A to ~A. */
9166 if (INTEGRAL_TYPE_P (type)
9167 && integer_all_onesp (arg0))
9168 return fold_build1 (BIT_NOT_EXPR, type, op1);
9170 if (! FLOAT_TYPE_P (type))
9172 if (integer_zerop (arg0))
9173 return negate_expr (fold_convert (type, arg1));
9174 if (integer_zerop (arg1))
9175 return non_lvalue (fold_convert (type, arg0));
9177 /* Fold A - (A & B) into ~B & A. */
9178 if (!TREE_SIDE_EFFECTS (arg0)
9179 && TREE_CODE (arg1) == BIT_AND_EXPR)
9181 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9182 return fold_build2 (BIT_AND_EXPR, type,
9183 fold_build1 (BIT_NOT_EXPR, type,
9184 TREE_OPERAND (arg1, 0)),
9186 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9187 return fold_build2 (BIT_AND_EXPR, type,
9188 fold_build1 (BIT_NOT_EXPR, type,
9189 TREE_OPERAND (arg1, 1)),
9193 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9194 any power of 2 minus 1. */
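/* Illustrative example (hypothetical int a, B = 7):
   (a & ~7) - (a & 7) folds to (a ^ 7) - 7.  */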
9195 if (TREE_CODE (arg0) == BIT_AND_EXPR
9196 && TREE_CODE (arg1) == BIT_AND_EXPR
9197 && operand_equal_p (TREE_OPERAND (arg0, 0),
9198 TREE_OPERAND (arg1, 0), 0))
9200 tree mask0 = TREE_OPERAND (arg0, 1);
9201 tree mask1 = TREE_OPERAND (arg1, 1);
9202 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9204 if (operand_equal_p (tem, mask1, 0))
9206 tem = fold_build2 (BIT_XOR_EXPR, type,
9207 TREE_OPERAND (arg0, 0), mask1);
9208 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9213 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9214 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9215 return non_lvalue (fold_convert (type, arg0));
9217 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9218 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9219 (-ARG1 + ARG0) reduces to -ARG1. */
9220 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9221 return negate_expr (fold_convert (type, arg1));
9223 /* Fold &x - &x. This can happen from &x.foo - &x.
9224 This is unsafe for certain floats even in non-IEEE formats.
9225 In IEEE, it is unsafe because it gives the wrong result for NaNs.
9226 Also note that operand_equal_p is always false if an operand is volatile. */
9229 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9230 && operand_equal_p (arg0, arg1, 0))
9231 return fold_convert (type, integer_zero_node);
9233 /* A - B -> A + (-B) if B is easily negatable. */
9234 if (negate_expr_p (arg1)
9235 && ((FLOAT_TYPE_P (type)
9236 /* Avoid this transformation if B is a positive REAL_CST. */
9237 && (TREE_CODE (arg1) != REAL_CST
9238 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9239 || INTEGRAL_TYPE_P (type)))
9240 return fold_build2 (PLUS_EXPR, type,
9241 fold_convert (type, arg0),
9242 fold_convert (type, negate_expr (arg1)));
9244 /* Try folding difference of addresses. */
9248 if ((TREE_CODE (arg0) == ADDR_EXPR
9249 || TREE_CODE (arg1) == ADDR_EXPR)
9250 && ptr_difference_const (arg0, arg1, &diff))
9251 return build_int_cst_type (type, diff);
9254 /* Fold &a[i] - &a[j] to i-j. */
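/* Illustrative example (hypothetical array with 4-byte elements): the
   byte difference &a[i] - &a[j] is built below as (i - j) * 4.  */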
9255 if (TREE_CODE (arg0) == ADDR_EXPR
9256 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9257 && TREE_CODE (arg1) == ADDR_EXPR
9258 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9260 tree aref0 = TREE_OPERAND (arg0, 0);
9261 tree aref1 = TREE_OPERAND (arg1, 0);
9262 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9263 TREE_OPERAND (aref1, 0), 0))
9265 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9266 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9267 tree esz = array_ref_element_size (aref0);
9268 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9269 return fold_build2 (MULT_EXPR, type, diff,
9270 fold_convert (type, esz));
9275 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9276 of the array. The loop optimizer sometimes produces this type of
9278 if (TREE_CODE (arg0) == ADDR_EXPR)
9280 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9282 return fold_convert (type, tem);
9285 if (flag_unsafe_math_optimizations
9286 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9287 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9288 && (tem = distribute_real_division (code, type, arg0, arg1)))
9291 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9293 if ((TREE_CODE (arg0) == MULT_EXPR
9294 || TREE_CODE (arg1) == MULT_EXPR)
9295 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9297 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9305 /* (-A) * (-B) -> A * B */
9306 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9307 return fold_build2 (MULT_EXPR, type,
9308 fold_convert (type, TREE_OPERAND (arg0, 0)),
9309 fold_convert (type, negate_expr (arg1)));
9310 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9311 return fold_build2 (MULT_EXPR, type,
9312 fold_convert (type, negate_expr (arg0)),
9313 fold_convert (type, TREE_OPERAND (arg1, 0)));
9315 if (! FLOAT_TYPE_P (type))
9317 if (integer_zerop (arg1))
9318 return omit_one_operand (type, arg1, arg0);
9319 if (integer_onep (arg1))
9320 return non_lvalue (fold_convert (type, arg0));
9321 /* Transform x * -1 into -x. */
9322 if (integer_all_onesp (arg1))
9323 return fold_convert (type, negate_expr (arg0));
9324 /* Transform x * -C into -x * C if x is easily negatable. */
9325 if (TREE_CODE (arg1) == INTEGER_CST
9326 && tree_int_cst_sgn (arg1) == -1
9327 && negate_expr_p (arg0)
9328 && (tem = negate_expr (arg1)) != arg1
9329 && !TREE_OVERFLOW (tem))
9330 return fold_build2 (MULT_EXPR, type,
9331 negate_expr (arg0), tem);
9333 /* (a * (1 << b)) is (a << b) */
9334 if (TREE_CODE (arg1) == LSHIFT_EXPR
9335 && integer_onep (TREE_OPERAND (arg1, 0)))
9336 return fold_build2 (LSHIFT_EXPR, type, arg0,
9337 TREE_OPERAND (arg1, 1));
9338 if (TREE_CODE (arg0) == LSHIFT_EXPR
9339 && integer_onep (TREE_OPERAND (arg0, 0)))
9340 return fold_build2 (LSHIFT_EXPR, type, arg1,
9341 TREE_OPERAND (arg0, 1));
9343 if (TREE_CODE (arg1) == INTEGER_CST
9344 && 0 != (tem = extract_muldiv (op0,
9345 fold_convert (type, arg1),
9347 return fold_convert (type, tem);
9349 /* Optimize z * conj(z) for integer complex numbers. */
9350 if (TREE_CODE (arg0) == CONJ_EXPR
9351 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9352 return fold_mult_zconjz (type, arg1);
9353 if (TREE_CODE (arg1) == CONJ_EXPR
9354 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9355 return fold_mult_zconjz (type, arg0);
9359 /* Maybe fold x * 0 to 0. The expressions aren't the same
9360 when x is NaN, since x * 0 is also NaN. Nor are they the
9361 same in modes with signed zeros, since multiplying a
9362 negative value by 0 gives -0, not +0. */
9363 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9364 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9365 && real_zerop (arg1))
9366 return omit_one_operand (type, arg1, arg0);
9367 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9368 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9369 && real_onep (arg1))
9370 return non_lvalue (fold_convert (type, arg0));
9372 /* Transform x * -1.0 into -x. */
9373 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9374 && real_minus_onep (arg1))
9375 return fold_convert (type, negate_expr (arg0));
9377 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9378 if (flag_unsafe_math_optimizations
9379 && TREE_CODE (arg0) == RDIV_EXPR
9380 && TREE_CODE (arg1) == REAL_CST
9381 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9383 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9386 return fold_build2 (RDIV_EXPR, type, tem,
9387 TREE_OPERAND (arg0, 1));
9390 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9391 if (operand_equal_p (arg0, arg1, 0))
9393 tree tem = fold_strip_sign_ops (arg0);
9394 if (tem != NULL_TREE)
9396 tem = fold_convert (type, tem);
9397 return fold_build2 (MULT_EXPR, type, tem, tem);
9401 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9402 This is not the same for NaNs or if signed zeros are
9404 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9405 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9406 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9407 && TREE_CODE (arg1) == COMPLEX_CST
9408 && real_zerop (TREE_REALPART (arg1)))
9410 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9411 if (real_onep (TREE_IMAGPART (arg1)))
9412 return fold_build2 (COMPLEX_EXPR, type,
9413 negate_expr (fold_build1 (IMAGPART_EXPR,
9415 fold_build1 (REALPART_EXPR, rtype, arg0));
9416 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9417 return fold_build2 (COMPLEX_EXPR, type,
9418 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9419 negate_expr (fold_build1 (REALPART_EXPR,
9423 /* Optimize z * conj(z) for floating point complex numbers.
9424 Guarded by flag_unsafe_math_optimizations as non-finite
9425 imaginary components don't produce scalar results. */
9426 if (flag_unsafe_math_optimizations
9427 && TREE_CODE (arg0) == CONJ_EXPR
9428 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9429 return fold_mult_zconjz (type, arg1);
9430 if (flag_unsafe_math_optimizations
9431 && TREE_CODE (arg1) == CONJ_EXPR
9432 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9433 return fold_mult_zconjz (type, arg0);
9435 if (flag_unsafe_math_optimizations)
9437 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9438 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9440 /* Optimizations of root(...)*root(...). */
9441 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9443 tree rootfn, arg, arglist;
9444 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9445 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9447 /* Optimize sqrt(x)*sqrt(x) as x. */
9448 if (BUILTIN_SQRT_P (fcode0)
9449 && operand_equal_p (arg00, arg10, 0)
9450 && ! HONOR_SNANS (TYPE_MODE (type)))
9453 /* Optimize root(x)*root(y) as root(x*y). */
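/* Illustrative example (under -funsafe-math-optimizations):
   sqrt (x) * sqrt (y) becomes sqrt (x * y), and sqrt (x) * sqrt (x)
   becomes x when signaling NaNs are not honored.  */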
9454 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9455 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9456 arglist = build_tree_list (NULL_TREE, arg);
9457 return build_function_call_expr (rootfn, arglist);
9460 /* Optimize expN(x)*expN(y) as expN(x+y). */
9461 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9463 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9464 tree arg = fold_build2 (PLUS_EXPR, type,
9465 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9466 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9467 tree arglist = build_tree_list (NULL_TREE, arg);
9468 return build_function_call_expr (expfn, arglist);
9471 /* Optimizations of pow(...)*pow(...). */
9472 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9473 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9474 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9476 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9477 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9479 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9480 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9483 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9484 if (operand_equal_p (arg01, arg11, 0))
9486 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9487 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9488 tree arglist = tree_cons (NULL_TREE, arg,
9489 build_tree_list (NULL_TREE,
9491 return build_function_call_expr (powfn, arglist);
9494 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9495 if (operand_equal_p (arg00, arg10, 0))
9497 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9498 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9499 tree arglist = tree_cons (NULL_TREE, arg00,
9500 build_tree_list (NULL_TREE,
9502 return build_function_call_expr (powfn, arglist);
9506 /* Optimize tan(x)*cos(x) as sin(x). */
9507 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9508 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9509 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9510 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9511 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9512 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9513 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9514 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9516 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9518 if (sinfn != NULL_TREE)
9519 return build_function_call_expr (sinfn,
9520 TREE_OPERAND (arg0, 1));
9523 /* Optimize x*pow(x,c) as pow(x,c+1). */
9524 if (fcode1 == BUILT_IN_POW
9525 || fcode1 == BUILT_IN_POWF
9526 || fcode1 == BUILT_IN_POWL)
9528 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9529 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9531 if (TREE_CODE (arg11) == REAL_CST
9532 && !TREE_OVERFLOW (arg11)
9533 && operand_equal_p (arg0, arg10, 0))
9535 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9539 c = TREE_REAL_CST (arg11);
9540 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9541 arg = build_real (type, c);
9542 arglist = build_tree_list (NULL_TREE, arg);
9543 arglist = tree_cons (NULL_TREE, arg0, arglist);
9544 return build_function_call_expr (powfn, arglist);
9548 /* Optimize pow(x,c)*x as pow(x,c+1). */
9549 if (fcode0 == BUILT_IN_POW
9550 || fcode0 == BUILT_IN_POWF
9551 || fcode0 == BUILT_IN_POWL)
9553 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9554 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9556 if (TREE_CODE (arg01) == REAL_CST
9557 && !TREE_OVERFLOW (arg01)
9558 && operand_equal_p (arg1, arg00, 0))
9560 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9564 c = TREE_REAL_CST (arg01);
9565 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9566 arg = build_real (type, c);
9567 arglist = build_tree_list (NULL_TREE, arg);
9568 arglist = tree_cons (NULL_TREE, arg1, arglist);
9569 return build_function_call_expr (powfn, arglist);
9573 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9575 && operand_equal_p (arg0, arg1, 0))
9577 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9581 tree arg = build_real (type, dconst2);
9582 tree arglist = build_tree_list (NULL_TREE, arg);
9583 arglist = tree_cons (NULL_TREE, arg0, arglist);
9584 return build_function_call_expr (powfn, arglist);
9593 if (integer_all_onesp (arg1))
9594 return omit_one_operand (type, arg1, arg0);
9595 if (integer_zerop (arg1))
9596 return non_lvalue (fold_convert (type, arg0));
9597 if (operand_equal_p (arg0, arg1, 0))
9598 return non_lvalue (fold_convert (type, arg0));
9601 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9602 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9604 t1 = build_int_cst_type (type, -1);
9605 return omit_one_operand (type, t1, arg1);
9609 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9610 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9612 t1 = build_int_cst_type (type, -1);
9613 return omit_one_operand (type, t1, arg0);
9616 /* Canonicalize (X & C1) | C2. */
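/* Illustrative examples (hypothetical int x): (x & 0x0f) | 0xff becomes
   just 0xff; (x & ~0x0f) | 0x0f becomes x | 0x0f; and (x & 0xff) | 0x0f
   has its mask minimized to (x & 0xf0) | 0x0f.  */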
9617 if (TREE_CODE (arg0) == BIT_AND_EXPR
9618 && TREE_CODE (arg1) == INTEGER_CST
9619 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9621 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9622 int width = TYPE_PRECISION (type);
9623 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9624 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9625 hi2 = TREE_INT_CST_HIGH (arg1);
9626 lo2 = TREE_INT_CST_LOW (arg1);
9628 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9629 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9630 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9632 if (width > HOST_BITS_PER_WIDE_INT)
9634 mhi = (unsigned HOST_WIDE_INT) -1
9635 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9641 mlo = (unsigned HOST_WIDE_INT) -1
9642 >> (HOST_BITS_PER_WIDE_INT - width);
9645 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9646 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9647 return fold_build2 (BIT_IOR_EXPR, type,
9648 TREE_OPERAND (arg0, 0), arg1);
9650 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9653 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9654 return fold_build2 (BIT_IOR_EXPR, type,
9655 fold_build2 (BIT_AND_EXPR, type,
9656 TREE_OPERAND (arg0, 0),
9657 build_int_cst_wide (type,
9663 /* (X & Y) | Y is (X, Y). */
9664 if (TREE_CODE (arg0) == BIT_AND_EXPR
9665 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9666 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9667 /* (X & Y) | X is (Y, X). */
9668 if (TREE_CODE (arg0) == BIT_AND_EXPR
9669 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9670 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9671 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9672 /* X | (X & Y) is (Y, X). */
9673 if (TREE_CODE (arg1) == BIT_AND_EXPR
9674 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9675 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9676 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9677 /* X | (Y & X) is (Y, X). */
9678 if (TREE_CODE (arg1) == BIT_AND_EXPR
9679 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9680 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9681 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9683 t1 = distribute_bit_expr (code, type, arg0, arg1);
9684 if (t1 != NULL_TREE)
9687 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9689 This results in more efficient code for machines without a NAND
9690 instruction. Combine will canonicalize to the first form
9691 which will allow use of NAND instructions provided by the
9692 backend if they exist. */
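/* Illustrative example (hypothetical ints a, b): ~a | ~b is rewritten
   as ~(a & b), a single NAND on targets that provide one.  */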
9693 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9694 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9696 return fold_build1 (BIT_NOT_EXPR, type,
9697 build2 (BIT_AND_EXPR, type,
9698 TREE_OPERAND (arg0, 0),
9699 TREE_OPERAND (arg1, 0)));
9702 /* See if this can be simplified into a rotate first. If that
9703 is unsuccessful continue in the association code. */
9707 if (integer_zerop (arg1))
9708 return non_lvalue (fold_convert (type, arg0));
9709 if (integer_all_onesp (arg1))
9710 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9711 if (operand_equal_p (arg0, arg1, 0))
9712 return omit_one_operand (type, integer_zero_node, arg0);
9715 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9716 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9718 t1 = build_int_cst_type (type, -1);
9719 return omit_one_operand (type, t1, arg1);
9723 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9724 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9726 t1 = build_int_cst_type (type, -1);
9727 return omit_one_operand (type, t1, arg0);
9730 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9731 with a constant, and the two constants have no bits in common,
9732 we should treat this as a BIT_IOR_EXPR since this may produce more
9734 if (TREE_CODE (arg0) == BIT_AND_EXPR
9735 && TREE_CODE (arg1) == BIT_AND_EXPR
9736 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9737 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9738 && integer_zerop (const_binop (BIT_AND_EXPR,
9739 TREE_OPERAND (arg0, 1),
9740 TREE_OPERAND (arg1, 1), 0)))
9742 code = BIT_IOR_EXPR;
9746 /* (X | Y) ^ X -> Y & ~ X*/
9747 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9748 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9750 tree t2 = TREE_OPERAND (arg0, 1);
9751 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9753 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9754 fold_convert (type, t1));
9758 /* (Y | X) ^ X -> Y & ~ X*/
9759 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9760 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9762 tree t2 = TREE_OPERAND (arg0, 0);
9763 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9765 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9766 fold_convert (type, t1));
9770 /* X ^ (X | Y) -> Y & ~ X*/
9771 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9772 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9774 tree t2 = TREE_OPERAND (arg1, 1);
9775 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9777 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9778 fold_convert (type, t1));
9782 /* X ^ (Y | X) -> Y & ~ X*/
9783 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9784 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9786 tree t2 = TREE_OPERAND (arg1, 0);
9787 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9789 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9790 fold_convert (type, t1));
9794 /* Convert ~X ^ ~Y to X ^ Y. */
9795 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9796 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9797 return fold_build2 (code, type,
9798 fold_convert (type, TREE_OPERAND (arg0, 0)),
9799 fold_convert (type, TREE_OPERAND (arg1, 0)));
9801 /* Convert ~X ^ C to X ^ ~C. */
9802 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9803 && TREE_CODE (arg1) == INTEGER_CST)
9804 return fold_build2 (code, type,
9805 fold_convert (type, TREE_OPERAND (arg0, 0)),
9806 fold_build1 (BIT_NOT_EXPR, type, arg1));
9808 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9809 if (TREE_CODE (arg0) == BIT_AND_EXPR
9810 && integer_onep (TREE_OPERAND (arg0, 1))
9811 && integer_onep (arg1))
9812 return fold_build2 (EQ_EXPR, type, arg0,
9813 build_int_cst (TREE_TYPE (arg0), 0));
9815 /* Fold (X & Y) ^ Y as ~X & Y. */
9816 if (TREE_CODE (arg0) == BIT_AND_EXPR
9817 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9819 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9820 return fold_build2 (BIT_AND_EXPR, type,
9821 fold_build1 (BIT_NOT_EXPR, type, tem),
9822 fold_convert (type, arg1));
9824 /* Fold (X & Y) ^ X as ~Y & X. */
9825 if (TREE_CODE (arg0) == BIT_AND_EXPR
9826 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9827 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9829 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9830 return fold_build2 (BIT_AND_EXPR, type,
9831 fold_build1 (BIT_NOT_EXPR, type, tem),
9832 fold_convert (type, arg1));
9834 /* Fold X ^ (X & Y) as X & ~Y. */
9835 if (TREE_CODE (arg1) == BIT_AND_EXPR
9836 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9838 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9839 return fold_build2 (BIT_AND_EXPR, type,
9840 fold_convert (type, arg0),
9841 fold_build1 (BIT_NOT_EXPR, type, tem));
9843 /* Fold X ^ (Y & X) as ~Y & X. */
9844 if (TREE_CODE (arg1) == BIT_AND_EXPR
9845 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9846 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9848 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9849 return fold_build2 (BIT_AND_EXPR, type,
9850 fold_build1 (BIT_NOT_EXPR, type, tem),
9851 fold_convert (type, arg0));
9854 /* See if this can be simplified into a rotate first. If that
9855 is unsuccessful continue in the association code. */
9859 if (integer_all_onesp (arg1))
9860 return non_lvalue (fold_convert (type, arg0));
9861 if (integer_zerop (arg1))
9862 return omit_one_operand (type, arg1, arg0);
9863 if (operand_equal_p (arg0, arg1, 0))
9864 return non_lvalue (fold_convert (type, arg0));
9866 /* ~X & X is always zero. */
9867 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9868 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9869 return omit_one_operand (type, integer_zero_node, arg1);
9871 /* X & ~X is always zero. */
9872 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9873 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9874 return omit_one_operand (type, integer_zero_node, arg0);
9876 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9877 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9878 && TREE_CODE (arg1) == INTEGER_CST
9879 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9880 return fold_build2 (BIT_IOR_EXPR, type,
9881 fold_build2 (BIT_AND_EXPR, type,
9882 TREE_OPERAND (arg0, 0), arg1),
9883 fold_build2 (BIT_AND_EXPR, type,
9884 TREE_OPERAND (arg0, 1), arg1));
9886 /* (X | Y) & Y is (X, Y). */
9887 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9888 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9889 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9890 /* (X | Y) & X is (Y, X). */
9891 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9892 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9893 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9894 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9895 /* X & (X | Y) is (Y, X). */
9896 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9897 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9898 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9899 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9900 /* X & (Y | X) is (Y, X). */
9901 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9902 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9903 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9904 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9906 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9907 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9908 && integer_onep (TREE_OPERAND (arg0, 1))
9909 && integer_onep (arg1))
9911 tem = TREE_OPERAND (arg0, 0);
9912 return fold_build2 (EQ_EXPR, type,
9913 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9914 build_int_cst (TREE_TYPE (tem), 1)),
9915 build_int_cst (TREE_TYPE (tem), 0));
9917 /* Fold ~X & 1 as (X & 1) == 0. */
9918 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9919 && integer_onep (arg1))
9921 tem = TREE_OPERAND (arg0, 0);
9922 return fold_build2 (EQ_EXPR, type,
9923 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9924 build_int_cst (TREE_TYPE (tem), 1)),
9925 build_int_cst (TREE_TYPE (tem), 0));
9928 /* Fold (X ^ Y) & Y as ~X & Y. */
9929 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9930 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9932 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9933 return fold_build2 (BIT_AND_EXPR, type,
9934 fold_build1 (BIT_NOT_EXPR, type, tem),
9935 fold_convert (type, arg1));
9937 /* Fold (X ^ Y) & X as ~Y & X. */
9938 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9939 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9940 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9942 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9943 return fold_build2 (BIT_AND_EXPR, type,
9944 fold_build1 (BIT_NOT_EXPR, type, tem),
9945 fold_convert (type, arg1));
9947 /* Fold X & (X ^ Y) as X & ~Y. */
9948 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9949 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9951 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9952 return fold_build2 (BIT_AND_EXPR, type,
9953 fold_convert (type, arg0),
9954 fold_build1 (BIT_NOT_EXPR, type, tem));
9956 /* Fold X & (Y ^ X) as ~Y & X. */
9957 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9958 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9959 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9961 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9962 return fold_build2 (BIT_AND_EXPR, type,
9963 fold_build1 (BIT_NOT_EXPR, type, tem),
9964 fold_convert (type, arg0));
9967 t1 = distribute_bit_expr (code, type, arg0, arg1);
9968 if (t1 != NULL_TREE)
9970 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9971 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9972 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9975 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9977 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9978 && (~TREE_INT_CST_LOW (arg1)
9979 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9980 return fold_convert (type, TREE_OPERAND (arg0, 0));
9983 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9985 This results in more efficient code for machines without a NOR
9986 instruction. Combine will canonicalize to the first form
9987 which will allow use of NOR instructions provided by the
9988 backend if they exist. */
9989 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9990 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9992 return fold_build1 (BIT_NOT_EXPR, type,
9993 build2 (BIT_IOR_EXPR, type,
9994 TREE_OPERAND (arg0, 0),
9995 TREE_OPERAND (arg1, 0)));
10001 /* Don't touch a floating-point divide by zero unless the mode
10002 of the constant can represent infinity. */
10003 if (TREE_CODE (arg1) == REAL_CST
10004 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10005 && real_zerop (arg1))
10008 /* Optimize A / A to 1.0 if we don't care about
10009 NaNs or Infinities. Skip the transformation
10010 for non-real operands. */
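/* Illustrative example (hypothetical double x, NaNs and infinities not
   honored, e.g. -ffinite-math-only): x / x folds to 1.0, keeping x only
   for its side effects.  */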
10011 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10012 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10013 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10014 && operand_equal_p (arg0, arg1, 0))
10016 tree r = build_real (TREE_TYPE (arg0), dconst1);
10018 return omit_two_operands (type, r, arg0, arg1);
10021 /* The complex version of the above A / A optimization. */
10022 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10023 && operand_equal_p (arg0, arg1, 0))
10025 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10026 if (! HONOR_NANS (TYPE_MODE (elem_type))
10027 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10029 tree r = build_real (elem_type, dconst1);
10030 /* omit_two_operands will call fold_convert for us. */
10031 return omit_two_operands (type, r, arg0, arg1);
10035 /* (-A) / (-B) -> A / B */
10036 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10037 return fold_build2 (RDIV_EXPR, type,
10038 TREE_OPERAND (arg0, 0),
10039 negate_expr (arg1));
10040 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10041 return fold_build2 (RDIV_EXPR, type,
10042 negate_expr (arg0),
10043 TREE_OPERAND (arg1, 0));
10045 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10046 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10047 && real_onep (arg1))
10048 return non_lvalue (fold_convert (type, arg0));
10050 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10051 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10052 && real_minus_onep (arg1))
10053 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10055 /* If ARG1 is a constant, we can convert this to a multiply by the
10056 reciprocal. This does not have the same rounding properties,
10057 so only do this if -funsafe-math-optimizations. We can actually
10058 always safely do it if ARG1 is a power of two, but it's hard to
10059 tell if it is or not in a portable manner. */
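/* For example, x / 2.0 can become x * 0.5 whenever we are optimizing,
   because 0.5 is the exact inverse of 2.0; x / 3.0 becomes a multiply by
   an approximate reciprocal only under -funsafe-math-optimizations,
   since 1.0/3.0 is not exactly representable. */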
10060 if (TREE_CODE (arg1) == REAL_CST)
10062 if (flag_unsafe_math_optimizations
10063 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10065 return fold_build2 (MULT_EXPR, type, arg0, tem);
10066 /* Find the reciprocal if optimizing and the result is exact. */
10070 r = TREE_REAL_CST (arg1);
10071 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10073 tem = build_real (type, r);
10074 return fold_build2 (MULT_EXPR, type,
10075 fold_convert (type, arg0), tem);
10079 /* Convert A/B/C to A/(B*C). */
10080 if (flag_unsafe_math_optimizations
10081 && TREE_CODE (arg0) == RDIV_EXPR)
10082 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10083 fold_build2 (MULT_EXPR, type,
10084 TREE_OPERAND (arg0, 1), arg1));
10086 /* Convert A/(B/C) to (A/B)*C. */
10087 if (flag_unsafe_math_optimizations
10088 && TREE_CODE (arg1) == RDIV_EXPR)
10089 return fold_build2 (MULT_EXPR, type,
10090 fold_build2 (RDIV_EXPR, type, arg0,
10091 TREE_OPERAND (arg1, 0)),
10092 TREE_OPERAND (arg1, 1));
10094 /* Convert C1/(X*C2) into (C1/C2)/X. */
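/* For example, under -funsafe-math-optimizations 6.0 / (x * 3.0)
   becomes (6.0 / 3.0) / x, i.e. 2.0 / x. */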
10095 if (flag_unsafe_math_optimizations
10096 && TREE_CODE (arg1) == MULT_EXPR
10097 && TREE_CODE (arg0) == REAL_CST
10098 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10100 tree tem = const_binop (RDIV_EXPR, arg0,
10101 TREE_OPERAND (arg1, 1), 0);
10103 return fold_build2 (RDIV_EXPR, type, tem,
10104 TREE_OPERAND (arg1, 0));
10107 if (flag_unsafe_math_optimizations)
10109 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10110 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10112 /* Optimize sin(x)/cos(x) as tan(x). */
10113 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10114 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10115 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10116 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10117 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10119 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10121 if (tanfn != NULL_TREE)
10122 return build_function_call_expr (tanfn,
10123 TREE_OPERAND (arg0, 1));
10126 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10127 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10128 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10129 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10130 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10131 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10133 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10135 if (tanfn != NULL_TREE)
10137 tree tmp = TREE_OPERAND (arg0, 1);
10138 tmp = build_function_call_expr (tanfn, tmp);
10139 return fold_build2 (RDIV_EXPR, type,
10140 build_real (type, dconst1), tmp);
10144 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10145 NaNs or Infinities. */
10146 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10147 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10148 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10150 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10151 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10153 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10154 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10155 && operand_equal_p (arg00, arg01, 0))
10157 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10159 if (cosfn != NULL_TREE)
10160 return build_function_call_expr (cosfn,
10161 TREE_OPERAND (arg0, 1));
10165 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10166 NaNs or Infinities. */
10167 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10168 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10169 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10171 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10172 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10174 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10175 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10176 && operand_equal_p (arg00, arg01, 0))
10178 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10180 if (cosfn != NULL_TREE)
10182 tree tmp = TREE_OPERAND (arg0, 1);
10183 tmp = build_function_call_expr (cosfn, tmp);
10184 return fold_build2 (RDIV_EXPR, type,
10185 build_real (type, dconst1),
10191 /* Optimize pow(x,c)/x as pow(x,c-1). */
10192 if (fcode0 == BUILT_IN_POW
10193 || fcode0 == BUILT_IN_POWF
10194 || fcode0 == BUILT_IN_POWL)
10196 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10197 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
10198 if (TREE_CODE (arg01) == REAL_CST
10199 && !TREE_OVERFLOW (arg01)
10200 && operand_equal_p (arg1, arg00, 0))
10202 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10206 c = TREE_REAL_CST (arg01);
10207 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10208 arg = build_real (type, c);
10209 arglist = build_tree_list (NULL_TREE, arg);
10210 arglist = tree_cons (NULL_TREE, arg1, arglist);
10211 return build_function_call_expr (powfn, arglist);
10215 /* Optimize x/expN(y) into x*expN(-y). */
10216 if (BUILTIN_EXPONENT_P (fcode1))
10218 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10219 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10220 tree arglist = build_tree_list (NULL_TREE,
10221 fold_convert (type, arg));
10222 arg1 = build_function_call_expr (expfn, arglist);
10223 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10226 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10227 if (fcode1 == BUILT_IN_POW
10228 || fcode1 == BUILT_IN_POWF
10229 || fcode1 == BUILT_IN_POWL)
10231 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10232 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10233 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10234 tree neg11 = fold_convert (type, negate_expr (arg11));
10235 tree arglist = tree_cons(NULL_TREE, arg10,
10236 build_tree_list (NULL_TREE, neg11));
10237 arg1 = build_function_call_expr (powfn, arglist);
10238 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10243 case TRUNC_DIV_EXPR:
10244 case FLOOR_DIV_EXPR:
10245 /* Simplify A / (B << N) where A and B are positive and B is
10246 a power of 2, to A >> (N + log2(B)). */
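/* For example, for unsigned x, x / (4 << n) becomes x >> (n + 2). */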
10247 if (TREE_CODE (arg1) == LSHIFT_EXPR
10248 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10250 tree sval = TREE_OPERAND (arg1, 0);
10251 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10253 tree sh_cnt = TREE_OPERAND (arg1, 1);
10254 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10256 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10257 sh_cnt, build_int_cst (NULL_TREE, pow2));
10258 return fold_build2 (RSHIFT_EXPR, type,
10259 fold_convert (type, arg0), sh_cnt);
10264 case ROUND_DIV_EXPR:
10265 case CEIL_DIV_EXPR:
10266 case EXACT_DIV_EXPR:
10267 if (integer_onep (arg1))
10268 return non_lvalue (fold_convert (type, arg0));
10269 if (integer_zerop (arg1))
10271 /* X / -1 is -X. */
10272 if (!TYPE_UNSIGNED (type)
10273 && TREE_CODE (arg1) == INTEGER_CST
10274 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10275 && TREE_INT_CST_HIGH (arg1) == -1)
10276 return fold_convert (type, negate_expr (arg0));
10278 /* Convert -A / -B to A / B when the type is signed and overflow is undefined. */
10280 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10281 && TREE_CODE (arg0) == NEGATE_EXPR
10282 && negate_expr_p (arg1))
10283 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10284 negate_expr (arg1));
10285 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10286 && TREE_CODE (arg1) == NEGATE_EXPR
10287 && negate_expr_p (arg0))
10288 return fold_build2 (code, type, negate_expr (arg0),
10289 TREE_OPERAND (arg1, 0));
10291 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10292 operation, EXACT_DIV_EXPR.
10294 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10295 At one time others generated faster code, but it's not clear whether they do
10296 after the last round of changes to the DIV code in expmed.c. */
10297 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10298 && multiple_of_p (type, arg0, arg1))
10299 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10301 if (TREE_CODE (arg1) == INTEGER_CST
10302 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10303 return fold_convert (type, tem);
10307 case CEIL_MOD_EXPR:
10308 case FLOOR_MOD_EXPR:
10309 case ROUND_MOD_EXPR:
10310 case TRUNC_MOD_EXPR:
10311 /* X % 1 is always zero, but be sure to preserve any side effects in X. */
10313 if (integer_onep (arg1))
10314 return omit_one_operand (type, integer_zero_node, arg0);
10316 /* X % 0, return X % 0 unchanged so that we can get the
10317 proper warnings and errors. */
10318 if (integer_zerop (arg1))
10321 /* 0 % X is always zero, but be sure to preserve any side
10322 effects in X. Place this after checking for X == 0. */
10323 if (integer_zerop (arg0))
10324 return omit_one_operand (type, integer_zero_node, arg1);
10326 /* X % -1 is zero. */
10327 if (!TYPE_UNSIGNED (type)
10328 && TREE_CODE (arg1) == INTEGER_CST
10329 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10330 && TREE_INT_CST_HIGH (arg1) == -1)
10331 return omit_one_operand (type, integer_zero_node, arg0);
10333 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10334 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
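/* For example, for unsigned x, x % 16 becomes x & 15. */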
10335 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10336 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10339 /* Also optimize A % (C << N) where C is a power of 2,
10340 to A & ((C << N) - 1). */
10341 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10342 c = TREE_OPERAND (arg1, 0);
10344 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10346 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10347 build_int_cst (TREE_TYPE (arg1), 1));
10348 return fold_build2 (BIT_AND_EXPR, type,
10349 fold_convert (type, arg0),
10350 fold_convert (type, mask));
10354 /* X % -C is the same as X % C. */
10355 if (code == TRUNC_MOD_EXPR
10356 && !TYPE_UNSIGNED (type)
10357 && TREE_CODE (arg1) == INTEGER_CST
10358 && !TREE_OVERFLOW (arg1)
10359 && TREE_INT_CST_HIGH (arg1) < 0
10360 && !TYPE_OVERFLOW_TRAPS (type)
10361 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10362 && !sign_bit_p (arg1, arg1))
10363 return fold_build2 (code, type, fold_convert (type, arg0),
10364 fold_convert (type, negate_expr (arg1)));
10366 /* X % -Y is the same as X % Y. */
10367 if (code == TRUNC_MOD_EXPR
10368 && !TYPE_UNSIGNED (type)
10369 && TREE_CODE (arg1) == NEGATE_EXPR
10370 && !TYPE_OVERFLOW_TRAPS (type))
10371 return fold_build2 (code, type, fold_convert (type, arg0),
10372 fold_convert (type, TREE_OPERAND (arg1, 0)));
10374 if (TREE_CODE (arg1) == INTEGER_CST
10375 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10376 return fold_convert (type, tem);
10382 if (integer_all_onesp (arg0))
10383 return omit_one_operand (type, arg0, arg1);
10387 /* Optimize -1 >> x for arithmetic right shifts. */
10388 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10389 return omit_one_operand (type, arg0, arg1);
10390 /* ... fall through ... */
10394 if (integer_zerop (arg1))
10395 return non_lvalue (fold_convert (type, arg0));
10396 if (integer_zerop (arg0))
10397 return omit_one_operand (type, arg0, arg1);
10399 /* Since negative shift count is not well-defined,
10400 don't try to compute it in the compiler. */
10401 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10404 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
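/* For example, (x << 3) << 5 becomes x << 8. If the combined count
   reaches the precision, say (x << 20) << 20 for a 32-bit unsigned type,
   the result folds to 0, and for rotates the combined count is reduced
   modulo the precision. */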
10405 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10406 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10407 && host_integerp (TREE_OPERAND (arg0, 1), false)
10408 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10410 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10411 + TREE_INT_CST_LOW (arg1));
10413 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10414 being well defined. */
10415 if (low >= TYPE_PRECISION (type))
10417 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10418 low = low % TYPE_PRECISION (type);
10419 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10420 return build_int_cst (type, 0);
10422 low = TYPE_PRECISION (type) - 1;
10425 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10426 build_int_cst (type, low));
10429 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10430 into x & ((unsigned)-1 >> c) for unsigned types. */
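/* For example, for a 32-bit unsigned x, (x >> 4) << 4 becomes
   x & 0xfffffff0, and (x << 4) >> 4 becomes x & 0x0fffffff. */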
10431 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10432 || (TYPE_UNSIGNED (type)
10433 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10434 && host_integerp (arg1, false)
10435 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10436 && host_integerp (TREE_OPERAND (arg0, 1), false)
10437 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10439 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10440 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10446 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10448 lshift = build_int_cst (type, -1);
10449 lshift = int_const_binop (code, lshift, arg1, 0);
10451 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10455 /* Rewrite an LROTATE_EXPR by a constant into an
10456 RROTATE_EXPR by a new constant. */
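/* For example, rotating a 32-bit value left by 8 is rewritten as
   rotating it right by 24. */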
10457 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10459 tree tem = build_int_cst (TREE_TYPE (arg1),
10460 GET_MODE_BITSIZE (TYPE_MODE (type)));
10461 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10462 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10465 /* If we have a rotate of a bit operation with the rotate count and
10466 the second operand of the bit operation both constant,
10467 permute the two operations. */
10468 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10469 && (TREE_CODE (arg0) == BIT_AND_EXPR
10470 || TREE_CODE (arg0) == BIT_IOR_EXPR
10471 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10472 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10473 return fold_build2 (TREE_CODE (arg0), type,
10474 fold_build2 (code, type,
10475 TREE_OPERAND (arg0, 0), arg1),
10476 fold_build2 (code, type,
10477 TREE_OPERAND (arg0, 1), arg1));
10479 /* Two consecutive rotates adding up to the width of the mode can be ignored. */
10481 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10482 && TREE_CODE (arg0) == RROTATE_EXPR
10483 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10484 && TREE_INT_CST_HIGH (arg1) == 0
10485 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10486 && ((TREE_INT_CST_LOW (arg1)
10487 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10488 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10489 return TREE_OPERAND (arg0, 0);
10494 if (operand_equal_p (arg0, arg1, 0))
10495 return omit_one_operand (type, arg0, arg1);
10496 if (INTEGRAL_TYPE_P (type)
10497 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10498 return omit_one_operand (type, arg1, arg0);
10499 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10505 if (operand_equal_p (arg0, arg1, 0))
10506 return omit_one_operand (type, arg0, arg1);
10507 if (INTEGRAL_TYPE_P (type)
10508 && TYPE_MAX_VALUE (type)
10509 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10510 return omit_one_operand (type, arg1, arg0);
10511 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10516 case TRUTH_ANDIF_EXPR:
10517 /* Note that the operands of this must be ints
10518 and their values must be 0 or 1.
10519 ("true" is a fixed value perhaps depending on the language.) */
10520 /* If first arg is constant zero, return it. */
10521 if (integer_zerop (arg0))
10522 return fold_convert (type, arg0);
10523 case TRUTH_AND_EXPR:
10524 /* If either arg is constant true, drop it. */
10525 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10526 return non_lvalue (fold_convert (type, arg1));
10527 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10528 /* Preserve sequence points. */
10529 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10530 return non_lvalue (fold_convert (type, arg0));
10531 /* If second arg is constant zero, result is zero, but first arg
10532 must be evaluated. */
10533 if (integer_zerop (arg1))
10534 return omit_one_operand (type, arg1, arg0);
10535 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10536 case will be handled here. */
10537 if (integer_zerop (arg0))
10538 return omit_one_operand (type, arg0, arg1);
10540 /* !X && X is always false. */
10541 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10542 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10543 return omit_one_operand (type, integer_zero_node, arg1);
10544 /* X && !X is always false. */
10545 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10546 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10547 return omit_one_operand (type, integer_zero_node, arg0);
10549 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10550 means A >= Y && A != MAX, but in this case we know that A < X <= MAX. */
10553 if (!TREE_SIDE_EFFECTS (arg0)
10554 && !TREE_SIDE_EFFECTS (arg1))
10556 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10557 if (tem && !operand_equal_p (tem, arg0, 0))
10558 return fold_build2 (code, type, tem, arg1);
10560 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10561 if (tem && !operand_equal_p (tem, arg1, 0))
10562 return fold_build2 (code, type, arg0, tem);
10566 /* We only do these simplifications if we are optimizing. */
10570 /* Check for things like (A || B) && (A || C). We can convert this
10571 to A || (B && C). Note that either operator can be any of the four
10572 truth and/or operations and the transformation will still be
10573 valid. Also note that we only care about order for the
10574 ANDIF and ORIF operators. If B contains side effects, this
10575 might change the truth-value of A. */
10576 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10577 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10578 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10579 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10580 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10581 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10583 tree a00 = TREE_OPERAND (arg0, 0);
10584 tree a01 = TREE_OPERAND (arg0, 1);
10585 tree a10 = TREE_OPERAND (arg1, 0);
10586 tree a11 = TREE_OPERAND (arg1, 1);
10587 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10588 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10589 && (code == TRUTH_AND_EXPR
10590 || code == TRUTH_OR_EXPR));
10592 if (operand_equal_p (a00, a10, 0))
10593 return fold_build2 (TREE_CODE (arg0), type, a00,
10594 fold_build2 (code, type, a01, a11));
10595 else if (commutative && operand_equal_p (a00, a11, 0))
10596 return fold_build2 (TREE_CODE (arg0), type, a00,
10597 fold_build2 (code, type, a01, a10));
10598 else if (commutative && operand_equal_p (a01, a10, 0))
10599 return fold_build2 (TREE_CODE (arg0), type, a01,
10600 fold_build2 (code, type, a00, a11));
10602 /* This case is tricky because we must either have commutative
10603 operators or else A10 must not have side-effects. */
10605 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10606 && operand_equal_p (a01, a11, 0))
10607 return fold_build2 (TREE_CODE (arg0), type,
10608 fold_build2 (code, type, a00, a10),
10612 /* See if we can build a range comparison. */
10613 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10616 /* Check for the possibility of merging component references. If our
10617 lhs is another similar operation, try to merge its rhs with our
10618 rhs. Then try to merge our lhs and rhs. */
10619 if (TREE_CODE (arg0) == code
10620 && 0 != (tem = fold_truthop (code, type,
10621 TREE_OPERAND (arg0, 1), arg1)))
10622 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10624 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10629 case TRUTH_ORIF_EXPR:
10630 /* Note that the operands of this must be ints
10631 and their values must be 0 or true.
10632 ("true" is a fixed value perhaps depending on the language.) */
10633 /* If first arg is constant true, return it. */
10634 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10635 return fold_convert (type, arg0);
10636 case TRUTH_OR_EXPR:
10637 /* If either arg is constant zero, drop it. */
10638 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10639 return non_lvalue (fold_convert (type, arg1));
10640 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10641 /* Preserve sequence points. */
10642 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10643 return non_lvalue (fold_convert (type, arg0));
10644 /* If second arg is constant true, result is true, but we must
10645 evaluate first arg. */
10646 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10647 return omit_one_operand (type, arg1, arg0);
10648 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR. */
10650 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10651 return omit_one_operand (type, arg0, arg1);
10653 /* !X || X is always true. */
10654 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10655 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10656 return omit_one_operand (type, integer_one_node, arg1);
10657 /* X || !X is always true. */
10658 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10659 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10660 return omit_one_operand (type, integer_one_node, arg0);
10664 case TRUTH_XOR_EXPR:
10665 /* If the second arg is constant zero, drop it. */
10666 if (integer_zerop (arg1))
10667 return non_lvalue (fold_convert (type, arg0));
10668 /* If the second arg is constant true, this is a logical inversion. */
10669 if (integer_onep (arg1))
10671 /* Only call invert_truthvalue if operand is a truth value. */
10672 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10673 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10675 tem = invert_truthvalue (arg0);
10676 return non_lvalue (fold_convert (type, tem));
10678 /* Identical arguments cancel to zero. */
10679 if (operand_equal_p (arg0, arg1, 0))
10680 return omit_one_operand (type, integer_zero_node, arg0);
10682 /* !X ^ X is always true. */
10683 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10684 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10685 return omit_one_operand (type, integer_one_node, arg1);
10687 /* X ^ !X is always true. */
10688 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10689 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10690 return omit_one_operand (type, integer_one_node, arg0);
10696 tem = fold_comparison (code, type, op0, op1);
10697 if (tem != NULL_TREE)
10700 /* bool_var != 0 becomes bool_var. */
10701 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10702 && code == NE_EXPR)
10703 return non_lvalue (fold_convert (type, arg0));
10705 /* bool_var == 1 becomes bool_var. */
10706 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10707 && code == EQ_EXPR)
10708 return non_lvalue (fold_convert (type, arg0));
10710 /* bool_var != 1 becomes !bool_var. */
10711 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10712 && code == NE_EXPR)
10713 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10715 /* bool_var == 0 becomes !bool_var. */
10716 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10717 && code == EQ_EXPR)
10718 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10720 /* If this is an equality comparison of the address of a non-weak
10721 object against zero, then we know the result. */
10722 if (TREE_CODE (arg0) == ADDR_EXPR
10723 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10724 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10725 && integer_zerop (arg1))
10726 return constant_boolean_node (code != EQ_EXPR, type);
10728 /* If this is an equality comparison of the address of two non-weak,
10729 unaliased symbols neither of which is extern (since we do not
10730 have access to attributes for externs), then we know the result. */
10731 if (TREE_CODE (arg0) == ADDR_EXPR
10732 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10733 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10734 && ! lookup_attribute ("alias",
10735 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10736 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10737 && TREE_CODE (arg1) == ADDR_EXPR
10738 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10739 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10740 && ! lookup_attribute ("alias",
10741 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10742 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10744 /* We know that we're looking at the address of two
10745 non-weak, unaliased, static _DECL nodes.
10747 It is both wasteful and incorrect to call operand_equal_p
10748 to compare the two ADDR_EXPR nodes. It is wasteful in that
10749 all we need to do is test pointer equality for the arguments
10750 to the two ADDR_EXPR nodes. It is incorrect to use
10751 operand_equal_p as that function is NOT equivalent to a
10752 C equality test. It can in fact return false for two
10753 objects which would test as equal using the C equality operator. */
10755 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10756 return constant_boolean_node (equal
10757 ? code == EQ_EXPR : code != EQ_EXPR,
10761 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10762 a MINUS_EXPR of a constant, we can convert it into a comparison with
10763 a revised constant as long as no overflow occurs. */
10764 if (TREE_CODE (arg1) == INTEGER_CST
10765 && (TREE_CODE (arg0) == PLUS_EXPR
10766 || TREE_CODE (arg0) == MINUS_EXPR)
10767 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10768 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10769 ? MINUS_EXPR : PLUS_EXPR,
10770 fold_convert (TREE_TYPE (arg0), arg1),
10771 TREE_OPERAND (arg0, 1), 0))
10772 && !TREE_OVERFLOW (tem))
10773 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10775 /* Similarly for a NEGATE_EXPR. */
10776 if (TREE_CODE (arg0) == NEGATE_EXPR
10777 && TREE_CODE (arg1) == INTEGER_CST
10778 && 0 != (tem = negate_expr (arg1))
10779 && TREE_CODE (tem) == INTEGER_CST
10780 && !TREE_OVERFLOW (tem))
10781 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10783 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
10784 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10785 && TREE_CODE (arg1) == INTEGER_CST
10786 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10787 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10788 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
10789 fold_convert (TREE_TYPE (arg0), arg1),
10790 TREE_OPERAND (arg0, 1)));
10792 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10793 for !=. Don't do this for ordered comparisons due to overflow. */
10794 if (TREE_CODE (arg0) == MINUS_EXPR
10795 && integer_zerop (arg1))
10796 return fold_build2 (code, type,
10797 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10799 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10800 if (TREE_CODE (arg0) == ABS_EXPR
10801 && (integer_zerop (arg1) || real_zerop (arg1)))
10802 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10804 /* If this is an EQ or NE comparison with zero and ARG0 is
10805 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10806 two operations, but the latter can be done in one less insn
10807 on machines that have only two-operand insns or on which a
10808 constant cannot be the first operand. */
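/* For example, ((1 << n) & flags) == 0 becomes ((flags >> n) & 1) == 0. */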
10809 if (TREE_CODE (arg0) == BIT_AND_EXPR
10810 && integer_zerop (arg1))
10812 tree arg00 = TREE_OPERAND (arg0, 0);
10813 tree arg01 = TREE_OPERAND (arg0, 1);
10814 if (TREE_CODE (arg00) == LSHIFT_EXPR
10815 && integer_onep (TREE_OPERAND (arg00, 0)))
10817 fold_build2 (code, type,
10818 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10819 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10820 arg01, TREE_OPERAND (arg00, 1)),
10821 fold_convert (TREE_TYPE (arg0),
10822 integer_one_node)),
10824 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10825 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10827 fold_build2 (code, type,
10828 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10829 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10830 arg00, TREE_OPERAND (arg01, 1)),
10831 fold_convert (TREE_TYPE (arg0),
10832 integer_one_node)),
10836 /* If this is an NE or EQ comparison of zero against the result of a
10837 signed MOD operation whose second operand is a power of 2, make
10838 the MOD operation unsigned since it is simpler and equivalent. */
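/* For example, for a signed int x, (x % 4) == 0 is rewritten as
   ((unsigned int) x % 4) == 0, which can then fold further into a
   simple mask test. */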
10839 if (integer_zerop (arg1)
10840 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10841 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10842 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10843 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10844 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10845 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10847 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10848 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10849 fold_convert (newtype,
10850 TREE_OPERAND (arg0, 0)),
10851 fold_convert (newtype,
10852 TREE_OPERAND (arg0, 1)));
10854 return fold_build2 (code, type, newmod,
10855 fold_convert (newtype, arg1));
10858 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10859 C1 is a valid shift constant, and C2 is a power of two, i.e. a single bit. */
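/* For example, for a 32-bit x, ((x >> 3) & 4) != 0 becomes
   (x & 32) != 0, since 4 << 3 == 32 still fits in the precision. */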
10861 if (TREE_CODE (arg0) == BIT_AND_EXPR
10862 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10863 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10865 && integer_pow2p (TREE_OPERAND (arg0, 1))
10866 && integer_zerop (arg1))
10868 tree itype = TREE_TYPE (arg0);
10869 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10870 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10872 /* Check for a valid shift count. */
10873 if (TREE_INT_CST_HIGH (arg001) == 0
10874 && TREE_INT_CST_LOW (arg001) < prec)
10876 tree arg01 = TREE_OPERAND (arg0, 1);
10877 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10878 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10879 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10880 can be rewritten as (X & (C2 << C1)) != 0. */
10881 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10883 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10884 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10885 return fold_build2 (code, type, tem, arg1);
10887 /* Otherwise, for signed (arithmetic) shifts,
10888 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10889 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10890 else if (!TYPE_UNSIGNED (itype))
10891 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10892 arg000, build_int_cst (itype, 0));
10893 /* Otherwise, for unsigned (logical) shifts,
10894 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10895 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10897 return omit_one_operand (type,
10898 code == EQ_EXPR ? integer_one_node
10899 : integer_zero_node,
10904 /* If this is an NE comparison of zero with an AND of one, remove the
10905 comparison since the AND will give the correct value. */
10906 if (code == NE_EXPR
10907 && integer_zerop (arg1)
10908 && TREE_CODE (arg0) == BIT_AND_EXPR
10909 && integer_onep (TREE_OPERAND (arg0, 1)))
10910 return fold_convert (type, arg0);
10912 /* If we have (A & C) == C where C is a power of 2, convert this into
10913 (A & C) != 0. Similarly for NE_EXPR. */
10914 if (TREE_CODE (arg0) == BIT_AND_EXPR
10915 && integer_pow2p (TREE_OPERAND (arg0, 1))
10916 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10917 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10918 arg0, fold_convert (TREE_TYPE (arg0),
10919 integer_zero_node));
10921 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10922 bit, then fold the expression into A < 0 or A >= 0. */
10923 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10927 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10928 Similarly for NE_EXPR. */
10929 if (TREE_CODE (arg0) == BIT_AND_EXPR
10930 && TREE_CODE (arg1) == INTEGER_CST
10931 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10933 tree notc = fold_build1 (BIT_NOT_EXPR,
10934 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10935 TREE_OPERAND (arg0, 1));
10936 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10938 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10939 if (integer_nonzerop (dandnotc))
10940 return omit_one_operand (type, rslt, arg0);
10943 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10944 Similarly for NE_EXPR. */
10945 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10946 && TREE_CODE (arg1) == INTEGER_CST
10947 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10949 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10950 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10951 TREE_OPERAND (arg0, 1), notd);
10952 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10953 if (integer_nonzerop (candnotd))
10954 return omit_one_operand (type, rslt, arg0);
10957 /* If this is a comparison of a field, we may be able to simplify it. */
10958 if ((TREE_CODE (arg0) == COMPONENT_REF
10959 || TREE_CODE (arg0) == BIT_FIELD_REF)
10960 /* Handle the constant case even without -O
10961 to make sure the warnings are given. */
10962 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10964 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10969 /* Optimize comparisons of strlen vs zero to a compare of the
10970 first character of the string vs zero. To wit,
10971 strlen(ptr) == 0 => *ptr == 0
10972 strlen(ptr) != 0 => *ptr != 0
10973 Other cases should reduce to one of these two (or a constant)
10974 due to the return value of strlen being unsigned. */
10975 if (TREE_CODE (arg0) == CALL_EXPR
10976 && integer_zerop (arg1))
10978 tree fndecl = get_callee_fndecl (arg0);
10982 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10983 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10984 && (arglist = TREE_OPERAND (arg0, 1))
10985 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10986 && ! TREE_CHAIN (arglist))
10988 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10989 return fold_build2 (code, type, iref,
10990 build_int_cst (TREE_TYPE (iref), 0));
10994 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10995 of X. Similarly fold (X >> C) == 0 into X >= 0. */
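/* For example, for a signed 32-bit x, (x >> 31) != 0 becomes x < 0
   and (x >> 31) == 0 becomes x >= 0. */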
10996 if (TREE_CODE (arg0) == RSHIFT_EXPR
10997 && integer_zerop (arg1)
10998 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11000 tree arg00 = TREE_OPERAND (arg0, 0);
11001 tree arg01 = TREE_OPERAND (arg0, 1);
11002 tree itype = TREE_TYPE (arg00);
11003 if (TREE_INT_CST_HIGH (arg01) == 0
11004 && TREE_INT_CST_LOW (arg01)
11005 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11007 if (TYPE_UNSIGNED (itype))
11009 itype = lang_hooks.types.signed_type (itype);
11010 arg00 = fold_convert (itype, arg00);
11012 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11013 type, arg00, build_int_cst (itype, 0));
11017 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11018 if (integer_zerop (arg1)
11019 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11020 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11021 TREE_OPERAND (arg0, 1));
11023 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11024 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11025 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11026 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11027 build_int_cst (TREE_TYPE (arg1), 0));
11028 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11029 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11030 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11031 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11032 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11033 build_int_cst (TREE_TYPE (arg1), 0));
11035 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11036 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11037 && TREE_CODE (arg1) == INTEGER_CST
11038 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11039 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11040 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11041 TREE_OPERAND (arg0, 1), arg1));
11043 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11044 (X & C) == 0 when C is a single bit. */
11045 if (TREE_CODE (arg0) == BIT_AND_EXPR
11046 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11047 && integer_zerop (arg1)
11048 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11050 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11051 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11052 TREE_OPERAND (arg0, 1));
11053 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11057 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11058 constant C is a power of two, i.e. a single bit. */
11059 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11060 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11061 && integer_zerop (arg1)
11062 && integer_pow2p (TREE_OPERAND (arg0, 1))
11063 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11064 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11066 tree arg00 = TREE_OPERAND (arg0, 0);
11067 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11068 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11071 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11072 when C is a power of two, i.e. a single bit. */
11073 if (TREE_CODE (arg0) == BIT_AND_EXPR
11074 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11075 && integer_zerop (arg1)
11076 && integer_pow2p (TREE_OPERAND (arg0, 1))
11077 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11078 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11080 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11081 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11082 arg000, TREE_OPERAND (arg0, 1));
11083 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11084 tem, build_int_cst (TREE_TYPE (tem), 0));
11087 if (integer_zerop (arg1)
11088 && tree_expr_nonzero_p (arg0))
11090 tree res = constant_boolean_node (code==NE_EXPR, type);
11091 return omit_one_operand (type, res, arg0);
11094 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11095 if (TREE_CODE (arg0) == NEGATE_EXPR
11096 && TREE_CODE (arg1) == NEGATE_EXPR)
11097 return fold_build2 (code, type,
11098 TREE_OPERAND (arg0, 0),
11099 TREE_OPERAND (arg1, 0));
11101 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11102 if (TREE_CODE (arg0) == BIT_AND_EXPR
11103 && TREE_CODE (arg1) == BIT_AND_EXPR)
11105 tree arg00 = TREE_OPERAND (arg0, 0);
11106 tree arg01 = TREE_OPERAND (arg0, 1);
11107 tree arg10 = TREE_OPERAND (arg1, 0);
11108 tree arg11 = TREE_OPERAND (arg1, 1);
11109 tree itype = TREE_TYPE (arg0);
11111 if (operand_equal_p (arg01, arg11, 0))
11112 return fold_build2 (code, type,
11113 fold_build2 (BIT_AND_EXPR, itype,
11114 fold_build2 (BIT_XOR_EXPR, itype,
11117 build_int_cst (itype, 0));
11119 if (operand_equal_p (arg01, arg10, 0))
11120 return fold_build2 (code, type,
11121 fold_build2 (BIT_AND_EXPR, itype,
11122 fold_build2 (BIT_XOR_EXPR, itype,
11125 build_int_cst (itype, 0));
11127 if (operand_equal_p (arg00, arg11, 0))
11128 return fold_build2 (code, type,
11129 fold_build2 (BIT_AND_EXPR, itype,
11130 fold_build2 (BIT_XOR_EXPR, itype,
11133 build_int_cst (itype, 0));
11135 if (operand_equal_p (arg00, arg10, 0))
11136 return fold_build2 (code, type,
11137 fold_build2 (BIT_AND_EXPR, itype,
11138 fold_build2 (BIT_XOR_EXPR, itype,
11141 build_int_cst (itype, 0));
11144 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11145 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11147 tree arg00 = TREE_OPERAND (arg0, 0);
11148 tree arg01 = TREE_OPERAND (arg0, 1);
11149 tree arg10 = TREE_OPERAND (arg1, 0);
11150 tree arg11 = TREE_OPERAND (arg1, 1);
11151 tree itype = TREE_TYPE (arg0);
11153 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11154 operand_equal_p guarantees no side-effects so we don't need
11155 to use omit_one_operand on Z. */
11156 if (operand_equal_p (arg01, arg11, 0))
11157 return fold_build2 (code, type, arg00, arg10);
11158 if (operand_equal_p (arg01, arg10, 0))
11159 return fold_build2 (code, type, arg00, arg11);
11160 if (operand_equal_p (arg00, arg11, 0))
11161 return fold_build2 (code, type, arg01, arg10);
11162 if (operand_equal_p (arg00, arg10, 0))
11163 return fold_build2 (code, type, arg01, arg11);
11165 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11166 if (TREE_CODE (arg01) == INTEGER_CST
11167 && TREE_CODE (arg11) == INTEGER_CST)
11168 return fold_build2 (code, type,
11169 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11170 fold_build2 (BIT_XOR_EXPR, itype,
11180 tem = fold_comparison (code, type, op0, op1);
11181 if (tem != NULL_TREE)
11184 /* Transform comparisons of the form X +- C CMP X. */
11185 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11186 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11187 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11188 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11189 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11190 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11192 tree arg01 = TREE_OPERAND (arg0, 1);
11193 enum tree_code code0 = TREE_CODE (arg0);
11196 if (TREE_CODE (arg01) == REAL_CST)
11197 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11199 is_positive = tree_int_cst_sgn (arg01);
11201 /* (X - c) > X becomes false. */
11202 if (code == GT_EXPR
11203 && ((code0 == MINUS_EXPR && is_positive >= 0)
11204 || (code0 == PLUS_EXPR && is_positive <= 0)))
11205 return constant_boolean_node (0, type);
11207 /* Likewise (X + c) < X becomes false. */
11208 if (code == LT_EXPR
11209 && ((code0 == PLUS_EXPR && is_positive >= 0)
11210 || (code0 == MINUS_EXPR && is_positive <= 0)))
11211 return constant_boolean_node (0, type);
11213 /* Convert (X - c) <= X to true. */
11214 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11216 && ((code0 == MINUS_EXPR && is_positive >= 0)
11217 || (code0 == PLUS_EXPR && is_positive <= 0)))
11218 return constant_boolean_node (1, type);
11220 /* Convert (X + c) >= X to true. */
11221 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11223 && ((code0 == PLUS_EXPR && is_positive >= 0)
11224 || (code0 == MINUS_EXPR && is_positive <= 0)))
11225 return constant_boolean_node (1, type);
11227 if (TREE_CODE (arg01) == INTEGER_CST)
11229 /* Convert X + c > X and X - c < X to true for integers. */
11230 if (code == GT_EXPR
11231 && ((code0 == PLUS_EXPR && is_positive > 0)
11232 || (code0 == MINUS_EXPR && is_positive < 0)))
11233 return constant_boolean_node (1, type);
11235 if (code == LT_EXPR
11236 && ((code0 == MINUS_EXPR && is_positive > 0)
11237 || (code0 == PLUS_EXPR && is_positive < 0)))
11238 return constant_boolean_node (1, type);
11240 /* Convert X + c <= X and X - c >= X to false for integers. */
11241 if (code == LE_EXPR
11242 && ((code0 == PLUS_EXPR && is_positive > 0)
11243 || (code0 == MINUS_EXPR && is_positive < 0)))
11244 return constant_boolean_node (0, type);
11246 if (code == GE_EXPR
11247 && ((code0 == MINUS_EXPR && is_positive > 0)
11248 || (code0 == PLUS_EXPR && is_positive < 0)))
11249 return constant_boolean_node (0, type);
11253 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11254 This transformation affects the cases which are handled in later
11255 optimizations involving comparisons with non-negative constants. */
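/* For example, x >= 1 becomes x > 0, and x < 16 becomes x <= 15. */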
11256 if (TREE_CODE (arg1) == INTEGER_CST
11257 && TREE_CODE (arg0) != INTEGER_CST
11258 && tree_int_cst_sgn (arg1) > 0)
11260 if (code == GE_EXPR)
11262 arg1 = const_binop (MINUS_EXPR, arg1,
11263 build_int_cst (TREE_TYPE (arg1), 1), 0);
11264 return fold_build2 (GT_EXPR, type, arg0,
11265 fold_convert (TREE_TYPE (arg0), arg1));
11267 if (code == LT_EXPR)
11269 arg1 = const_binop (MINUS_EXPR, arg1,
11270 build_int_cst (TREE_TYPE (arg1), 1), 0);
11271 return fold_build2 (LE_EXPR, type, arg0,
11272 fold_convert (TREE_TYPE (arg0), arg1));
11276 /* Comparisons with the highest or lowest possible integer of
11277 the specified precision will have known values. */
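/* For example, if c has an unsigned 8-bit type (maximum 255),
   c <= 255 folds to true and c > 255 folds to false. */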
11279 tree arg1_type = TREE_TYPE (arg1);
11280 unsigned int width = TYPE_PRECISION (arg1_type);
11282 if (TREE_CODE (arg1) == INTEGER_CST
11283 && !TREE_OVERFLOW (arg1)
11284 && width <= 2 * HOST_BITS_PER_WIDE_INT
11285 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11287 HOST_WIDE_INT signed_max_hi;
11288 unsigned HOST_WIDE_INT signed_max_lo;
11289 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11291 if (width <= HOST_BITS_PER_WIDE_INT)
11293 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11298 if (TYPE_UNSIGNED (arg1_type))
11300 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11306 max_lo = signed_max_lo;
11307 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11313 width -= HOST_BITS_PER_WIDE_INT;
11314 signed_max_lo = -1;
11315 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11320 if (TYPE_UNSIGNED (arg1_type))
11322 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11327 max_hi = signed_max_hi;
11328 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11332 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11333 && TREE_INT_CST_LOW (arg1) == max_lo)
11337 return omit_one_operand (type, integer_zero_node, arg0);
11340 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11343 return omit_one_operand (type, integer_one_node, arg0);
11346 return fold_build2 (NE_EXPR, type, arg0, arg1);
11348 /* The GE_EXPR and LT_EXPR cases above are not normally
11349 reached because of previous transformations. */
11354 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11356 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11360 arg1 = const_binop (PLUS_EXPR, arg1,
11361 build_int_cst (TREE_TYPE (arg1), 1), 0);
11362 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11364 arg1 = const_binop (PLUS_EXPR, arg1,
11365 build_int_cst (TREE_TYPE (arg1), 1), 0);
11366 return fold_build2 (NE_EXPR, type, arg0, arg1);
11370 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11372 && TREE_INT_CST_LOW (arg1) == min_lo)
11376 return omit_one_operand (type, integer_zero_node, arg0);
11379 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11382 return omit_one_operand (type, integer_one_node, arg0);
11385 return fold_build2 (NE_EXPR, type, op0, op1);
11390 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11392 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11396 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11397 return fold_build2 (NE_EXPR, type, arg0, arg1);
11399 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11400 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11405 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11406 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11407 && TYPE_UNSIGNED (arg1_type)
11408 /* We will flip the signedness of the comparison operator
11409 associated with the mode of arg1, so the sign bit is
11410 specified by this mode. Check that arg1 is the signed
11411 max associated with this sign bit. */
11412 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11413 /* signed_type does not work on pointer types. */
11414 && INTEGRAL_TYPE_P (arg1_type))
11416 /* The following case also applies to X < signed_max+1
11417 and X >= signed_max+1 because of previous transformations. */
11418 if (code == LE_EXPR || code == GT_EXPR)
11421 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11422 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11423 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11424 type, fold_convert (st0, arg0),
11425 build_int_cst (st1, 0));
11431 /* If we are comparing an ABS_EXPR with a constant, we can
11432 convert all the cases into explicit comparisons, but they may
11433 well not be faster than doing the ABS and one comparison.
11434 But ABS (X) <= C is a range comparison, which becomes a subtraction
11435 and a comparison, and is probably faster. */
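/* For example, ABS_EXPR <x> <= 5 becomes x >= -5 && x <= 5. */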
11436 if (code == LE_EXPR
11437 && TREE_CODE (arg1) == INTEGER_CST
11438 && TREE_CODE (arg0) == ABS_EXPR
11439 && ! TREE_SIDE_EFFECTS (arg0)
11440 && (0 != (tem = negate_expr (arg1)))
11441 && TREE_CODE (tem) == INTEGER_CST
11442 && !TREE_OVERFLOW (tem))
11443 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11444 build2 (GE_EXPR, type,
11445 TREE_OPERAND (arg0, 0), tem),
11446 build2 (LE_EXPR, type,
11447 TREE_OPERAND (arg0, 0), arg1));
11449 /* Convert ABS_EXPR<x> >= 0 to true. */
11450 if (code == GE_EXPR
11451 && tree_expr_nonnegative_p (arg0)
11452 && (integer_zerop (arg1)
11453 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11454 && real_zerop (arg1))))
11455 return omit_one_operand (type, integer_one_node, arg0);
11457 /* Convert ABS_EXPR<x> < 0 to false. */
11458 if (code == LT_EXPR
11459 && tree_expr_nonnegative_p (arg0)
11460 && (integer_zerop (arg1) || real_zerop (arg1)))
11461 return omit_one_operand (type, integer_zero_node, arg0);
11463 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11464 and similarly for >= into !=. */
11465 if ((code == LT_EXPR || code == GE_EXPR)
11466 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11467 && TREE_CODE (arg1) == LSHIFT_EXPR
11468 && integer_onep (TREE_OPERAND (arg1, 0)))
11469 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11470 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11471 TREE_OPERAND (arg1, 1)),
11472 build_int_cst (TREE_TYPE (arg0), 0));
11474 if ((code == LT_EXPR || code == GE_EXPR)
11475 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11476 && (TREE_CODE (arg1) == NOP_EXPR
11477 || TREE_CODE (arg1) == CONVERT_EXPR)
11478 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11479 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11481 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11482 fold_convert (TREE_TYPE (arg0),
11483 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11484 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11486 build_int_cst (TREE_TYPE (arg0), 0));
11490 case UNORDERED_EXPR:
11498 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11500 t1 = fold_relational_const (code, type, arg0, arg1);
11501 if (t1 != NULL_TREE)
11505 /* If the first operand is NaN, the result is constant. */
11506 if (TREE_CODE (arg0) == REAL_CST
11507 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11508 && (code != LTGT_EXPR || ! flag_trapping_math))
11510 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11511 ? integer_zero_node
11512 : integer_one_node;
11513 return omit_one_operand (type, t1, arg1);
11516 /* If the second operand is NaN, the result is constant. */
11517 if (TREE_CODE (arg1) == REAL_CST
11518 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11519 && (code != LTGT_EXPR || ! flag_trapping_math))
11521 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11522 ? integer_zero_node
11523 : integer_one_node;
11524 return omit_one_operand (type, t1, arg0);
11527 /* Simplify unordered comparison of something with itself. */
11528 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11529 && operand_equal_p (arg0, arg1, 0))
11530 return constant_boolean_node (1, type);
11532 if (code == LTGT_EXPR
11533 && !flag_trapping_math
11534 && operand_equal_p (arg0, arg1, 0))
11535 return constant_boolean_node (0, type);
11537 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11539 tree targ0 = strip_float_extensions (arg0);
11540 tree targ1 = strip_float_extensions (arg1);
11541 tree newtype = TREE_TYPE (targ0);
11543 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11544 newtype = TREE_TYPE (targ1);
11546 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11547 return fold_build2 (code, type, fold_convert (newtype, targ0),
11548 fold_convert (newtype, targ1));
11553 case COMPOUND_EXPR:
11554 /* When pedantic, a compound expression can be neither an lvalue
11555 nor an integer constant expression. */
11556 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11558 /* Don't let (0, 0) be a null pointer constant. */
11559 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11560 : fold_convert (type, arg1);
11561 return pedantic_non_lvalue (tem);
11564 if ((TREE_CODE (arg0) == REAL_CST
11565 && TREE_CODE (arg1) == REAL_CST)
11566 || (TREE_CODE (arg0) == INTEGER_CST
11567 && TREE_CODE (arg1) == INTEGER_CST))
11568 return build_complex (type, arg0, arg1);
11572 /* An ASSERT_EXPR should never be passed to fold_binary. */
11573 gcc_unreachable ();
11577 } /* switch (code) */
11580 /* Callback for walk_tree, looking for LABEL_EXPR.
11581 Returns *TP if it is a LABEL_EXPR, otherwise NULL_TREE.
11582 Do not check the sub-tree of GOTO_EXPR. */
11585 contains_label_1 (tree *tp,
11586 int *walk_subtrees,
11587 void *data ATTRIBUTE_UNUSED)
11589 switch (TREE_CODE (*tp))
11594 *walk_subtrees = 0;
11601 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11602 accessible from outside the sub-tree. Returns false if no
11603 addressable label is found. */
11606 contains_label_p (tree st)
11608 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11611 /* Fold a ternary expression of code CODE and type TYPE with operands
11612 OP0, OP1, and OP2. Return the folded expression if folding is
11613 successful. Otherwise, return NULL_TREE. */
11616 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11619 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11620 enum tree_code_class kind = TREE_CODE_CLASS (code);
11622 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11623 && TREE_CODE_LENGTH (code) == 3);
11625 /* Strip any conversions that don't change the mode. This is safe
11626 for every expression, except for a comparison expression because
11627 its signedness is derived from its operands. So, in the latter
11628 case, only strip conversions that don't change the signedness.
11630 Note that this is done as an internal manipulation within the
11631 constant folder, in order to find the simplest representation of
11632 the arguments so that their form can be studied. In any case,
11633 the appropriate type conversions should be put back in the tree
11634 that will get out of the constant folder. */
11649 case COMPONENT_REF:
11650 if (TREE_CODE (arg0) == CONSTRUCTOR
11651 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11653 unsigned HOST_WIDE_INT idx;
11655 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11662 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11663 so all simple results must be passed through pedantic_non_lvalue. */
11664 if (TREE_CODE (arg0) == INTEGER_CST)
11666 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11667 tem = integer_zerop (arg0) ? op2 : op1;
11668 /* Only optimize constant conditions when the selected branch
11669 has the same type as the COND_EXPR. This avoids optimizing
11670 away "c ? x : throw", where the throw has a void type.
11671 Avoid throwing away that operand which contains a label. */
11672 if ((!TREE_SIDE_EFFECTS (unused_op)
11673 || !contains_label_p (unused_op))
11674 && (! VOID_TYPE_P (TREE_TYPE (tem))
11675 || VOID_TYPE_P (type)))
11676 return pedantic_non_lvalue (tem);
11679 if (operand_equal_p (arg1, op2, 0))
11680 return pedantic_omit_one_operand (type, arg1, arg0);
11682 /* If we have A op B ? A : C, we may be able to convert this to a
11683 simpler expression, depending on the operation and the values
11684 of B and C. Signed zeros prevent all of these transformations,
11685 for reasons given above each one.
11687 Also try swapping the arguments and inverting the conditional. */
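/* For example, x > y ? x : y can be recognized by
   fold_cond_expr_with_comparison as MAX_EXPR <x, y> when signed
   zeros need not be honored. */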
11688 if (COMPARISON_CLASS_P (arg0)
11689 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11690 arg1, TREE_OPERAND (arg0, 1))
11691 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11693 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11698 if (COMPARISON_CLASS_P (arg0)
11699 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11701 TREE_OPERAND (arg0, 1))
11702 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11704 tem = fold_truth_not_expr (arg0);
11705 if (tem && COMPARISON_CLASS_P (tem))
11707 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11713 /* If the second operand is simpler than the third, swap them
11714 since that produces better jump optimization results. */
11715 if (truth_value_p (TREE_CODE (arg0))
11716 && tree_swap_operands_p (op1, op2, false))
11718 /* See if this can be inverted. If it can't, possibly because
11719 it was a floating-point inequality comparison, don't do anything.  */
11721 tem = fold_truth_not_expr (arg0);
11723 return fold_build3 (code, type, tem, op2, op1);
11726 /* Convert A ? 1 : 0 to simply A. */
11727 if (integer_onep (op1)
11728 && integer_zerop (op2)
11729 /* If we try to convert OP0 to our type, the
11730 call to fold will try to move the conversion inside
11731 a COND, which will recurse. In that case, the COND_EXPR
11732 is probably the best choice, so leave it alone. */
11733 && type == TREE_TYPE (arg0))
11734 return pedantic_non_lvalue (arg0);
11736 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11737 over COND_EXPR in cases such as floating point comparisons. */
11738 if (integer_zerop (op1)
11739 && integer_onep (op2)
11740 && truth_value_p (TREE_CODE (arg0)))
11741 return pedantic_non_lvalue (fold_convert (type,
11742 invert_truthvalue (arg0)));
11744 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11745 if (TREE_CODE (arg0) == LT_EXPR
11746 && integer_zerop (TREE_OPERAND (arg0, 1))
11747 && integer_zerop (op2)
11748 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11750 /* sign_bit_p only checks ARG1 bits within A's precision.
11751 If <sign bit of A> has wider type than A, bits outside
11752 of A's precision in <sign bit of A> need to be checked.
11753 If they are all 0, this optimization needs to be done
11754 in unsigned A's type; if they are all 1, in signed A's type;
11755 otherwise this can't be done.  */
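/* A worked instance: if A is a 'signed char' and ARG1 is the 'int'
   constant 0x80, the bits of ARG1 above A's 8-bit precision are all 0,
   so the AND is done on (unsigned char) A; had ARG1 been the
   sign-extended constant 0xffffff80, those bits would all be 1 and
   'signed char' would be used.  The masks computed below select exactly
   the bits between the two precisions.  */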
11756 if (TYPE_PRECISION (TREE_TYPE (tem))
11757 < TYPE_PRECISION (TREE_TYPE (arg1))
11758 && TYPE_PRECISION (TREE_TYPE (tem))
11759 < TYPE_PRECISION (type))
11761 unsigned HOST_WIDE_INT mask_lo;
11762 HOST_WIDE_INT mask_hi;
11763 int inner_width, outer_width;
11766 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11767 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11768 if (outer_width > TYPE_PRECISION (type))
11769 outer_width = TYPE_PRECISION (type);
11771 if (outer_width > HOST_BITS_PER_WIDE_INT)
11773 mask_hi = ((unsigned HOST_WIDE_INT) -1
11774 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11780 mask_lo = ((unsigned HOST_WIDE_INT) -1
11781 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11783 if (inner_width > HOST_BITS_PER_WIDE_INT)
11785 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11786 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11790 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11791 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11793 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11794 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11796 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11797 tem = fold_convert (tem_type, tem);
11799 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11800 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11802 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11803 tem = fold_convert (tem_type, tem);
11810 return fold_convert (type,
11811 fold_build2 (BIT_AND_EXPR,
11812 TREE_TYPE (tem), tem,
11813 fold_convert (TREE_TYPE (tem),
11817 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11818 already handled above. */
11819 if (TREE_CODE (arg0) == BIT_AND_EXPR
11820 && integer_onep (TREE_OPERAND (arg0, 1))
11821 && integer_zerop (op2)
11822 && integer_pow2p (arg1))
11824 tree tem = TREE_OPERAND (arg0, 0);
11826 if (TREE_CODE (tem) == RSHIFT_EXPR
11827 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11828 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11829 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11830 return fold_build2 (BIT_AND_EXPR, type,
11831 TREE_OPERAND (tem, 0), arg1);
11834 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11835 is probably obsolete because the first operand should be a
11836 truth value (that's why we have the two cases above), but let's
11837 leave it in until we can confirm this for all front-ends. */
11838 if (integer_zerop (op2)
11839 && TREE_CODE (arg0) == NE_EXPR
11840 && integer_zerop (TREE_OPERAND (arg0, 1))
11841 && integer_pow2p (arg1)
11842 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11843 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11844 arg1, OEP_ONLY_CONST))
11845 return pedantic_non_lvalue (fold_convert (type,
11846 TREE_OPERAND (arg0, 0)));
11848 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11849 if (integer_zerop (op2)
11850 && truth_value_p (TREE_CODE (arg0))
11851 && truth_value_p (TREE_CODE (arg1)))
11852 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11853 fold_convert (type, arg0),
11856 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11857 if (integer_onep (op2)
11858 && truth_value_p (TREE_CODE (arg0))
11859 && truth_value_p (TREE_CODE (arg1)))
11861 /* Only perform transformation if ARG0 is easily inverted. */
11862 tem = fold_truth_not_expr (arg0);
11864 return fold_build2 (TRUTH_ORIF_EXPR, type,
11865 fold_convert (type, tem),
11869 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11870 if (integer_zerop (arg1)
11871 && truth_value_p (TREE_CODE (arg0))
11872 && truth_value_p (TREE_CODE (op2)))
11874 /* Only perform transformation if ARG0 is easily inverted. */
11875 tem = fold_truth_not_expr (arg0);
11877 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11878 fold_convert (type, tem),
11882 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11883 if (integer_onep (arg1)
11884 && truth_value_p (TREE_CODE (arg0))
11885 && truth_value_p (TREE_CODE (op2)))
11886 return fold_build2 (TRUTH_ORIF_EXPR, type,
11887 fold_convert (type, arg0),
11893 /* Check for a built-in function. */
11894 if (TREE_CODE (op0) == ADDR_EXPR
11895 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11896 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11897 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11900 case BIT_FIELD_REF:
11901 if (TREE_CODE (arg0) == VECTOR_CST
11902 && type == TREE_TYPE (TREE_TYPE (arg0))
11903 && host_integerp (arg1, 1)
11904 && host_integerp (op2, 1))
11906 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11907 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11910 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11911 && (idx % width) == 0
11912 && (idx = idx / width)
11913 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11915 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11916 while (idx-- > 0 && elements)
11917 elements = TREE_CHAIN (elements);
11919 return TREE_VALUE (elements);
11921 return fold_convert (type, integer_zero_node);
11928 } /* switch (code) */
11931 /* Perform constant folding and related simplification of EXPR.
11932 The related simplifications include x*1 => x, x*0 => 0, etc.,
11933 and application of the associative law.
11934 NOP_EXPR conversions may be removed freely (as long as we
11935 are careful not to change the type of the overall expression).
11936 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11937 but we can constant-fold them if they have constant operands. */
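/* Roughly, an expression built as build2 (PLUS_EXPR, type, a, b) is
   dispatched by its arity to fold_binary below, and the original tree is
   returned unchanged whenever no simplification applies.  */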
11939 #ifdef ENABLE_FOLD_CHECKING
11940 # define fold(x) fold_1 (x)
11941 static tree fold_1 (tree);
11947 const tree t = expr;
11948 enum tree_code code = TREE_CODE (t);
11949 enum tree_code_class kind = TREE_CODE_CLASS (code);
11952 /* Return right away if a constant. */
11953 if (kind == tcc_constant)
11956 if (IS_EXPR_CODE_CLASS (kind)
11957 || IS_GIMPLE_STMT_CODE_CLASS (kind))
11959 tree type = TREE_TYPE (t);
11960 tree op0, op1, op2;
11962 switch (TREE_CODE_LENGTH (code))
11965 op0 = TREE_OPERAND (t, 0);
11966 tem = fold_unary (code, type, op0);
11967 return tem ? tem : expr;
11969 op0 = TREE_OPERAND (t, 0);
11970 op1 = TREE_OPERAND (t, 1);
11971 tem = fold_binary (code, type, op0, op1);
11972 return tem ? tem : expr;
11974 op0 = TREE_OPERAND (t, 0);
11975 op1 = TREE_OPERAND (t, 1);
11976 op2 = TREE_OPERAND (t, 2);
11977 tem = fold_ternary (code, type, op0, op1, op2);
11978 return tem ? tem : expr;
11987 return fold (DECL_INITIAL (t));
11991 } /* switch (code) */
11994 #ifdef ENABLE_FOLD_CHECKING
11997 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11998 static void fold_check_failed (tree, tree);
11999 void print_fold_checksum (tree);
12001 /* When --enable-checking=fold, compute a digest of expr before
12002 and after the actual fold call to verify that fold did not accidentally
12003 change the original expr.  */
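/* The digest is an MD5 sum computed by fold_checksum_tree over every
   reachable node; a pointer hash table avoids revisiting shared sub-trees,
   and fields that fold is allowed to touch (DECL_ASSEMBLER_NAME, cached
   type values, etc.) are masked out before hashing.  A mismatch is
   reported through fold_check_failed as an internal error.  */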
12009 struct md5_ctx ctx;
12010 unsigned char checksum_before[16], checksum_after[16];
12013 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12014 md5_init_ctx (&ctx);
12015 fold_checksum_tree (expr, &ctx, ht);
12016 md5_finish_ctx (&ctx, checksum_before);
12019 ret = fold_1 (expr);
12021 md5_init_ctx (&ctx);
12022 fold_checksum_tree (expr, &ctx, ht);
12023 md5_finish_ctx (&ctx, checksum_after);
12026 if (memcmp (checksum_before, checksum_after, 16))
12027 fold_check_failed (expr, ret);
12033 print_fold_checksum (tree expr)
12035 struct md5_ctx ctx;
12036 unsigned char checksum[16], cnt;
12039 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12040 md5_init_ctx (&ctx);
12041 fold_checksum_tree (expr, &ctx, ht);
12042 md5_finish_ctx (&ctx, checksum);
12044 for (cnt = 0; cnt < 16; ++cnt)
12045 fprintf (stderr, "%02x", checksum[cnt]);
12046 putc ('\n', stderr);
12050 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12052 internal_error ("fold check: original tree changed by fold");
12056 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12059 enum tree_code code;
12060 struct tree_function_decl buf;
12065 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12066 <= sizeof (struct tree_function_decl))
12067 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12070 slot = htab_find_slot (ht, expr, INSERT);
12074 code = TREE_CODE (expr);
12075 if (TREE_CODE_CLASS (code) == tcc_declaration
12076 && DECL_ASSEMBLER_NAME_SET_P (expr))
12078 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12079 memcpy ((char *) &buf, expr, tree_size (expr));
12080 expr = (tree) &buf;
12081 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12083 else if (TREE_CODE_CLASS (code) == tcc_type
12084 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12085 || TYPE_CACHED_VALUES_P (expr)
12086 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12088 /* Allow these fields to be modified. */
12089 memcpy ((char *) &buf, expr, tree_size (expr));
12090 expr = (tree) &buf;
12091 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12092 TYPE_POINTER_TO (expr) = NULL;
12093 TYPE_REFERENCE_TO (expr) = NULL;
12094 if (TYPE_CACHED_VALUES_P (expr))
12096 TYPE_CACHED_VALUES_P (expr) = 0;
12097 TYPE_CACHED_VALUES (expr) = NULL;
12100 md5_process_bytes (expr, tree_size (expr), ctx);
12101 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12102 if (TREE_CODE_CLASS (code) != tcc_type
12103 && TREE_CODE_CLASS (code) != tcc_declaration
12104 && code != TREE_LIST)
12105 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12106 switch (TREE_CODE_CLASS (code))
12112 md5_process_bytes (TREE_STRING_POINTER (expr),
12113 TREE_STRING_LENGTH (expr), ctx);
12116 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12117 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12120 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12126 case tcc_exceptional:
12130 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12131 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12132 expr = TREE_CHAIN (expr);
12133 goto recursive_label;
12136 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12137 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12143 case tcc_expression:
12144 case tcc_reference:
12145 case tcc_comparison:
12148 case tcc_statement:
12149 len = TREE_CODE_LENGTH (code);
12150 for (i = 0; i < len; ++i)
12151 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12153 case tcc_declaration:
12154 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12155 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12156 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12158 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12159 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12160 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12161 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12162 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12164 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12165 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12167 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12169 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12170 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12171 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12175 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12176 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12177 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12178 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12179 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12180 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12181 if (INTEGRAL_TYPE_P (expr)
12182 || SCALAR_FLOAT_TYPE_P (expr))
12184 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12185 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12187 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12188 if (TREE_CODE (expr) == RECORD_TYPE
12189 || TREE_CODE (expr) == UNION_TYPE
12190 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12191 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12192 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12201 /* Fold a unary tree expression with code CODE of type TYPE with an
12202 operand OP0. Return a folded expression if successful. Otherwise,
12203 return a tree expression with code CODE of type TYPE with an
12207 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12210 #ifdef ENABLE_FOLD_CHECKING
12211 unsigned char checksum_before[16], checksum_after[16];
12212 struct md5_ctx ctx;
12215 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12216 md5_init_ctx (&ctx);
12217 fold_checksum_tree (op0, &ctx, ht);
12218 md5_finish_ctx (&ctx, checksum_before);
12222 tem = fold_unary (code, type, op0);
12224 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12226 #ifdef ENABLE_FOLD_CHECKING
12227 md5_init_ctx (&ctx);
12228 fold_checksum_tree (op0, &ctx, ht);
12229 md5_finish_ctx (&ctx, checksum_after);
12232 if (memcmp (checksum_before, checksum_after, 16))
12233 fold_check_failed (op0, tem);
12238 /* Fold a binary tree expression with code CODE of type TYPE with
12239 operands OP0 and OP1. Return a folded expression if successful.
12240 Otherwise, return a tree expression with code CODE of type TYPE
12241 with operands OP0 and OP1. */
12244 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12248 #ifdef ENABLE_FOLD_CHECKING
12249 unsigned char checksum_before_op0[16],
12250 checksum_before_op1[16],
12251 checksum_after_op0[16],
12252 checksum_after_op1[16];
12253 struct md5_ctx ctx;
12256 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12257 md5_init_ctx (&ctx);
12258 fold_checksum_tree (op0, &ctx, ht);
12259 md5_finish_ctx (&ctx, checksum_before_op0);
12262 md5_init_ctx (&ctx);
12263 fold_checksum_tree (op1, &ctx, ht);
12264 md5_finish_ctx (&ctx, checksum_before_op1);
12268 tem = fold_binary (code, type, op0, op1);
12270 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12272 #ifdef ENABLE_FOLD_CHECKING
12273 md5_init_ctx (&ctx);
12274 fold_checksum_tree (op0, &ctx, ht);
12275 md5_finish_ctx (&ctx, checksum_after_op0);
12278 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12279 fold_check_failed (op0, tem);
12281 md5_init_ctx (&ctx);
12282 fold_checksum_tree (op1, &ctx, ht);
12283 md5_finish_ctx (&ctx, checksum_after_op1);
12286 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12287 fold_check_failed (op1, tem);
12292 /* Fold a ternary tree expression with code CODE of type TYPE with
12293 operands OP0, OP1, and OP2. Return a folded expression if
12294 successful. Otherwise, return a tree expression with code CODE of
12295 type TYPE with operands OP0, OP1, and OP2. */
12298 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12302 #ifdef ENABLE_FOLD_CHECKING
12303 unsigned char checksum_before_op0[16],
12304 checksum_before_op1[16],
12305 checksum_before_op2[16],
12306 checksum_after_op0[16],
12307 checksum_after_op1[16],
12308 checksum_after_op2[16];
12309 struct md5_ctx ctx;
12312 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12313 md5_init_ctx (&ctx);
12314 fold_checksum_tree (op0, &ctx, ht);
12315 md5_finish_ctx (&ctx, checksum_before_op0);
12318 md5_init_ctx (&ctx);
12319 fold_checksum_tree (op1, &ctx, ht);
12320 md5_finish_ctx (&ctx, checksum_before_op1);
12323 md5_init_ctx (&ctx);
12324 fold_checksum_tree (op2, &ctx, ht);
12325 md5_finish_ctx (&ctx, checksum_before_op2);
12329 tem = fold_ternary (code, type, op0, op1, op2);
12331 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12333 #ifdef ENABLE_FOLD_CHECKING
12334 md5_init_ctx (&ctx);
12335 fold_checksum_tree (op0, &ctx, ht);
12336 md5_finish_ctx (&ctx, checksum_after_op0);
12339 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12340 fold_check_failed (op0, tem);
12342 md5_init_ctx (&ctx);
12343 fold_checksum_tree (op1, &ctx, ht);
12344 md5_finish_ctx (&ctx, checksum_after_op1);
12347 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12348 fold_check_failed (op1, tem);
12350 md5_init_ctx (&ctx);
12351 fold_checksum_tree (op2, &ctx, ht);
12352 md5_finish_ctx (&ctx, checksum_after_op2);
12355 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12356 fold_check_failed (op2, tem);
12361 /* Perform constant folding and related simplification of initializer
12362 expression EXPR. These behave identically to "fold_buildN" but ignore
12363 potential run-time traps and exceptions that fold must preserve. */
12365 #define START_FOLD_INIT \
12366 int saved_signaling_nans = flag_signaling_nans;\
12367 int saved_trapping_math = flag_trapping_math;\
12368 int saved_rounding_math = flag_rounding_math;\
12369 int saved_trapv = flag_trapv;\
12370 int saved_folding_initializer = folding_initializer;\
12371 flag_signaling_nans = 0;\
12372 flag_trapping_math = 0;\
12373 flag_rounding_math = 0;\
12375 folding_initializer = 1;
12377 #define END_FOLD_INIT \
12378 flag_signaling_nans = saved_signaling_nans;\
12379 flag_trapping_math = saved_trapping_math;\
12380 flag_rounding_math = saved_rounding_math;\
12381 flag_trapv = saved_trapv;\
12382 folding_initializer = saved_folding_initializer;
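/* Roughly, each fold_buildN_initializer wrapper below expands to:

     tree result;
     START_FOLD_INIT;
     result = fold_buildN (code, type, ...);
     END_FOLD_INIT;
     return result;

   so the relaxed flags are in effect only for the duration of the call.  */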
12385 fold_build1_initializer (enum tree_code code, tree type, tree op)
12390 result = fold_build1 (code, type, op);
12397 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12402 result = fold_build2 (code, type, op0, op1);
12409 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12415 result = fold_build3 (code, type, op0, op1, op2);
12421 #undef START_FOLD_INIT
12422 #undef END_FOLD_INIT
12424 /* Determine if first argument is a multiple of second argument. Return 0 if
12425 it is not, or we cannot easily determine that it is.
12427 An example of the sort of thing we care about (at this point; this routine
12428 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12429 fold cases do now) is discovering that
12431 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12437 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12439 This code also handles discovering that
12441 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12443 is a multiple of 8 so we don't have to worry about dealing with a
12444 possible remainder.
12446 Note that we *look* inside a SAVE_EXPR only to determine how it was
12447 calculated; it is not safe for fold to do much of anything else with the
12448 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12449 at run time. For example, the latter example above *cannot* be implemented
12450 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12451 evaluation time of the original SAVE_EXPR is not necessarily the same at
12452 the time the new expression is evaluated. The only optimization of this
12453 sort that would be valid is changing
12455 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12459 SAVE_EXPR (I) * SAVE_EXPR (J)
12461 (where the same SAVE_EXPR (J) is used in the original and the
12462 transformed version). */
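/* For example, with TOP = I * 8 and BOTTOM = 4 (both in sizetype), the
   MULT_EXPR case below succeeds because the constant factor 8 is itself
   a multiple of 4, so the value of I is irrelevant.  */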
12465 multiple_of_p (tree type, tree top, tree bottom)
12467 if (operand_equal_p (top, bottom, 0))
12470 if (TREE_CODE (type) != INTEGER_TYPE)
12473 switch (TREE_CODE (top))
12476 /* Bitwise and provides a power of two multiple. If the mask is
12477 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12478 if (!integer_pow2p (bottom))
12483 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12484 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12488 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12489 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12492 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12496 op1 = TREE_OPERAND (top, 1);
12497 /* const_binop may not detect overflow correctly,
12498 so check for it explicitly here. */
12499 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12500 > TREE_INT_CST_LOW (op1)
12501 && TREE_INT_CST_HIGH (op1) == 0
12502 && 0 != (t1 = fold_convert (type,
12503 const_binop (LSHIFT_EXPR,
12506 && !TREE_OVERFLOW (t1))
12507 return multiple_of_p (type, t1, bottom);
12512 /* Can't handle conversions from non-integral or wider integral type. */
12513 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12514 || (TYPE_PRECISION (type)
12515 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12518 /* .. fall through ... */
12521 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12524 if (TREE_CODE (bottom) != INTEGER_CST
12525 || (TYPE_UNSIGNED (type)
12526 && (tree_int_cst_sgn (top) < 0
12527 || tree_int_cst_sgn (bottom) < 0)))
12529 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
12537 /* Return true if `t' is known to be non-negative. */
12540 tree_expr_nonnegative_p (tree t)
12542 if (t == error_mark_node)
12545 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12548 switch (TREE_CODE (t))
12551 /* Query VRP to see if it has recorded any information about
12552 the range of this object. */
12553 return ssa_name_nonnegative_p (t);
12556 /* We can't return 1 if flag_wrapv is set because
12557 ABS_EXPR<INT_MIN> = INT_MIN. */
12558 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
12560 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
12565 return tree_int_cst_sgn (t) >= 0;
12568 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12571 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12572 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12573 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12575 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12576 both unsigned and at least 2 bits shorter than the result. */
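/* For example, (int) (unsigned short) a + (int) (unsigned short) b with a
   32-bit int: MAX (16, 16) + 1 = 17 < 32, so the sum cannot reach the
   sign bit and the result is nonnegative.  */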
12577 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12578 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12579 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12581 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12582 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12583 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12584 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12586 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12587 TYPE_PRECISION (inner2)) + 1;
12588 return prec < TYPE_PRECISION (TREE_TYPE (t));
12594 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12596 /* x * x for floating point x is always non-negative. */
12597 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12599 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12600 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12603 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12604 both unsigned and their combined precision is less than that of the result. */
12605 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12606 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12607 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12609 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12610 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12611 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12612 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12613 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12614 < TYPE_PRECISION (TREE_TYPE (t));
12620 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12621 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12627 case TRUNC_DIV_EXPR:
12628 case CEIL_DIV_EXPR:
12629 case FLOOR_DIV_EXPR:
12630 case ROUND_DIV_EXPR:
12631 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12632 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12634 case TRUNC_MOD_EXPR:
12635 case CEIL_MOD_EXPR:
12636 case FLOOR_MOD_EXPR:
12637 case ROUND_MOD_EXPR:
12639 case NON_LVALUE_EXPR:
12641 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12643 case COMPOUND_EXPR:
12645 case GIMPLE_MODIFY_STMT:
12646 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12649 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12652 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12653 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12657 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12658 tree outer_type = TREE_TYPE (t);
12660 if (TREE_CODE (outer_type) == REAL_TYPE)
12662 if (TREE_CODE (inner_type) == REAL_TYPE)
12663 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12664 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12666 if (TYPE_UNSIGNED (inner_type))
12668 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12671 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12673 if (TREE_CODE (inner_type) == REAL_TYPE)
12674 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12675 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12676 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12677 && TYPE_UNSIGNED (inner_type);
12684 tree temp = TARGET_EXPR_SLOT (t);
12685 t = TARGET_EXPR_INITIAL (t);
12687 /* If the initializer is non-void, then it's a normal expression
12688 that will be assigned to the slot. */
12689 if (!VOID_TYPE_P (t))
12690 return tree_expr_nonnegative_p (t);
12692 /* Otherwise, the initializer sets the slot in some way. One common
12693 way is an assignment statement at the end of the initializer. */
12696 if (TREE_CODE (t) == BIND_EXPR)
12697 t = expr_last (BIND_EXPR_BODY (t));
12698 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12699 || TREE_CODE (t) == TRY_CATCH_EXPR)
12700 t = expr_last (TREE_OPERAND (t, 0));
12701 else if (TREE_CODE (t) == STATEMENT_LIST)
12706 if ((TREE_CODE (t) == MODIFY_EXPR
12707 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
12708 && GENERIC_TREE_OPERAND (t, 0) == temp)
12709 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12716 tree fndecl = get_callee_fndecl (t);
12717 tree arglist = TREE_OPERAND (t, 1);
12718 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12719 switch (DECL_FUNCTION_CODE (fndecl))
12721 CASE_FLT_FN (BUILT_IN_ACOS):
12722 CASE_FLT_FN (BUILT_IN_ACOSH):
12723 CASE_FLT_FN (BUILT_IN_CABS):
12724 CASE_FLT_FN (BUILT_IN_COSH):
12725 CASE_FLT_FN (BUILT_IN_ERFC):
12726 CASE_FLT_FN (BUILT_IN_EXP):
12727 CASE_FLT_FN (BUILT_IN_EXP10):
12728 CASE_FLT_FN (BUILT_IN_EXP2):
12729 CASE_FLT_FN (BUILT_IN_FABS):
12730 CASE_FLT_FN (BUILT_IN_FDIM):
12731 CASE_FLT_FN (BUILT_IN_HYPOT):
12732 CASE_FLT_FN (BUILT_IN_POW10):
12733 CASE_INT_FN (BUILT_IN_FFS):
12734 CASE_INT_FN (BUILT_IN_PARITY):
12735 CASE_INT_FN (BUILT_IN_POPCOUNT):
12736 case BUILT_IN_BSWAP32:
12737 case BUILT_IN_BSWAP64:
12741 CASE_FLT_FN (BUILT_IN_SQRT):
12742 /* sqrt(-0.0) is -0.0. */
12743 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12745 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12747 CASE_FLT_FN (BUILT_IN_ASINH):
12748 CASE_FLT_FN (BUILT_IN_ATAN):
12749 CASE_FLT_FN (BUILT_IN_ATANH):
12750 CASE_FLT_FN (BUILT_IN_CBRT):
12751 CASE_FLT_FN (BUILT_IN_CEIL):
12752 CASE_FLT_FN (BUILT_IN_ERF):
12753 CASE_FLT_FN (BUILT_IN_EXPM1):
12754 CASE_FLT_FN (BUILT_IN_FLOOR):
12755 CASE_FLT_FN (BUILT_IN_FMOD):
12756 CASE_FLT_FN (BUILT_IN_FREXP):
12757 CASE_FLT_FN (BUILT_IN_LCEIL):
12758 CASE_FLT_FN (BUILT_IN_LDEXP):
12759 CASE_FLT_FN (BUILT_IN_LFLOOR):
12760 CASE_FLT_FN (BUILT_IN_LLCEIL):
12761 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12762 CASE_FLT_FN (BUILT_IN_LLRINT):
12763 CASE_FLT_FN (BUILT_IN_LLROUND):
12764 CASE_FLT_FN (BUILT_IN_LRINT):
12765 CASE_FLT_FN (BUILT_IN_LROUND):
12766 CASE_FLT_FN (BUILT_IN_MODF):
12767 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12768 CASE_FLT_FN (BUILT_IN_RINT):
12769 CASE_FLT_FN (BUILT_IN_ROUND):
12770 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12771 CASE_FLT_FN (BUILT_IN_SINH):
12772 CASE_FLT_FN (BUILT_IN_TANH):
12773 CASE_FLT_FN (BUILT_IN_TRUNC):
12774 /* True if the 1st argument is nonnegative. */
12775 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12777 CASE_FLT_FN (BUILT_IN_FMAX):
12778 /* True if the 1st OR 2nd arguments are nonnegative. */
12779 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12780 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12782 CASE_FLT_FN (BUILT_IN_FMIN):
12783 /* True if the 1st AND 2nd arguments are nonnegative. */
12784 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12785 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12787 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12788 /* True if the 2nd argument is nonnegative. */
12789 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12791 CASE_FLT_FN (BUILT_IN_POWI):
12792 /* True if the 1st argument is nonnegative or the second
12793 argument is an even integer. */
12794 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
12796 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
12797 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
12800 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12802 CASE_FLT_FN (BUILT_IN_POW):
12803 /* True if the 1st argument is nonnegative or the second
12804 argument is an even integer valued real. */
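/* E.g. pow (x, 2.0) is treated as nonnegative for any x because the
   exponent is an even integer-valued real, whereas pow (x, 3.0) falls
   back to requiring that x itself be nonnegative.  */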
12805 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
12810 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
12811 n = real_to_integer (&c);
12814 REAL_VALUE_TYPE cint;
12815 real_from_integer (&cint, VOIDmode, n,
12816 n < 0 ? -1 : 0, 0);
12817 if (real_identical (&c, &cint))
12821 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12828 /* ... fall through ... */
12831 if (truth_value_p (TREE_CODE (t)))
12832 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12836 /* We don't know the sign of `t', so be conservative and return false. */
12840 /* Return true when T is an address and is known to be nonzero.
12841 For floating point we further ensure that T is not denormal.
12842 Similar logic is present in nonzero_address in rtlanal.h. */
12845 tree_expr_nonzero_p (tree t)
12847 tree type = TREE_TYPE (t);
12849 /* Doing something useful for floating point would need more work. */
12850 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12853 switch (TREE_CODE (t))
12856 /* Query VRP to see if it has recorded any information about
12857 the range of this object. */
12858 return ssa_name_nonzero_p (t);
12861 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12864 return !integer_zerop (t);
12867 if (TYPE_OVERFLOW_UNDEFINED (type))
12869 /* In the presence of negative values it is hard
12870 to say anything definite.  */
12871 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12872 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12874 /* One of the operands must be positive and the other non-negative. */
12875 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12876 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12881 if (TYPE_OVERFLOW_UNDEFINED (type))
12883 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12884 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12890 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12891 tree outer_type = TREE_TYPE (t);
12893 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12894 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12900 tree base = get_base_address (TREE_OPERAND (t, 0));
12905 /* Weak declarations may link to NULL. */
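/* For example, the address of 'extern int x __attribute__ ((weak));'
   may resolve to zero at link time, so it cannot be assumed nonzero;
   a non-weak variable or function declaration always yields a nonzero
   address.  */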
12906 if (VAR_OR_FUNCTION_DECL_P (base))
12907 return !DECL_WEAK (base);
12909 /* Constants are never weak. */
12910 if (CONSTANT_CLASS_P (base))
12917 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12918 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12921 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12922 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12925 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12927 /* When both operands are nonzero, then MAX must be too. */
12928 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12931 /* MAX where operand 0 is positive is positive. */
12932 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12934 /* MAX where operand 1 is positive is positive. */
12935 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12936 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12940 case COMPOUND_EXPR:
12942 case GIMPLE_MODIFY_STMT:
12944 return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));
12947 case NON_LVALUE_EXPR:
12948 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12951 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12952 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12955 return alloca_call_p (t);
12963 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12964 attempt to fold the expression to a constant without modifying TYPE,
12967 If the expression could be simplified to a constant, then return
12968 the constant. If the expression would not be simplified to a
12969 constant, then return NULL_TREE. */
12972 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12974 tree tem = fold_binary (code, type, op0, op1);
12975 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12978 /* Given the components of a unary expression CODE, TYPE and OP0,
12979 attempt to fold the expression to a constant without modifying
12982 If the expression could be simplified to a constant, then return
12983 the constant. If the expression would not be simplified to a
12984 constant, then return NULL_TREE. */
12987 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12989 tree tem = fold_unary (code, type, op0);
12990 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12993 /* If EXP represents referencing an element in a constant string
12994 (either via pointer arithmetic or array indexing), return the
12995 tree representing the value accessed, otherwise return NULL. */
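/* E.g. "abc"[1], or equivalently *("abc" + 1), folds to the character
   constant 'b', provided the index is a constant inside the string
   bounds and the element is a single byte, as checked below.  */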
12998 fold_read_from_constant_string (tree exp)
13000 if ((TREE_CODE (exp) == INDIRECT_REF
13001 || TREE_CODE (exp) == ARRAY_REF)
13002 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13004 tree exp1 = TREE_OPERAND (exp, 0);
13008 if (TREE_CODE (exp) == INDIRECT_REF)
13009 string = string_constant (exp1, &index);
13012 tree low_bound = array_ref_low_bound (exp);
13013 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
13015 /* Optimize the special-case of a zero lower bound.
13017 We convert the low_bound to sizetype to avoid some problems
13018 with constant folding. (E.g. suppose the lower bound is 1,
13019 and its mode is QI.  Without the conversion, (ARRAY
13020 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13021 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
13022 if (! integer_zerop (low_bound))
13023 index = size_diffop (index, fold_convert (sizetype, low_bound));
13029 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13030 && TREE_CODE (string) == STRING_CST
13031 && TREE_CODE (index) == INTEGER_CST
13032 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13033 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13035 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13036 return fold_convert (TREE_TYPE (exp),
13037 build_int_cst (NULL_TREE,
13038 (TREE_STRING_POINTER (string)
13039 [TREE_INT_CST_LOW (index)])));
13044 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13045 an integer constant or real constant.
13047 TYPE is the type of the result. */
13050 fold_negate_const (tree arg0, tree type)
13052 tree t = NULL_TREE;
13054 switch (TREE_CODE (arg0))
13058 unsigned HOST_WIDE_INT low;
13059 HOST_WIDE_INT high;
13060 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13061 TREE_INT_CST_HIGH (arg0),
13063 t = force_fit_type_double (type, low, high, 1,
13064 (overflow | TREE_OVERFLOW (arg0))
13065 && !TYPE_UNSIGNED (type));
13070 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13074 gcc_unreachable ();
13080 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13081 an integer constant or real constant.
13083 TYPE is the type of the result. */
13086 fold_abs_const (tree arg0, tree type)
13088 tree t = NULL_TREE;
13090 switch (TREE_CODE (arg0))
13093 /* If the value is unsigned, then the absolute value is
13094 the same as the ordinary value. */
13095 if (TYPE_UNSIGNED (type))
13097 /* Similarly, if the value is non-negative. */
13098 else if (INT_CST_LT (integer_minus_one_node, arg0))
13100 /* If the value is negative, then the absolute value is
13104 unsigned HOST_WIDE_INT low;
13105 HOST_WIDE_INT high;
13106 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13107 TREE_INT_CST_HIGH (arg0),
13109 t = force_fit_type_double (type, low, high, -1,
13110 overflow | TREE_OVERFLOW (arg0));
13115 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13116 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13122 gcc_unreachable ();
13128 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13129 constant. TYPE is the type of the result. */
13132 fold_not_const (tree arg0, tree type)
13134 tree t = NULL_TREE;
13136 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13138 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13139 ~TREE_INT_CST_HIGH (arg0), 0,
13140 TREE_OVERFLOW (arg0));
13145 /* Given CODE, a relational operator, the target type, TYPE and two
13146 constant operands OP0 and OP1, return the result of the
13147 relational operation. If the result is not a compile time
13148 constant, then return NULL_TREE. */
13151 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13153 int result, invert;
13155 /* From here on, the only cases we handle are when the result is
13156 known to be a constant. */
13158 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13160 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13161 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13163 /* Handle the cases where either operand is a NaN. */
13164 if (real_isnan (c0) || real_isnan (c1))
13174 case UNORDERED_EXPR:
13188 if (flag_trapping_math)
13194 gcc_unreachable ();
13197 return constant_boolean_node (result, type);
13200 return constant_boolean_node (real_compare (code, c0, c1), type);
13203 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13205 To compute GT, swap the arguments and do LT.
13206 To compute GE, do LT and invert the result.
13207 To compute LE, swap the arguments, do LT and invert the result.
13208 To compute NE, do EQ and invert the result.
13210 Therefore, the code below must handle only EQ and LT. */
13212 if (code == LE_EXPR || code == GT_EXPR)
13217 code = swap_tree_comparison (code);
13220 /* Note that it is safe to invert for real values here because we
13221 have already handled the one case where it matters.  */
13224 if (code == NE_EXPR || code == GE_EXPR)
13227 code = invert_tree_comparison (code, false);
13230 /* Compute a result for LT or EQ if args permit;
13231 Otherwise return NULL_TREE.  */
13232 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13234 if (code == EQ_EXPR)
13235 result = tree_int_cst_equal (op0, op1);
13236 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13237 result = INT_CST_LT_UNSIGNED (op0, op1);
13239 result = INT_CST_LT (op0, op1);
13246 return constant_boolean_node (result, type);
13249 /* Build an expression for a cleanup point containing EXPR with type TYPE.
13250 Don't build a cleanup point expression for EXPR which doesn't have side effects.  */
13254 fold_build_cleanup_point_expr (tree type, tree expr)
13256 /* If the expression does not have side effects then we don't have to wrap
13257 it with a cleanup point expression. */
13258 if (!TREE_SIDE_EFFECTS (expr))
13261 /* If the expression is a return, check whether the expression inside the
13262 return, or the right hand side of the modify expression inside the
13263 return, has no side effects.  If either one has no side effects, we don't
13264 need to wrap the expression in a cleanup point expression.  Note we don't
13265 check the left hand side of the modify because it should always be a return decl.
13266 if (TREE_CODE (expr) == RETURN_EXPR)
13268 tree op = TREE_OPERAND (expr, 0);
13269 if (!op || !TREE_SIDE_EFFECTS (op))
13271 op = TREE_OPERAND (op, 1);
13272 if (!TREE_SIDE_EFFECTS (op))
13276 return build1 (CLEANUP_POINT_EXPR, type, expr);
13279 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13280 avoid confusing the gimplify process. */
13283 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13285 /* The size of the object is not relevant when talking about its address. */
13286 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13287 t = TREE_OPERAND (t, 0);
13289 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13290 if (TREE_CODE (t) == INDIRECT_REF
13291 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13293 t = TREE_OPERAND (t, 0);
13294 if (TREE_TYPE (t) != ptrtype)
13295 t = build1 (NOP_EXPR, ptrtype, t);
13301 while (handled_component_p (base))
13302 base = TREE_OPERAND (base, 0);
13304 TREE_ADDRESSABLE (base) = 1;
13306 t = build1 (ADDR_EXPR, ptrtype, t);
13313 build_fold_addr_expr (tree t)
13315 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13318 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13319 of an indirection through OP0, or NULL_TREE if no simplification is
13323 fold_indirect_ref_1 (tree type, tree op0)
13329 subtype = TREE_TYPE (sub);
13330 if (!POINTER_TYPE_P (subtype))
13333 if (TREE_CODE (sub) == ADDR_EXPR)
13335 tree op = TREE_OPERAND (sub, 0);
13336 tree optype = TREE_TYPE (op);
13337 /* *&CONST_DECL -> to the value of the const decl. */
13338 if (TREE_CODE (op) == CONST_DECL)
13339 return DECL_INITIAL (op);
13340 /* *&p => p; make sure to handle *&"str"[cst] here. */
13341 if (type == optype)
13343 tree fop = fold_read_from_constant_string (op);
13349 /* *(foo *)&fooarray => fooarray[0] */
13350 else if (TREE_CODE (optype) == ARRAY_TYPE
13351 && type == TREE_TYPE (optype))
13353 tree type_domain = TYPE_DOMAIN (optype);
13354 tree min_val = size_zero_node;
13355 if (type_domain && TYPE_MIN_VALUE (type_domain))
13356 min_val = TYPE_MIN_VALUE (type_domain);
13357 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13359 /* *(foo *)&complexfoo => __real__ complexfoo */
13360 else if (TREE_CODE (optype) == COMPLEX_TYPE
13361 && type == TREE_TYPE (optype))
13362 return fold_build1 (REALPART_EXPR, type, op);
13363 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13364 else if (TREE_CODE (optype) == VECTOR_TYPE
13365 && type == TREE_TYPE (optype))
13367 tree part_width = TYPE_SIZE (type);
13368 tree index = bitsize_int (0);
13369 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
13373 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13374 if (TREE_CODE (sub) == PLUS_EXPR
13375 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13377 tree op00 = TREE_OPERAND (sub, 0);
13378 tree op01 = TREE_OPERAND (sub, 1);
13382 op00type = TREE_TYPE (op00);
13383 if (TREE_CODE (op00) == ADDR_EXPR
13384 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13385 && type == TREE_TYPE (TREE_TYPE (op00type)))
13387 tree size = TYPE_SIZE_UNIT (type);
13388 if (tree_int_cst_equal (size, op01))
13389 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13393 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13394 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13395 && type == TREE_TYPE (TREE_TYPE (subtype)))
13398 tree min_val = size_zero_node;
13399 sub = build_fold_indirect_ref (sub);
13400 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13401 if (type_domain && TYPE_MIN_VALUE (type_domain))
13402 min_val = TYPE_MIN_VALUE (type_domain);
13403 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13409 /* Builds an expression for an indirection through T, simplifying some
13413 build_fold_indirect_ref (tree t)
13415 tree type = TREE_TYPE (TREE_TYPE (t));
13416 tree sub = fold_indirect_ref_1 (type, t);
13421 return build1 (INDIRECT_REF, type, t);
13424 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13427 fold_indirect_ref (tree t)
13429 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13437 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13438 whose result is ignored. The type of the returned tree need not be
13439 the same as the original expression. */
13442 fold_ignored_result (tree t)
13444 if (!TREE_SIDE_EFFECTS (t))
13445 return integer_zero_node;
13448 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13451 t = TREE_OPERAND (t, 0);
13455 case tcc_comparison:
13456 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13457 t = TREE_OPERAND (t, 0);
13458 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13459 t = TREE_OPERAND (t, 1);
13464 case tcc_expression:
13465 switch (TREE_CODE (t))
13467 case COMPOUND_EXPR:
13468 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13470 t = TREE_OPERAND (t, 0);
13474 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13475 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13477 t = TREE_OPERAND (t, 0);
13490 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13491 This can only be applied to objects of a sizetype. */
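/* For instance, round_up (size_int (37), 8) yields 40; for a
   power-of-two divisor the non-constant path below reduces to
   (VALUE + 7) & -8.  */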
13494 round_up (tree value, int divisor)
13496 tree div = NULL_TREE;
13498 gcc_assert (divisor > 0);
13502 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13503 have to do anything. Only do this when we are not given a const,
13504 because in that case, this check is more expensive than just
13506 if (TREE_CODE (value) != INTEGER_CST)
13508 div = build_int_cst (TREE_TYPE (value), divisor);
13510 if (multiple_of_p (TREE_TYPE (value), value, div))
13514 /* If divisor is a power of two, simplify this to bit manipulation. */
13515 if (divisor == (divisor & -divisor))
13517 if (TREE_CODE (value) == INTEGER_CST)
13519 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
13520 unsigned HOST_WIDE_INT high;
13523 if ((low & (divisor - 1)) == 0)
13526 overflow_p = TREE_OVERFLOW (value);
13527 high = TREE_INT_CST_HIGH (value);
13528 low &= ~(divisor - 1);
13537 return force_fit_type_double (TREE_TYPE (value), low, high,
13544 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13545 value = size_binop (PLUS_EXPR, value, t);
13546 t = build_int_cst (TREE_TYPE (value), -divisor);
13547 value = size_binop (BIT_AND_EXPR, value, t);
13553 div = build_int_cst (TREE_TYPE (value), divisor);
13554 value = size_binop (CEIL_DIV_EXPR, value, div);
13555 value = size_binop (MULT_EXPR, value, div);
13561 /* Likewise, but round down. */
13564 round_down (tree value, int divisor)
13566 tree div = NULL_TREE;
13568 gcc_assert (divisor > 0);
13572 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13573 have to do anything. Only do this when we are not given a const,
13574 because in that case, this check is more expensive than just
13576 if (TREE_CODE (value) != INTEGER_CST)
13578 div = build_int_cst (TREE_TYPE (value), divisor);
13580 if (multiple_of_p (TREE_TYPE (value), value, div))
13584 /* If divisor is a power of two, simplify this to bit manipulation. */
13585 if (divisor == (divisor & -divisor))
13589 t = build_int_cst (TREE_TYPE (value), -divisor);
13590 value = size_binop (BIT_AND_EXPR, value, t);
13595 div = build_int_cst (TREE_TYPE (value), divisor);
13596 value = size_binop (FLOOR_DIV_EXPR, value, div);
13597 value = size_binop (MULT_EXPR, value, div);
13603 /* Returns the pointer to the base of the object addressed by EXP and
13604 extracts the information about the offset of the access, storing it
13605 to PBITPOS and POFFSET. */
13608 split_address_to_core_and_offset (tree exp,
13609 HOST_WIDE_INT *pbitpos, tree *poffset)
13612 enum machine_mode mode;
13613 int unsignedp, volatilep;
13614 HOST_WIDE_INT bitsize;
13616 if (TREE_CODE (exp) == ADDR_EXPR)
13618 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13619 poffset, &mode, &unsignedp, &volatilep,
13621 core = build_fold_addr_expr (core);
13627 *poffset = NULL_TREE;
13633 /* Returns true if addresses of E1 and E2 differ by a constant, false
13634 otherwise. If they do, E1 - E2 is stored in *DIFF. */
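/* For instance, assuming 4-byte array elements, E1 == &a[3] and
   E2 == &a[1] share the core &a and differ by the constant 8, so *DIFF
   is set to 8 and true is returned; if only one side has a non-constant
   offset, no constant difference can be computed.  */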
13637 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13640 HOST_WIDE_INT bitpos1, bitpos2;
13641 tree toffset1, toffset2, tdiff, type;
13643 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13644 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13646 if (bitpos1 % BITS_PER_UNIT != 0
13647 || bitpos2 % BITS_PER_UNIT != 0
13648 || !operand_equal_p (core1, core2, 0))
13651 if (toffset1 && toffset2)
13653 type = TREE_TYPE (toffset1);
13654 if (type != TREE_TYPE (toffset2))
13655 toffset2 = fold_convert (type, toffset2);
13657 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13658 if (!cst_and_fits_in_hwi (tdiff))
13661 *diff = int_cst_value (tdiff);
13663 else if (toffset1 || toffset2)
13665 /* If only one of the offsets is non-constant, the difference cannot
13672 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13676 /* Simplify the floating point expression EXP when the sign of the
13677 result is not significant.  Return NULL_TREE if no simplification is possible.  */
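/* E.g. -x * y can be rewritten as x * y when only the magnitude of the
   product matters, and copysign (x, y) reduces to its first argument;
   see the NEGATE_EXPR, MULT_EXPR and COPYSIGN cases below.  */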
13681 fold_strip_sign_ops (tree exp)
13685 switch (TREE_CODE (exp))
13689 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13690 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13694 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13696 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13697 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13698 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13699 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13700 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13701 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13704 case COMPOUND_EXPR:
13705 arg0 = TREE_OPERAND (exp, 0);
13706 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13708 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
13712 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13713 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
13715 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
13716 arg0 ? arg0 : TREE_OPERAND (exp, 1),
13717 arg1 ? arg1 : TREE_OPERAND (exp, 2));
13722 const enum built_in_function fcode = builtin_mathfn_code (exp);
13725 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13726 /* Strip copysign function call, return the 1st argument. */
13727 arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
13728 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
13729 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
13732 /* Strip sign ops from the argument of "odd" math functions. */
13733 if (negate_mathfn_p (fcode))
13735 arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
13737 return build_function_call_expr (get_callee_fndecl (exp),
13738 build_tree_list (NULL_TREE,