1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static tree fold_convert_const (enum tree_code, tree, tree);
93 static enum tree_code invert_tree_comparison (enum tree_code, bool);
94 static enum comparison_code comparison_to_compcode (enum tree_code);
95 static enum tree_code compcode_to_comparison (enum comparison_code);
96 static tree combine_comparisons (enum tree_code, enum tree_code,
97 enum tree_code, tree, tree, tree);
98 static int truth_value_p (enum tree_code);
99 static int operand_equal_for_comparison_p (tree, tree, tree);
100 static int twoval_comparison_p (tree, tree *, tree *, int *);
101 static tree eval_subst (tree, tree, tree, tree, tree);
102 static tree pedantic_omit_one_operand (tree, tree, tree);
103 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
104 static tree make_bit_field_ref (tree, tree, int, int, int);
105 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
106 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
107 enum machine_mode *, int *, int *,
109 static int all_ones_mask_p (tree, int);
110 static tree sign_bit_p (tree, tree);
111 static int simple_operand_p (tree);
112 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
113 static tree make_range (tree, int *, tree *, tree *);
114 static tree build_range_check (tree, tree, int, tree, tree);
115 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
117 static tree fold_range_test (tree);
118 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
119 static tree unextend (tree, int, int, tree);
120 static tree fold_truthop (enum tree_code, tree, tree, tree);
121 static tree optimize_minmax_comparison (tree);
122 static tree extract_muldiv (tree, tree, enum tree_code, tree);
123 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
124 static int multiple_of_p (tree, tree, tree);
125 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
127 static bool fold_real_zero_addition_p (tree, tree, int);
128 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
130 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
131 static tree fold_div_compare (enum tree_code, tree, tree, tree);
132 static bool reorder_operands_p (tree, tree);
133 static tree fold_negate_const (tree, tree);
134 static tree fold_not_const (tree, tree);
135 static tree fold_relational_const (enum tree_code, tree, tree, tree);
136 static tree fold_relational_hi_lo (enum tree_code *, const tree,
138 static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

/* Extract the low half-word of X.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
/* Extract the high half-word of X.  */
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
/* The radix of the half-word representation: 2**(HOST_BITS_PER_WIDE_INT/2).  */
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
161 /* Unpack a two-word integer into 4 words.
162 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
163 WORDS points to the array of HOST_WIDE_INTs. */
166 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
168 words[0] = LOWPART (low);
169 words[1] = HIGHPART (low);
170 words[2] = LOWPART (hi);
171 words[3] = HIGHPART (hi);
174 /* Pack an array of 4 words into a two-word integer.
175 WORDS points to the array of words.
176 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
179 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
182 *low = words[0] + words[1] * BASE;
183 *hi = words[2] + words[3] * BASE;
186 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
187 in overflow of the value, when >0 we are only interested in signed
188 overflow, for <0 we are interested in any overflow. OVERFLOWED
189 indicates whether overflow has already occurred. CONST_OVERFLOWED
190 indicates whether constant overflow has already occurred. We force
191 T's value to be within range of T's type (by setting to 0 or 1 all
192 the bits outside the type's range). We set TREE_OVERFLOWED if,
193 OVERFLOWED is non-zero,
194 or OVERFLOWABLE is >0 and signed overflow occurs
195 or OVERFLOWABLE is <0 and any overflow occurs
196 We set TREE_CONSTANT_OVERFLOWED if,
197 CONST_OVERFLOWED is non-zero
198 or we set TREE_OVERFLOWED.
199 We return either the original T, or a copy. */
202 force_fit_type (tree t, int overflowable,
203 bool overflowed, bool overflowed_const)
205 unsigned HOST_WIDE_INT low;
208 int sign_extended_type;
210 if (TREE_CODE (t) != INTEGER_CST)
213 low = TREE_INT_CST_LOW (t);
214 high = TREE_INT_CST_HIGH (t);
216 if (POINTER_TYPE_P (TREE_TYPE (t))
217 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
220 prec = TYPE_PRECISION (TREE_TYPE (t));
221 /* Size types *are* sign extended. */
222 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
223 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
224 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
226 /* First clear all bits that are beyond the type's precision. */
228 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
230 else if (prec > HOST_BITS_PER_WIDE_INT)
231 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
235 if (prec < HOST_BITS_PER_WIDE_INT)
236 low &= ~((HOST_WIDE_INT) (-1) << prec);
239 if (!sign_extended_type)
240 /* No sign extension */;
241 else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
242 /* Correct width already. */;
243 else if (prec > HOST_BITS_PER_WIDE_INT)
245 /* Sign extend top half? */
246 if (high & ((unsigned HOST_WIDE_INT)1
247 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
248 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
250 else if (prec == HOST_BITS_PER_WIDE_INT)
252 if ((HOST_WIDE_INT)low < 0)
257 /* Sign extend bottom half? */
258 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
261 low |= (HOST_WIDE_INT)(-1) << prec;
265 /* If the value changed, return a new node. */
266 if (overflowed || overflowed_const
267 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
269 t = build_int_cst_wide (TREE_TYPE (t), low, high);
273 || (overflowable > 0 && sign_extended_type))
276 TREE_OVERFLOW (t) = 1;
277 TREE_CONSTANT_OVERFLOW (t) = 1;
279 else if (overflowed_const)
282 TREE_CONSTANT_OVERFLOW (t) = 1;
289 /* Add two doubleword integers with doubleword result.
290 Each argument is given as two `HOST_WIDE_INT' pieces.
291 One argument is L1 and H1; the other, L2 and H2.
292 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
295 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
296 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
297 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
299 unsigned HOST_WIDE_INT l;
303 h = h1 + h2 + (l < l1);
307 return OVERFLOW_SUM_SIGN (h1, h2, h);
310 /* Negate a doubleword integer with doubleword result.
311 Return nonzero if the operation overflows, assuming it's signed.
312 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
313 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
316 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
317 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
323 return (*hv & h1) < 0;
333 /* Multiply two doubleword integers with doubleword result.
334 Return nonzero if the operation overflows, assuming it's signed.
335 Each argument is given as two `HOST_WIDE_INT' pieces.
336 One argument is L1 and H1; the other, L2 and H2.
337 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
340 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
341 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
342 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
344 HOST_WIDE_INT arg1[4];
345 HOST_WIDE_INT arg2[4];
346 HOST_WIDE_INT prod[4 * 2];
347 unsigned HOST_WIDE_INT carry;
349 unsigned HOST_WIDE_INT toplow, neglow;
350 HOST_WIDE_INT tophigh, neghigh;
352 encode (arg1, l1, h1);
353 encode (arg2, l2, h2);
355 memset (prod, 0, sizeof prod);
357 for (i = 0; i < 4; i++)
360 for (j = 0; j < 4; j++)
363 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
364 carry += arg1[i] * arg2[j];
365 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
367 prod[k] = LOWPART (carry);
368 carry = HIGHPART (carry);
373 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
375 /* Check for overflow by calculating the top half of the answer in full;
376 it should agree with the low half's sign bit. */
377 decode (prod + 4, &toplow, &tophigh);
380 neg_double (l2, h2, &neglow, &neghigh);
381 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
385 neg_double (l1, h1, &neglow, &neghigh);
386 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
388 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
391 /* Shift the doubleword integer in L1, H1 left by COUNT places
392 keeping only PREC bits of result.
393 Shift right if COUNT is negative.
394 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
395 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
398 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
399 HOST_WIDE_INT count, unsigned int prec,
400 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
402 unsigned HOST_WIDE_INT signmask;
406 rshift_double (l1, h1, -count, prec, lv, hv, arith);
410 if (SHIFT_COUNT_TRUNCATED)
413 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
415 /* Shifting by the host word size is undefined according to the
416 ANSI standard, so we must handle this as a special case. */
420 else if (count >= HOST_BITS_PER_WIDE_INT)
422 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
427 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
428 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
432 /* Sign extend all bits that are beyond the precision. */
434 signmask = -((prec > HOST_BITS_PER_WIDE_INT
435 ? ((unsigned HOST_WIDE_INT) *hv
436 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
437 : (*lv >> (prec - 1))) & 1);
439 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
441 else if (prec >= HOST_BITS_PER_WIDE_INT)
443 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
444 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
449 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
450 *lv |= signmask << prec;
454 /* Shift the doubleword integer in L1, H1 right by COUNT places
455 keeping only PREC bits of result. COUNT must be positive.
456 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
457 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
460 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
461 HOST_WIDE_INT count, unsigned int prec,
462 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
465 unsigned HOST_WIDE_INT signmask;
468 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
471 if (SHIFT_COUNT_TRUNCATED)
474 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
476 /* Shifting by the host word size is undefined according to the
477 ANSI standard, so we must handle this as a special case. */
481 else if (count >= HOST_BITS_PER_WIDE_INT)
484 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
488 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
490 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
493 /* Zero / sign extend all bits that are beyond the precision. */
495 if (count >= (HOST_WIDE_INT)prec)
500 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
502 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
504 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
505 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
510 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
511 *lv |= signmask << (prec - count);
515 /* Rotate the doubleword integer in L1, H1 left by COUNT places
516 keeping only PREC bits of result.
517 Rotate right if COUNT is negative.
518 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
521 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
522 HOST_WIDE_INT count, unsigned int prec,
523 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
525 unsigned HOST_WIDE_INT s1l, s2l;
526 HOST_WIDE_INT s1h, s2h;
532 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
533 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
538 /* Rotate the doubleword integer in L1, H1 left by COUNT places
539 keeping only PREC bits of result. COUNT must be positive.
540 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
543 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
544 HOST_WIDE_INT count, unsigned int prec,
545 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
547 unsigned HOST_WIDE_INT s1l, s2l;
548 HOST_WIDE_INT s1h, s2h;
554 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
555 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
560 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
561 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
562 CODE is a tree code for a kind of division, one of
563 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
565 It controls how the quotient is rounded to an integer.
566 Return nonzero if the operation overflows.
567 UNS nonzero says do unsigned division. */
570 div_and_round_double (enum tree_code code, int uns,
571 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
572 HOST_WIDE_INT hnum_orig,
573 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
574 HOST_WIDE_INT hden_orig,
575 unsigned HOST_WIDE_INT *lquo,
576 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
580 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
581 HOST_WIDE_INT den[4], quo[4];
583 unsigned HOST_WIDE_INT work;
584 unsigned HOST_WIDE_INT carry = 0;
585 unsigned HOST_WIDE_INT lnum = lnum_orig;
586 HOST_WIDE_INT hnum = hnum_orig;
587 unsigned HOST_WIDE_INT lden = lden_orig;
588 HOST_WIDE_INT hden = hden_orig;
591 if (hden == 0 && lden == 0)
592 overflow = 1, lden = 1;
594 /* Calculate quotient sign and convert operands to unsigned. */
600 /* (minimum integer) / (-1) is the only overflow case. */
601 if (neg_double (lnum, hnum, &lnum, &hnum)
602 && ((HOST_WIDE_INT) lden & hden) == -1)
608 neg_double (lden, hden, &lden, &hden);
612 if (hnum == 0 && hden == 0)
613 { /* single precision */
615 /* This unsigned division rounds toward zero. */
621 { /* trivial case: dividend < divisor */
622 /* hden != 0 already checked. */
629 memset (quo, 0, sizeof quo);
631 memset (num, 0, sizeof num); /* to zero 9th element */
632 memset (den, 0, sizeof den);
634 encode (num, lnum, hnum);
635 encode (den, lden, hden);
637 /* Special code for when the divisor < BASE. */
638 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
640 /* hnum != 0 already checked. */
641 for (i = 4 - 1; i >= 0; i--)
643 work = num[i] + carry * BASE;
644 quo[i] = work / lden;
650 /* Full double precision division,
651 with thanks to Don Knuth's "Seminumerical Algorithms". */
652 int num_hi_sig, den_hi_sig;
653 unsigned HOST_WIDE_INT quo_est, scale;
655 /* Find the highest nonzero divisor digit. */
656 for (i = 4 - 1;; i--)
663 /* Insure that the first digit of the divisor is at least BASE/2.
664 This is required by the quotient digit estimation algorithm. */
666 scale = BASE / (den[den_hi_sig] + 1);
668 { /* scale divisor and dividend */
670 for (i = 0; i <= 4 - 1; i++)
672 work = (num[i] * scale) + carry;
673 num[i] = LOWPART (work);
674 carry = HIGHPART (work);
679 for (i = 0; i <= 4 - 1; i++)
681 work = (den[i] * scale) + carry;
682 den[i] = LOWPART (work);
683 carry = HIGHPART (work);
684 if (den[i] != 0) den_hi_sig = i;
691 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
693 /* Guess the next quotient digit, quo_est, by dividing the first
694 two remaining dividend digits by the high order quotient digit.
695 quo_est is never low and is at most 2 high. */
696 unsigned HOST_WIDE_INT tmp;
698 num_hi_sig = i + den_hi_sig + 1;
699 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
700 if (num[num_hi_sig] != den[den_hi_sig])
701 quo_est = work / den[den_hi_sig];
705 /* Refine quo_est so it's usually correct, and at most one high. */
706 tmp = work - quo_est * den[den_hi_sig];
708 && (den[den_hi_sig - 1] * quo_est
709 > (tmp * BASE + num[num_hi_sig - 2])))
712 /* Try QUO_EST as the quotient digit, by multiplying the
713 divisor by QUO_EST and subtracting from the remaining dividend.
714 Keep in mind that QUO_EST is the I - 1st digit. */
717 for (j = 0; j <= den_hi_sig; j++)
719 work = quo_est * den[j] + carry;
720 carry = HIGHPART (work);
721 work = num[i + j] - LOWPART (work);
722 num[i + j] = LOWPART (work);
723 carry += HIGHPART (work) != 0;
726 /* If quo_est was high by one, then num[i] went negative and
727 we need to correct things. */
728 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
731 carry = 0; /* add divisor back in */
732 for (j = 0; j <= den_hi_sig; j++)
734 work = num[i + j] + den[j] + carry;
735 carry = HIGHPART (work);
736 num[i + j] = LOWPART (work);
739 num [num_hi_sig] += carry;
742 /* Store the quotient digit. */
747 decode (quo, lquo, hquo);
750 /* If result is negative, make it so. */
752 neg_double (*lquo, *hquo, lquo, hquo);
754 /* Compute trial remainder: rem = num - (quo * den) */
755 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
756 neg_double (*lrem, *hrem, lrem, hrem);
757 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
762 case TRUNC_MOD_EXPR: /* round toward zero */
763 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
767 case FLOOR_MOD_EXPR: /* round toward negative infinity */
768 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
771 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
779 case CEIL_MOD_EXPR: /* round toward positive infinity */
780 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
782 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
790 case ROUND_MOD_EXPR: /* round to closest integer */
792 unsigned HOST_WIDE_INT labs_rem = *lrem;
793 HOST_WIDE_INT habs_rem = *hrem;
794 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
795 HOST_WIDE_INT habs_den = hden, htwice;
797 /* Get absolute values. */
799 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
801 neg_double (lden, hden, &labs_den, &habs_den);
803 /* If (2 * abs (lrem) >= abs (lden)) */
804 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
805 labs_rem, habs_rem, <wice, &htwice);
807 if (((unsigned HOST_WIDE_INT) habs_den
808 < (unsigned HOST_WIDE_INT) htwice)
809 || (((unsigned HOST_WIDE_INT) habs_den
810 == (unsigned HOST_WIDE_INT) htwice)
811 && (labs_den < ltwice)))
815 add_double (*lquo, *hquo,
816 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
819 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
831 /* Compute true remainder: rem = num - (quo * den) */
832 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
833 neg_double (*lrem, *hrem, lrem, hrem);
834 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Return true if built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */
/* NOTE(review): this excerpt appears to be missing the function's
   return type and entire body (the switch over built-in function
   codes); only the signature line is visible below.  */
negate_mathfn_p (enum built_in_function code)
866 /* Check whether we may negate an integer constant T without causing
870 may_negate_without_overflow_p (tree t)
872 unsigned HOST_WIDE_INT val;
876 if (TREE_CODE (t) != INTEGER_CST)
879 type = TREE_TYPE (t);
880 if (TYPE_UNSIGNED (type))
883 prec = TYPE_PRECISION (type);
884 if (prec > HOST_BITS_PER_WIDE_INT)
886 if (TREE_INT_CST_LOW (t) != 0)
888 prec -= HOST_BITS_PER_WIDE_INT;
889 val = TREE_INT_CST_HIGH (t);
892 val = TREE_INT_CST_LOW (t);
893 if (prec < HOST_BITS_PER_WIDE_INT)
894 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
895 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */
/* NOTE(review): the excerpt appears to be missing this function's
   return type, braces, and the switch's case labels; the comments
   below annotate only the code that is visible.  */
negate_expr_p (tree t)
  type = TREE_TYPE (t);

  switch (TREE_CODE (t))
      /* Presumably the integer-constant case: negation is safe for
	 unsigned types or when -ftrapv is off — TODO confirm labels.  */
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
      /* A complex constant can be negated iff both of its parts can.  */
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));
      /* With signed zeros honored, a float addition cannot be
	 rewritten as a subtraction.  */
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
      /* Push the negation into either factor/operand when rounding
	 direction cannot be observed.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	  tree tem = strip_float_extensions (t);
	  return negate_expr_p (tem);
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	  tree op1 = TREE_OPERAND (t, 1);
	  /* The shift count must equal precision - 1, i.e. the shift
	     isolates the sign bit.  */
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */
/* NOTE(review): the excerpt appears to be missing this function's
   signature line, several case labels, and braces; the comments below
   annotate only the code that is visible.  */
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
      /* Constant case: fold the negation, but keep the result only if
	 it did not overflow (or overflow cannot be observed).  */
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return fold_convert (type, tem);
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));
	/* Rebuild the complex constant only when both halves folded
	   down to constants.  */
	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      /* Presumably the NEGATE_EXPR case: - -A is A — TODO confirm.  */
      return fold_convert (type, TREE_OPERAND (t, 0));
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
				  tem, TREE_OPERAND (t, 0)));
	      return fold_convert (type, tem);
	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
				  tem, TREE_OPERAND (t, 1)));
	      return fold_convert (type, tem);
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_convert (type,
			     fold (build2 (MINUS_EXPR, TREE_TYPE (t),
					   TREE_OPERAND (t, 1),
					   TREE_OPERAND (t, 0))));
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
      /* Push the negation into one operand when rounding direction
	 cannot be observed.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
					       TREE_OPERAND (t, 0),
					       negate_expr (tem))));
	  tem = TREE_OPERAND (t, 0);
	  /* NOTE(review): the build2 call below shows only one operand
	     argument — a line (presumably negate_expr (tem)) appears to
	     be missing from the excerpt.  */
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
					       TREE_OPERAND (t, 1))));
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	  tree fndecl, arg, arglist;
	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	  tree op1 = TREE_OPERAND (t, 1);
	  /* Shift count must be precision - 1 (sign-bit extraction).  */
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	      /* Flip the signedness of the shift so it becomes logical.  */
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
	      return fold_convert (type, temp);
  /* Fallback: build an explicit NEGATE_EXPR.  */
  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
/* NOTE(review): the excerpt appears to be missing this function's
   return type, local declarations (e.g. `var'), braces, and the
   conditions guarding the negation bookkeeping near the end; the
   comments below annotate only the visible code.  */
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* neg1_p records whether the second operand was subtracted.  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
	*minus_litp = *litp, *litp = 0;
	*conp = negate_expr (*conp);
	var = negate_expr (var);
  else if (TREE_CONSTANT (in))
      /* Negating the whole of IN: a subtracted literal moves to
	 *MINUS_LITP and vice versa; other parts are negated directly.  */
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
1245 /* Re-associate trees split by the above function. T1 and T2 are either
1246 expressions to associate or null. Return the new expression, if any. If
1247 we build an operation, do it in TYPE and with CODE. */
1250 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1257 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1258 try to fold this since we will have infinite recursion. But do
1259 deal with any NEGATE_EXPRs. */
1260 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1261 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1263 if (code == PLUS_EXPR)
/* For addition, rewrite A + (-B) as A - B so the NEGATE_EXPR is
   absorbed instead of being re-folded (which could recurse).  */
1265 if (TREE_CODE (t1) == NEGATE_EXPR)
1266 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1267 fold_convert (type, TREE_OPERAND (t1, 0)));
1268 else if (TREE_CODE (t2) == NEGATE_EXPR)
1269 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1270 fold_convert (type, TREE_OPERAND (t2, 0)));
/* Neither operand is a NEGATE_EXPR: build the operation raw, without
   calling fold, to avoid the infinite recursion noted above.  */
1272 return build2 (code, type, fold_convert (type, t1),
1273 fold_convert (type, t2));
/* Safe to fold: neither input matches CODE/MINUS_EXPR.  */
1276 return fold (build2 (code, type, fold_convert (type, t1),
1277 fold_convert (type, t2)));
1280 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1281 to produce a new constant.
1283 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1286 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Each INTEGER_CST is represented as a (low, high) pair of
   HOST_WIDE_INTs; the double-word helpers (add_double etc.) operate on
   these pairs.  */
1288 unsigned HOST_WIDE_INT int1l, int2l;
1289 HOST_WIDE_INT int1h, int2h;
1290 unsigned HOST_WIDE_INT low;
/* Scratch outputs for the quotient/remainder we do not need.  */
1292 unsigned HOST_WIDE_INT garbagel;
1293 HOST_WIDE_INT garbageh;
1295 tree type = TREE_TYPE (arg1);
1296 int uns = TYPE_UNSIGNED (type);
1298 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1300 int no_overflow = 0;
1302 int1l = TREE_INT_CST_LOW (arg1);
1303 int1h = TREE_INT_CST_HIGH (arg1);
1304 int2l = TREE_INT_CST_LOW (arg2);
1305 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise operations are done word-by-word and cannot overflow.  */
1310 low = int1l | int2l, hi = int1h | int2h;
1314 low = int1l ^ int2l, hi = int1h ^ int2h;
1318 low = int1l & int2l, hi = int1h & int2h;
1324 /* It's unclear from the C standard whether shifts can overflow.
1325 The following code ignores overflow; perhaps a C standard
1326 interpretation ruling is needed. */
1327 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1335 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1340 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction is implemented as addition of the negated second
   operand; overflow is then detected from the sign pattern.  */
1344 neg_double (int2l, int2h, &low, &hi);
1345 add_double (int1l, int1h, low, hi, &low, &hi);
1346 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1350 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1353 case TRUNC_DIV_EXPR:
1354 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1355 case EXACT_DIV_EXPR:
1356 /* This is a shortcut for a common special case. */
/* Both operands fit in a single positive HOST_WIDE_INT, so plain
   host division is exact and cannot trap.  */
1357 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1358 && ! TREE_CONSTANT_OVERFLOW (arg1)
1359 && ! TREE_CONSTANT_OVERFLOW (arg2)
1360 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1362 if (code == CEIL_DIV_EXPR)
1365 low = int1l / int2l, hi = 0;
1369 /* ... fall through ... */
1371 case ROUND_DIV_EXPR:
/* Division by one: the result is the first operand unchanged.  */
1372 if (int2h == 0 && int2l == 1)
1374 low = int1l, hi = int1h;
/* Equal nonzero operands divide to exactly one.  */
1377 if (int1l == int2l && int1h == int2h
1378 && ! (int1l == 0 && int1h == 0))
1383 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1384 &low, &hi, &garbagel, &garbageh);
1387 case TRUNC_MOD_EXPR:
1388 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1389 /* This is a shortcut for a common special case. */
1390 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1391 && ! TREE_CONSTANT_OVERFLOW (arg1)
1392 && ! TREE_CONSTANT_OVERFLOW (arg2)
1393 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1395 if (code == CEIL_MOD_EXPR)
1397 low = int1l % int2l, hi = 0;
1401 /* ... fall through ... */
1403 case ROUND_MOD_EXPR:
/* For MOD we keep the remainder and discard the quotient.  */
1404 overflow = div_and_round_double (code, uns,
1405 int1l, int1h, int2l, int2h,
1406 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compare the two double-word values, unsigned or signed
   depending on the type, then pick the appropriate operand.  */
1412 low = (((unsigned HOST_WIDE_INT) int1h
1413 < (unsigned HOST_WIDE_INT) int2h)
1414 || (((unsigned HOST_WIDE_INT) int1h
1415 == (unsigned HOST_WIDE_INT) int2h)
1418 low = (int1h < int2h
1419 || (int1h == int2h && int1l < int2l));
1421 if (low == (code == MIN_EXPR))
1422 low = int1l, hi = int1h;
1424 low = int2l, hi = int2h;
1431 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1435 /* Propagate overflow flags ourselves. */
/* NOTRUNC path (presumably; the enclosing condition is not visible
   here -- confirm): set the flags directly instead of calling
   force_fit_type.  */
1436 if (((!uns || is_sizetype) && overflow)
1437 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1440 TREE_OVERFLOW (t) = 1;
1441 TREE_CONSTANT_OVERFLOW (t) = 1;
1443 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1446 TREE_CONSTANT_OVERFLOW (t) = 1;
/* Otherwise truncate to the type and let force_fit_type set
   TREE_OVERFLOW / TREE_CONSTANT_OVERFLOW.  */
1450 t = force_fit_type (t, 1,
1451 ((!uns || is_sizetype) && overflow)
1452 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1453 TREE_CONSTANT_OVERFLOW (arg1)
1454 | TREE_CONSTANT_OVERFLOW (arg2));
1459 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1460 constant. We assume ARG1 and ARG2 have the same data type, or at least
1461 are the same kind of constant and the same machine mode.
1463 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1466 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Integer constants are delegated to the dedicated helper.  */
1471 if (TREE_CODE (arg1) == INTEGER_CST)
1472 return int_const_binop (code, arg1, arg2, notrunc);
1474 if (TREE_CODE (arg1) == REAL_CST)
1476 enum machine_mode mode;
1479 REAL_VALUE_TYPE value;
1482 d1 = TREE_REAL_CST (arg1);
1483 d2 = TREE_REAL_CST (arg2);
1485 type = TREE_TYPE (arg1);
1486 mode = TYPE_MODE (type);
1488 /* Don't perform operation if we honor signaling NaNs and
1489 either operand is a NaN. */
1490 if (HONOR_SNANS (mode)
1491 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1494 /* Don't perform operation if it would raise a division
1495 by zero exception. */
1496 if (code == RDIV_EXPR
1497 && REAL_VALUES_EQUAL (d2, dconst0)
1498 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1501 /* If either operand is a NaN, just return it. Otherwise, set up
1502 for floating-point trap; we return an overflow. */
1503 if (REAL_VALUE_ISNAN (d1))
1505 else if (REAL_VALUE_ISNAN (d2))
/* Perform the arithmetic in the host's software FP representation,
   then truncate back to the target mode's precision.  */
1508 REAL_ARITHMETIC (value, code, d1, d2);
1510 t = build_real (type, real_value_truncate (mode, value));
/* Overflow flags are the OR of the operands' flags.  */
1512 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1513 TREE_CONSTANT_OVERFLOW (t)
1515 | TREE_CONSTANT_OVERFLOW (arg1)
1516 | TREE_CONSTANT_OVERFLOW (arg2);
1519 if (TREE_CODE (arg1) == COMPLEX_CST)
1521 tree type = TREE_TYPE (arg1);
1522 tree r1 = TREE_REALPART (arg1);
1523 tree i1 = TREE_IMAGPART (arg1);
1524 tree r2 = TREE_REALPART (arg2);
1525 tree i2 = TREE_IMAGPART (arg2);
/* Complex add/subtract: operate componentwise.  */
1531 t = build_complex (type,
1532 const_binop (PLUS_EXPR, r1, r2, notrunc),
1533 const_binop (PLUS_EXPR, i1, i2, notrunc));
1537 t = build_complex (type,
1538 const_binop (MINUS_EXPR, r1, r2, notrunc),
1539 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* Complex multiply: (r1*r2 - i1*i2) + (r1*i2 + i1*r2)i.  */
1543 t = build_complex (type,
1544 const_binop (MINUS_EXPR,
1545 const_binop (MULT_EXPR,
1547 const_binop (MULT_EXPR,
1550 const_binop (PLUS_EXPR,
1551 const_binop (MULT_EXPR,
1553 const_binop (MULT_EXPR,
/* Complex divide via the textbook formula: multiply numerator and
   denominator by the conjugate of ARG2; MAGSQUARED is r2^2 + i2^2.  */
1561 = const_binop (PLUS_EXPR,
1562 const_binop (MULT_EXPR, r2, r2, notrunc),
1563 const_binop (MULT_EXPR, i2, i2, notrunc),
1566 t = build_complex (type,
1568 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1569 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1570 const_binop (PLUS_EXPR,
1571 const_binop (MULT_EXPR, r1, r2,
1573 const_binop (MULT_EXPR, i1, i2,
1576 magsquared, notrunc),
1578 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1579 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1580 const_binop (MINUS_EXPR,
1581 const_binop (MULT_EXPR, i1, r2,
1583 const_binop (MULT_EXPR, r1, i2,
1586 magsquared, notrunc));
1598 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1599 indicates which particular sizetype to create. */
1602 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
/* KIND indexes the global sizetype_tab (sizetype, bitsizetype, ...).  */
1604 return build_int_cst (sizetype_tab[(int) kind], number);
1607 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1608 is a tree code. The type of the result is taken from the operands.
1609 Both must be the same type integer type and it must be a size type.
1610 If the operands are constant, so is the result. */
1613 size_binop (enum tree_code code, tree arg0, tree arg1)
1615 tree type = TREE_TYPE (arg0);
/* Sanity check: both operands must share a single sizetype-flavored
   integer type.  */
1617 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1618 || type != TREE_TYPE (arg1))
1621 /* Handle the special case of two integer constants faster. */
1622 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1624 /* And some specific cases even faster than that. */
/* Identity operations: 0 + x, x +/- 0, 1 * x need no arithmetic.  */
1625 if (code == PLUS_EXPR && integer_zerop (arg0))
1627 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1628 && integer_zerop (arg1))
1630 else if (code == MULT_EXPR && integer_onep (arg0))
1633 /* Handle general case of two integer constants. */
1634 return int_const_binop (code, arg0, arg1, 0);
1637 if (arg0 == error_mark_node || arg1 == error_mark_node)
1638 return error_mark_node;
/* Non-constant operands: build the expression and let fold simplify.  */
1640 return fold (build2 (code, type, arg0, arg1));
1643 /* Given two values, either both of sizetype or both of bitsizetype,
1644 compute the difference between the two values. Return the value
1645 in signed type corresponding to the type of the operands. */
1648 size_diffop (tree arg0, tree arg1)
1650 tree type = TREE_TYPE (arg0);
1653 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1654 || type != TREE_TYPE (arg1))
1657 /* If the type is already signed, just do the simple thing. */
1658 if (!TYPE_UNSIGNED (type))
1659 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the operand type.  */
1661 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1663 /* If either operand is not a constant, do the conversions to the signed
1664 type and subtract. The hardware will do the right thing with any
1665 overflow in the subtraction. */
1666 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1667 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1668 fold_convert (ctype, arg1));
1670 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1671 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1672 overflow) and negate (which can't either). Special-case a result
1673 of zero while we're here. */
1674 if (tree_int_cst_equal (arg0, arg1))
1675 return fold_convert (ctype, integer_zero_node)
1676 else if (tree_int_cst_lt (arg1, arg0))
1677 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* ARG0 < ARG1: compute 0 - (ARG1 - ARG0) so the unsigned subtraction
   never wraps.  */
1679 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1680 fold_convert (ctype, size_binop (MINUS_EXPR,
1685 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1686 type TYPE. If no simplification can be done return NULL_TREE. */
1689 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* Trivial case: no conversion needed.  */
1694 if (TREE_TYPE (arg1) == type)
1697 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1699 if (TREE_CODE (arg1) == INTEGER_CST)
1701 /* If we would build a constant wider than GCC supports,
1702 leave the conversion unfolded. */
1703 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1706 /* Given an integer constant, make new constant with new type,
1707 appropriately sign-extended or truncated. */
1708 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1709 TREE_INT_CST_HIGH (arg1));
1711 t = force_fit_type (t,
1712 /* Don't set the overflow when
1713 converting a pointer */
1714 !POINTER_TYPE_P (TREE_TYPE (arg1)),
/* Converting a negative value to a wider unsigned type counts as
   overflow; OR in any pre-existing overflow from ARG1.  */
1715 (TREE_INT_CST_HIGH (arg1) < 0
1716 && (TYPE_UNSIGNED (type)
1717 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1718 | TREE_OVERFLOW (arg1),
1719 TREE_CONSTANT_OVERFLOW (arg1));
1722 else if (TREE_CODE (arg1) == REAL_CST)
1724 /* The following code implements the floating point to integer
1725 conversion rules required by the Java Language Specification,
1726 that IEEE NaNs are mapped to zero and values that overflow
1727 the target precision saturate, i.e. values greater than
1728 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1729 are mapped to INT_MIN. These semantics are allowed by the
1730 C and C++ standards that simply state that the behavior of
1731 FP-to-integer conversion is unspecified upon overflow. */
1733 HOST_WIDE_INT high, low;
1735 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Round according to the requested FIX_* variant before
   converting to an integer.  */
1739 case FIX_TRUNC_EXPR:
1740 real_trunc (&r, VOIDmode, &x);
1744 real_ceil (&r, VOIDmode, &x);
1747 case FIX_FLOOR_EXPR:
1748 real_floor (&r, VOIDmode, &x);
1751 case FIX_ROUND_EXPR:
1752 real_round (&r, VOIDmode, &x);
1759 /* If R is NaN, return zero and show we have an overflow. */
1760 if (REAL_VALUE_ISNAN (r))
1767 /* See if R is less than the lower bound or greater than the
/* Saturate at TYPE_MIN_VALUE if R underflows the target type.  */
1772 tree lt = TYPE_MIN_VALUE (type);
1773 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1774 if (REAL_VALUES_LESS (r, l))
1777 high = TREE_INT_CST_HIGH (lt);
1778 low = TREE_INT_CST_LOW (lt);
/* Saturate at TYPE_MAX_VALUE if R overflows the target type.  */
1784 tree ut = TYPE_MAX_VALUE (type);
1787 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1788 if (REAL_VALUES_LESS (u, r))
1791 high = TREE_INT_CST_HIGH (ut);
1792 low = TREE_INT_CST_LOW (ut);
/* In range: convert the rounded real to a double-word integer.  */
1798 REAL_VALUE_TO_INT (&low, &high, r);
1800 t = build_int_cst_wide (type, low, high);
1802 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1803 TREE_CONSTANT_OVERFLOW (arg1));
1807 else if (TREE_CODE (type) == REAL_TYPE)
1809 if (TREE_CODE (arg1) == INTEGER_CST)
1810 return build_real_from_int_cst (type, arg1);
1811 if (TREE_CODE (arg1) == REAL_CST)
/* Real-to-real conversion of a NaN: just retype a copy, since
   truncating a NaN could change its payload.  */
1813 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1815 /* We make a copy of ARG1 so that we don't modify an
1816 existing constant tree. */
1817 t = copy_node (arg1);
1818 TREE_TYPE (t) = type;
1822 t = build_real (type,
1823 real_value_truncate (TYPE_MODE (type),
1824 TREE_REAL_CST (arg1)));
1826 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1827 TREE_CONSTANT_OVERFLOW (t)
1828 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1835 /* Convert expression ARG to type TYPE. Used by the middle-end for
1836 simple conversions in preference to calling the front-end's convert. */
1839 fold_convert (tree type, tree arg)
1841 tree orig = TREE_TYPE (arg);
/* Propagate errors rather than trying to convert them.  */
1847 if (TREE_CODE (arg) == ERROR_MARK
1848 || TREE_CODE (type) == ERROR_MARK
1849 || TREE_CODE (orig) == ERROR_MARK)
1850 return error_mark_node;
/* Same type (up to qualifiers / language-level compatibility): a bare
   NOP_EXPR suffices.  */
1852 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1853 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1854 TYPE_MAIN_VARIANT (orig)))
1855 return fold (build1 (NOP_EXPR, type, arg));
/* Target is integral, pointer, or offset type.  */
1857 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1858 || TREE_CODE (type) == OFFSET_TYPE)
1860 if (TREE_CODE (arg) == INTEGER_CST)
1862 tem = fold_convert_const (NOP_EXPR, type, arg);
1863 if (tem != NULL_TREE)
1866 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1867 || TREE_CODE (orig) == OFFSET_TYPE)
1868 return fold (build1 (NOP_EXPR, type, arg));
/* Complex source: convert just the real part.  */
1869 if (TREE_CODE (orig) == COMPLEX_TYPE)
1871 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1872 return fold_convert (type, tem);
/* Same-size vector source: reinterpret via NOP_EXPR.  */
1874 if (TREE_CODE (orig) == VECTOR_TYPE
1875 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
1876 return fold (build1 (NOP_EXPR, type, arg));
/* Target is a floating-point type.  */
1878 else if (TREE_CODE (type) == REAL_TYPE)
1880 if (TREE_CODE (arg) == INTEGER_CST)
1882 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1883 if (tem != NULL_TREE)
1886 else if (TREE_CODE (arg) == REAL_CST)
1888 tem = fold_convert_const (NOP_EXPR, type, arg);
1889 if (tem != NULL_TREE)
1893 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1894 return fold (build1 (FLOAT_EXPR, type, arg));
/* Real-to-real: CONVERT_EXPR under -ffloat-store forces the value
   through memory precision; otherwise a plain NOP_EXPR.  */
1895 if (TREE_CODE (orig) == REAL_TYPE)
1896 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1898 if (TREE_CODE (orig) == COMPLEX_TYPE)
1900 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1901 return fold_convert (type, tem);
/* Target is a complex type: build (re, im) explicitly.  */
1904 else if (TREE_CODE (type) == COMPLEX_TYPE)
1906 if (INTEGRAL_TYPE_P (orig)
1907 || POINTER_TYPE_P (orig)
1908 || TREE_CODE (orig) == REAL_TYPE)
1909 return build2 (COMPLEX_EXPR, type,
1910 fold_convert (TREE_TYPE (type), arg),
1911 fold_convert (TREE_TYPE (type), integer_zero_node));
1912 if (TREE_CODE (orig) == COMPLEX_TYPE)
/* If ARG is already a COMPLEX_EXPR we can convert its parts
   directly without a save_expr.  */
1916 if (TREE_CODE (arg) == COMPLEX_EXPR)
1918 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1919 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1920 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
/* Otherwise evaluate ARG once and take both parts of the result.  */
1923 arg = save_expr (arg);
1924 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1925 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1926 rpart = fold_convert (TREE_TYPE (type), rpart);
1927 ipart = fold_convert (TREE_TYPE (type), ipart);
1928 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
/* Target is a vector type: only same-size reinterpretations fold.  */
1931 else if (TREE_CODE (type) == VECTOR_TYPE)
1933 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1934 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
1935 return fold (build1 (NOP_EXPR, type, arg));
1936 if (TREE_CODE (orig) == VECTOR_TYPE
1937 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)))
1938 return fold (build1 (NOP_EXPR, type, arg));
/* Converting to void discards the value but keeps side effects.  */
1940 else if (VOID_TYPE_P (type))
1941 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
1945 /* Return an expr equal to X but certainly not valid as an lvalue. */
1950 /* We only need to wrap lvalue tree codes. */
1951 switch (TREE_CODE (x))
/* The listed codes can appear as lvalues, so X must be wrapped.  */
1963 case ARRAY_RANGE_REF:
1969 case PREINCREMENT_EXPR:
1970 case PREDECREMENT_EXPR:
1972 case TRY_CATCH_EXPR:
1973 case WITH_CLEANUP_EXPR:
1984 /* Assume the worst for front-end tree codes. */
1985 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
/* Wrap in NON_LVALUE_EXPR; type is unchanged so callers see the same
   value.  */
1989 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1992 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1993 Zero means allow extended lvalues. */
/* NOTE(review): presumably set by the front end during option
   processing -- confirm against the callers.  */
1995 int pedantic_lvalues;
1997 /* When pedantic, return an expr equal to X but certainly not valid as a
1998 pedantic lvalue. Otherwise, return X. */
2001 pedantic_non_lvalue (tree x)
/* Only wrap when the pedantic-lvalue restriction is in force.  */
2003 if (pedantic_lvalues)
2004 return non_lvalue (x);
2009 /* Given a tree comparison code, return the code that is the logical inverse
2010 of the given code. It is not safe to do this for floating-point
2011 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2012 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2014 static enum tree_code
2015 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math and NaNs we cannot invert at all (the unordered
   forms do not trap, the ordered forms do).  */
2017 if (honor_nans && flag_trapping_math)
/* The inverse of an ordered comparison is the corresponding unordered
   one when NaNs must be honored (e.g. !(a > b) is a UNLE b).  */
2027 return honor_nans ? UNLE_EXPR : LE_EXPR;
2029 return honor_nans ? UNLT_EXPR : LT_EXPR;
2031 return honor_nans ? UNGE_EXPR : GE_EXPR;
2033 return honor_nans ? UNGT_EXPR : GT_EXPR;
/* ORDERED and UNORDERED are each other's inverse.  */
2047 return UNORDERED_EXPR;
2048 case UNORDERED_EXPR:
2049 return ORDERED_EXPR;
2055 /* Similar, but return the comparison that results if the operands are
2056 swapped. This is safe for floating-point. */
/* NOTE(review): body not visible here; presumably maps LT<->GT,
   LE<->GE etc. while EQ/NE are fixed points -- confirm.  */
2059 swap_tree_comparison (enum tree_code code)
2080 /* Convert a comparison tree code from an enum tree_code representation
2081 into a compcode bit-based encoding. This function is the inverse of
2082 compcode_to_comparison. */
2084 static enum comparison_code
2085 comparison_to_compcode (enum tree_code code)
/* Each comparison maps to a bitmask over the <, =, > and unordered
   outcomes, so combining comparisons becomes bitwise arithmetic.  */
2102 return COMPCODE_ORD;
2103 case UNORDERED_EXPR:
2104 return COMPCODE_UNORD;
2106 return COMPCODE_UNLT;
2108 return COMPCODE_UNEQ;
2110 return COMPCODE_UNLE;
2112 return COMPCODE_UNGT;
2114 return COMPCODE_LTGT;
2116 return COMPCODE_UNGE;
2122 /* Convert a compcode bit-based encoding of a comparison operator back
2123 to GCC's enum tree_code representation. This function is the
2124 inverse of comparison_to_compcode. */
2126 static enum tree_code
2127 compcode_to_comparison (enum comparison_code code)
/* Inverse mapping of comparison_to_compcode; only representable
   compcodes are expected here.  */
2144 return ORDERED_EXPR;
2145 case COMPCODE_UNORD:
2146 return UNORDERED_EXPR;
2164 /* Return a tree for the comparison which is the combination of
2165 doing the AND or OR (depending on CODE) of the two operations LCODE
2166 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2167 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2168 if this makes the transformation invalid. */
2171 combine_comparisons (enum tree_code code, enum tree_code lcode,
2172 enum tree_code rcode, tree truth_type,
2173 tree ll_arg, tree lr_arg)
2175 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2176 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2177 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2178 enum comparison_code compcode;
/* In the bit encoding, AND of comparisons is bitwise AND of their
   outcome masks, and OR is bitwise OR.  */
2182 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2183 compcode = lcompcode & rcompcode;
2186 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2187 compcode = lcompcode | rcompcode;
2196 /* Eliminate unordered comparisons, as well as LTGT and ORD
2197 which are not used unless the mode has NaNs. */
2198 compcode &= ~COMPCODE_UNORD;
2199 if (compcode == COMPCODE_LTGT)
2200 compcode = COMPCODE_NE;
2201 else if (compcode == COMPCODE_ORD)
2202 compcode = COMPCODE_TRUE;
2204 else if (flag_trapping_math)
2206 /* Check that the original operation and the optimized ones will trap
2207 under the same condition. */
/* A comparison traps on NaN operands iff it is an ordered comparison
   other than EQ (and not ORD itself).  */
2208 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2209 && (lcompcode != COMPCODE_EQ)
2210 && (lcompcode != COMPCODE_ORD);
2211 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2212 && (rcompcode != COMPCODE_EQ)
2213 && (rcompcode != COMPCODE_ORD);
2214 bool trap = (compcode & COMPCODE_UNORD) == 0
2215 && (compcode != COMPCODE_EQ)
2216 && (compcode != COMPCODE_ORD);
2218 /* In a short-circuited boolean expression the LHS might be
2219 such that the RHS, if evaluated, will never trap. For
2220 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2221 if neither x nor y is NaN. (This is a mixed blessing: for
2222 example, the expression above will never trap, hence
2223 optimizing it to x < y would be invalid). */
2224 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2225 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2228 /* If the comparison was short-circuited, and only the RHS
2229 trapped, we may now generate a spurious trap. */
2231 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2234 /* If we changed the conditions that cause a trap, we lose. */
2235 if ((ltrap || rtrap) != trap)
/* Degenerate results fold to boolean constants.  */
2239 if (compcode == COMPCODE_TRUE)
2240 return constant_boolean_node (true, truth_type);
2241 else if (compcode == COMPCODE_FALSE)
2242 return constant_boolean_node (false, truth_type);
/* Otherwise emit the single combined comparison.  */
2244 return fold (build2 (compcode_to_comparison (compcode),
2245 truth_type, ll_arg, lr_arg));
2248 /* Return nonzero if CODE is a tree code that represents a truth value. */
2251 truth_value_p (enum tree_code code)
/* Class '<' covers all comparison codes; the rest are the explicit
   boolean connectives.  */
2253 return (TREE_CODE_CLASS (code) == '<'
2254 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2255 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2256 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2259 /* Return nonzero if two operands (typically of the same tree node)
2260 are necessarily equal. If either argument has side-effects this
2261 function returns zero. FLAGS modifies behavior as follows:
2263 If OEP_ONLY_CONST is set, only return nonzero for constants.
2264 This function tests whether the operands are indistinguishable;
2265 it does not test whether they are equal using C's == operation.
2266 The distinction is important for IEEE floating point, because
2267 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2268 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2270 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2271 even though it may hold multiple values during a function.
2272 This is because a GCC tree node guarantees that nothing else is
2273 executed between the evaluation of its "operands" (which may often
2274 be evaluated in arbitrary order). Hence if the operands themselves
2275 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2276 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2277 unset means assuming isochronic (or instantaneous) tree equivalence.
2278 Unless comparing arbitrary expression trees, such as from different
2279 statements, this flag can usually be left unset.
2281 If OEP_PURE_SAME is set, then pure functions with identical arguments
2282 are considered the same. It is used when the caller has other ways
2283 to ensure that global memory is unchanged in between. */
2286 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2288 /* If one is specified and the other isn't, they aren't equal and if
2289 neither is specified, they are.
2291 ??? This is temporary and is meant only to handle the cases of the
2292 optional operands for COMPONENT_REF and ARRAY_REF. */
2293 if ((arg0 && !arg1) || (!arg0 && arg1))
2295 else if (!arg0 && !arg1)
2297 /* If either is ERROR_MARK, they aren't equal. */
2298 else if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2301 /* If both types don't have the same signedness, then we can't consider
2302 them equal. We must check this before the STRIP_NOPS calls
2303 because they may change the signedness of the arguments. */
2304 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
/* After stripping conversions, the remaining codes and modes must
   agree for the operands to be comparable at all.  */
2310 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2311 /* This is needed for conversions and for COMPONENT_REF.
2312 Might as well play it safe and always test this. */
2313 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2314 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2315 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2318 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2319 We don't care about side effects in that case because the SAVE_EXPR
2320 takes care of that for us. In all other cases, two expressions are
2321 equal if they have no side effects. If we have two identical
2322 expressions with side effects that should be treated the same due
2323 to the only side effects being identical SAVE_EXPR's, that will
2324 be detected in the recursive calls below. */
2325 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2326 && (TREE_CODE (arg0) == SAVE_EXPR
2327 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2330 /* Next handle constant cases, those for which we can return 1 even
2331 if ONLY_CONST is set. */
2332 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2333 switch (TREE_CODE (arg0))
/* Constants that overflowed are never considered equal, even to
   themselves.  */
2336 return (! TREE_CONSTANT_OVERFLOW (arg0)
2337 && ! TREE_CONSTANT_OVERFLOW (arg1)
2338 && tree_int_cst_equal (arg0, arg1));
/* REAL_VALUES_IDENTICAL distinguishes -0.0 from 0.0 and compares
   NaNs bitwise, unlike C's ==.  */
2341 return (! TREE_CONSTANT_OVERFLOW (arg0)
2342 && ! TREE_CONSTANT_OVERFLOW (arg1)
2343 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2344 TREE_REAL_CST (arg1)));
/* Vector constants: walk the element lists in parallel.  */
2350 if (TREE_CONSTANT_OVERFLOW (arg0)
2351 || TREE_CONSTANT_OVERFLOW (arg1))
2354 v1 = TREE_VECTOR_CST_ELTS (arg0);
2355 v2 = TREE_VECTOR_CST_ELTS (arg1);
2358 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2361 v1 = TREE_CHAIN (v1);
2362 v2 = TREE_CHAIN (v2);
/* Complex constants compare componentwise.  */
2369 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2371 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
/* String constants: same length and identical bytes.  */
2375 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2376 && ! memcmp (TREE_STRING_POINTER (arg0),
2377 TREE_STRING_POINTER (arg1),
2378 TREE_STRING_LENGTH (arg0)));
2381 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
/* Past this point we are comparing non-constant expressions, which
   OEP_ONLY_CONST forbids.  */
2387 if (flags & OEP_ONLY_CONST)
2390 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2393 /* Two conversions are equal only if signedness and modes match. */
2394 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2395 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2396 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2399 return operand_equal_p (TREE_OPERAND (arg0, 0),
2400 TREE_OPERAND (arg1, 0), flags);
/* Binary expressions: compare operand-by-operand.  */
2404 if (operand_equal_p (TREE_OPERAND (arg0, 0),
2405 TREE_OPERAND (arg1, 0), flags)
2406 && operand_equal_p (TREE_OPERAND (arg0, 1),
2407 TREE_OPERAND (arg1, 1), flags))
2410 /* For commutative ops, allow the other order. */
2411 return (commutative_tree_code (TREE_CODE (arg0))
2412 && operand_equal_p (TREE_OPERAND (arg0, 0),
2413 TREE_OPERAND (arg1, 1), flags)
2414 && operand_equal_p (TREE_OPERAND (arg0, 1),
2415 TREE_OPERAND (arg1, 0), flags));
2418 /* If either of the pointer (or reference) expressions we are
2419 dereferencing contain a side effect, these cannot be equal. */
2420 if (TREE_SIDE_EFFECTS (arg0)
2421 || TREE_SIDE_EFFECTS (arg1))
2424 switch (TREE_CODE (arg0))
/* Simple dereference-style references: one operand to compare.  */
2429 return operand_equal_p (TREE_OPERAND (arg0, 0),
2430 TREE_OPERAND (arg1, 0), flags);
2433 case ARRAY_RANGE_REF:
/* Array references carry up to four operands (base, index, and the
   optional lower-bound/element-size operands).  */
2434 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2435 TREE_OPERAND (arg1, 0), flags)
2436 && operand_equal_p (TREE_OPERAND (arg0, 1),
2437 TREE_OPERAND (arg1, 1), flags)
2438 && operand_equal_p (TREE_OPERAND (arg0, 2),
2439 TREE_OPERAND (arg1, 2), flags)
2440 && operand_equal_p (TREE_OPERAND (arg0, 3),
2441 TREE_OPERAND (arg1, 3), flags));
/* Three-operand references (e.g. COMPONENT_REF with its optional
   offset operand).  */
2445 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2446 TREE_OPERAND (arg1, 0), flags)
2447 && operand_equal_p (TREE_OPERAND (arg0, 1),
2448 TREE_OPERAND (arg1, 1), flags)
2449 && operand_equal_p (TREE_OPERAND (arg0, 2),
2450 TREE_OPERAND (arg1, 2), flags));
2454 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2455 TREE_OPERAND (arg1, 0), flags)
2456 && operand_equal_p (TREE_OPERAND (arg0, 1),
2457 TREE_OPERAND (arg1, 1), flags)
2458 && operand_equal_p (TREE_OPERAND (arg0, 2),
2459 TREE_OPERAND (arg1, 2), flags));
2465 switch (TREE_CODE (arg0))
2468 case TRUTH_NOT_EXPR:
2469 return operand_equal_p (TREE_OPERAND (arg0, 0),
2470 TREE_OPERAND (arg1, 0), flags);
2472 case TRUTH_ANDIF_EXPR:
2473 case TRUTH_ORIF_EXPR:
/* Short-circuit forms are order-sensitive, so only the exact
   operand order matches.  */
2474 return operand_equal_p (TREE_OPERAND (arg0, 0),
2475 TREE_OPERAND (arg1, 0), flags)
2476 && operand_equal_p (TREE_OPERAND (arg0, 1),
2477 TREE_OPERAND (arg1, 1), flags);
2479 case TRUTH_AND_EXPR:
2481 case TRUTH_XOR_EXPR:
/* Non-short-circuit boolean ops are commutative: accept either
   operand order.  */
2482 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2483 TREE_OPERAND (arg1, 0), flags)
2484 && operand_equal_p (TREE_OPERAND (arg0, 1),
2485 TREE_OPERAND (arg1, 1), flags))
2486 || (operand_equal_p (TREE_OPERAND (arg0, 0),
2487 TREE_OPERAND (arg1, 1), flags)
2488 && operand_equal_p (TREE_OPERAND (arg0, 1),
2489 TREE_OPERAND (arg1, 0), flags));
2492 /* If the CALL_EXPRs call different functions, then they
2493 clearly can not be equal. */
2494 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2495 TREE_OPERAND (arg1, 0), flags))
/* Calls are only comparable when the function is const (or pure,
   when OEP_PURE_SAME permits it).  */
2499 unsigned int cef = call_expr_flags (arg0);
2500 if (flags & OEP_PURE_SAME)
2501 cef &= ECF_CONST | ECF_PURE;
2508 /* Now see if all the arguments are the same. operand_equal_p
2509 does not handle TREE_LIST, so we walk the operands here
2510 feeding them to operand_equal_p. */
2511 arg0 = TREE_OPERAND (arg0, 1);
2512 arg1 = TREE_OPERAND (arg1, 1);
2513 while (arg0 && arg1)
2515 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2519 arg0 = TREE_CHAIN (arg0);
2520 arg1 = TREE_CHAIN (arg1);
2523 /* If we get here and both argument lists are exhausted
2524 then the CALL_EXPRs are equal. */
2525 return ! (arg0 || arg1);
2532 /* Consider __builtin_sqrt equal to sqrt. */
2533 return (TREE_CODE (arg0) == FUNCTION_DECL
2534 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2535 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2536 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2543 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2544 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2546 When in doubt, return 0. */
2549 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2551 int unsignedp1, unsignedpo;
2552 tree primarg0, primarg1, primother;
2553 unsigned int correct_width;
/* Exact structural equality is the easy success case.  */
2555 if (operand_equal_p (arg0, arg1, 0))
/* Only integral operands can have been shortened.  */
2558 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2559 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2562 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2563 and see if the inner values are the same. This removes any
2564 signedness comparison, which doesn't matter here. */
2565 primarg0 = arg0, primarg1 = arg1;
2566 STRIP_NOPS (primarg0);
2567 STRIP_NOPS (primarg1);
2568 if (operand_equal_p (primarg0, primarg1, 0))
2571 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2572 actual comparison operand, ARG0.
2574 First throw away any conversions to wider types
2575 already present in the operands. */
2577 primarg1 = get_narrower (arg1, &unsignedp1);
2578 primother = get_narrower (other, &unsignedpo);
2580 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
/* Both narrowed operands must agree in signedness and be strictly
   narrower than ARG1's type for shortening to have happened.  */
2581 if (unsignedp1 == unsignedpo
2582 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2583 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2585 tree type = TREE_TYPE (arg0);
2587 /* Make sure shorter operand is extended the right way
2588 to match the longer operand. */
2589 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2590 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2592 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2599 /* See if ARG is an expression that is either a comparison or is performing
2600 arithmetic on comparisons. The comparisons must only be comparing
2601 two different values, which will be stored in *CVAL1 and *CVAL2; if
2602 they are nonzero it means that some operands have already been found.
2603 No variables may be used anywhere else in the expression except in the
2604 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2605 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2607 If this is true, return 1. Otherwise, return zero. */
2610 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2612 enum tree_code code = TREE_CODE (arg);
2613 char class = TREE_CODE_CLASS (code);
2615 /* We can handle some of the 'e' cases here. */
2616 if (class == 'e' && code == TRUTH_NOT_EXPR)
2618 else if (class == 'e'
2619 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2620 || code == COMPOUND_EXPR))
2623 else if (class == 'e' && code == SAVE_EXPR
2624 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2626 /* If we've already found a CVAL1 or CVAL2, this expression is
2627 too complex to handle. */
2628 if (*cval1 || *cval2)
/* Unary case: recurse into the single operand. */
2638 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must independently qualify. */
2641 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2642 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2643 cval1, cval2, save_p));
2649 if (code == COND_EXPR)
2650 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2651 cval1, cval2, save_p)
2652 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2653 cval1, cval2, save_p)
2654 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2655 cval1, cval2, save_p));
2659 /* First see if we can handle the first operand, then the second. For
2660 the second operand, we know *CVAL1 can't be zero. It must be that
2661 one side of the comparison is each of the values; test for the
2662 case where this isn't true by failing if the two operands
2665 if (operand_equal_p (TREE_OPERAND (arg, 0),
2666 TREE_OPERAND (arg, 1), 0))
/* Record or match the first comparison operand against *CVAL1/*CVAL2. */
2670 *cval1 = TREE_OPERAND (arg, 0);
2671 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2673 else if (*cval2 == 0)
2674 *cval2 = TREE_OPERAND (arg, 0);
2675 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for the second comparison operand. */
2680 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2682 else if (*cval2 == 0)
2683 *cval2 = TREE_OPERAND (arg, 1);
2684 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2696 /* ARG is a tree that is known to contain just arithmetic operations and
2697 comparisons. Evaluate the operations in the tree substituting NEW0 for
2698 any occurrence of OLD0 as an operand of a comparison and likewise for
2702 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2704 tree type = TREE_TYPE (arg);
2705 enum tree_code code = TREE_CODE (arg);
2706 char class = TREE_CODE_CLASS (code);
2708 /* We can handle some of the 'e' cases here. */
2709 if (class == 'e' && code == TRUTH_NOT_EXPR)
2711 else if (class == 'e'
2712 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary: rebuild the node around the substituted operand and re-fold. */
2718 return fold (build1 (code, type,
2719 eval_subst (TREE_OPERAND (arg, 0),
2720 old0, new0, old1, new1)));
/* Binary: substitute in both operands, then re-fold. */
2723 return fold (build2 (code, type,
2724 eval_subst (TREE_OPERAND (arg, 0),
2725 old0, new0, old1, new1),
2726 eval_subst (TREE_OPERAND (arg, 1),
2727 old0, new0, old1, new1)));
2733 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2736 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary (e.g. COND_EXPR): substitute in all three operands. */
2739 return fold (build3 (code, type,
2740 eval_subst (TREE_OPERAND (arg, 0),
2741 old0, new0, old1, new1),
2742 eval_subst (TREE_OPERAND (arg, 1),
2743 old0, new0, old1, new1),
2744 eval_subst (TREE_OPERAND (arg, 2),
2745 old0, new0, old1, new1)));
2749 /* Fall through - ??? */
2753 tree arg0 = TREE_OPERAND (arg, 0);
2754 tree arg1 = TREE_OPERAND (arg, 1);
2756 /* We need to check both for exact equality and tree equality. The
2757 former will be true if the operand has a side-effect. In that
2758 case, we know the operand occurred exactly once. */
2760 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2762 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2765 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2767 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2770 return fold (build2 (code, type, arg0, arg1));
2778 /* Return a tree for the case when the result of an expression is RESULT
2779 converted to TYPE and OMITTED was previously an operand of the expression
2780 but is now not needed (e.g., we folded OMITTED * 0).
2782 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2783 the conversion of RESULT to TYPE. */
2786 omit_one_operand (tree type, tree result, tree omitted)
2788 tree t = fold_convert (type, result);
/* Preserve OMITTED's side effects by sequencing it before T. */
2790 if (TREE_SIDE_EFFECTS (omitted))
2791 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
/* No side effects: the converted result alone suffices.  */
2793 return non_lvalue (t);
2796 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2799 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2801 tree t = fold_convert (type, result);
/* As in omit_one_operand, keep OMITTED's side effects via COMPOUND_EXPR. */
2803 if (TREE_SIDE_EFFECTS (omitted))
2804 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2806 return pedantic_non_lvalue (t);
2809 /* Return a tree for the case when the result of an expression is RESULT
2810 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2811 of the expression but are now not needed.
2813 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2814 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2815 evaluated before OMITTED2. Otherwise, if neither has side effects,
2816 just do the conversion of RESULT to TYPE. */
2819 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2821 tree t = fold_convert (type, result);
/* Wrap innermost-first so that OMITTED1 ends up evaluated before
   OMITTED2 in the resulting COMPOUND_EXPR chain. */
2823 if (TREE_SIDE_EFFECTS (omitted2))
2824 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2825 if (TREE_SIDE_EFFECTS (omitted1))
2826 t = build2 (COMPOUND_EXPR, type, omitted1, t);
/* Only mark as non-lvalue when no COMPOUND_EXPR wrapper was added. */
2828 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2832 /* Return a simplified tree node for the truth-negation of ARG. This
2833 never alters ARG itself. We assume that ARG is an operation that
2834 returns a truth value (0 or 1).
2836 FIXME: one would think we would fold the result, but it causes
2837 problems with the dominator optimizer. */
2839 invert_truthvalue (tree arg)
2841 tree type = TREE_TYPE (arg);
2842 enum tree_code code = TREE_CODE (arg);
2844 if (code == ERROR_MARK)
2847 /* If this is a comparison, we can simply invert it, except for
2848 floating-point non-equality comparisons, in which case we just
2849 enclose a TRUTH_NOT_EXPR around what we have. */
2851 if (TREE_CODE_CLASS (code) == '<')
2853 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
/* With -ftrapping-math, inverting an FP ordering comparison could
   change trapping behavior, so keep an explicit TRUTH_NOT_EXPR. */
2854 if (FLOAT_TYPE_P (op_type)
2855 && flag_trapping_math
2856 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2857 && code != NE_EXPR && code != EQ_EXPR)
2858 return build1 (TRUTH_NOT_EXPR, type, arg);
2861 code = invert_tree_comparison (code,
2862 HONOR_NANS (TYPE_MODE (op_type)));
2863 if (code == ERROR_MARK)
2864 return build1 (TRUTH_NOT_EXPR, type, arg);
2866 return build2 (code, type,
2867 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant case: !0 -> 1, !nonzero -> 0. */
2874 return fold_convert (type,
2875 build_int_cst (NULL_TREE, integer_zerop (arg)));
2877 case TRUTH_AND_EXPR:
/* De Morgan: !(a & b) == !a | !b. */
2878 return build2 (TRUTH_OR_EXPR, type,
2879 invert_truthvalue (TREE_OPERAND (arg, 0)),
2880 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* De Morgan: !(a | b) == !a & !b. */
2883 return build2 (TRUTH_AND_EXPR, type,
2884 invert_truthvalue (TREE_OPERAND (arg, 0)),
2885 invert_truthvalue (TREE_OPERAND (arg, 1)));
2887 case TRUTH_XOR_EXPR:
2888 /* Here we can invert either operand. We invert the first operand
2889 unless the second operand is a TRUTH_NOT_EXPR in which case our
2890 result is the XOR of the first operand with the inside of the
2891 negation of the second operand. */
2893 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2894 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2895 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2897 return build2 (TRUTH_XOR_EXPR, type,
2898 invert_truthvalue (TREE_OPERAND (arg, 0)),
2899 TREE_OPERAND (arg, 1));
2901 case TRUTH_ANDIF_EXPR:
2902 return build2 (TRUTH_ORIF_EXPR, type,
2903 invert_truthvalue (TREE_OPERAND (arg, 0)),
2904 invert_truthvalue (TREE_OPERAND (arg, 1)));
2906 case TRUTH_ORIF_EXPR:
2907 return build2 (TRUTH_ANDIF_EXPR, type,
2908 invert_truthvalue (TREE_OPERAND (arg, 0)),
2909 invert_truthvalue (TREE_OPERAND (arg, 1)));
2911 case TRUTH_NOT_EXPR:
/* Double negation cancels. */
2912 return TREE_OPERAND (arg, 0);
/* COND_EXPR: invert both arms, keep the condition. */
2915 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2916 invert_truthvalue (TREE_OPERAND (arg, 1)),
2917 invert_truthvalue (TREE_OPERAND (arg, 2)));
/* COMPOUND_EXPR: only the value operand is negated. */
2920 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2921 invert_truthvalue (TREE_OPERAND (arg, 1)));
2923 case NON_LVALUE_EXPR:
2924 return invert_truthvalue (TREE_OPERAND (arg, 0));
2927 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
/* Push the negation through a conversion. */
2932 return build1 (TREE_CODE (arg), type,
2933 invert_truthvalue (TREE_OPERAND (arg, 0)));
2936 if (!integer_onep (TREE_OPERAND (arg, 1)))
2938 return build2 (EQ_EXPR, type, arg,
2939 fold_convert (type, integer_zero_node));
2942 return build1 (TRUTH_NOT_EXPR, type, arg);
2944 case CLEANUP_POINT_EXPR:
2945 return build1 (CLEANUP_POINT_EXPR, type,
2946 invert_truthvalue (TREE_OPERAND (arg, 0)));
2951 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2953 return build1 (TRUTH_NOT_EXPR, type, arg);
2956 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2957 operands are another bit-wise operation with a common input. If so,
2958 distribute the bit operations to save an operation and possibly two if
2959 constants are involved. For example, convert
2960 (A | B) & (A | C) into A | (B & C)
2961 Further simplification will occur if B and C are constants.
2963 If this optimization cannot be done, 0 will be returned. */
2966 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must be the same BIT_AND/BIT_IOR code, and distinct
   from the outer CODE, for distribution to apply. */
2971 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2972 || TREE_CODE (arg0) == code
2973 || (TREE_CODE (arg0) != BIT_AND_EXPR
2974 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find which operand of ARG0 matches an operand of ARG1; that match
   is the common factor, the other two are LEFT and RIGHT. */
2977 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2979 common = TREE_OPERAND (arg0, 0);
2980 left = TREE_OPERAND (arg0, 1);
2981 right = TREE_OPERAND (arg1, 1);
2983 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2985 common = TREE_OPERAND (arg0, 0);
2986 left = TREE_OPERAND (arg0, 1);
2987 right = TREE_OPERAND (arg1, 0);
2989 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2991 common = TREE_OPERAND (arg0, 1);
2992 left = TREE_OPERAND (arg0, 0);
2993 right = TREE_OPERAND (arg1, 1);
2995 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2997 common = TREE_OPERAND (arg0, 1);
2998 left = TREE_OPERAND (arg0, 0);
2999 right = TREE_OPERAND (arg1, 0);
/* Build COMMON op' (LEFT code RIGHT) and let fold simplify further. */
3004 return fold (build2 (TREE_CODE (arg0), type, common,
3005 fold (build2 (code, type, left, right))));
3008 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3009 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3012 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
/* Operand 1 is the width in bits, operand 2 the starting bit position. */
3015 tree result = build3 (BIT_FIELD_REF, type, inner,
3016 size_int (bitsize), bitsize_int (bitpos));
3018 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3023 /* Optimize a bit-field compare.
3025 There are two cases: First is a compare against a constant and the
3026 second is a comparison of two items where the fields are at the same
3027 bit position relative to the start of a chunk (byte, halfword, word)
3028 large enough to contain it. In these cases we can avoid the shift
3029 implicit in bitfield extractions.
3031 For constants, we emit a compare of the shifted constant with the
3032 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3033 compared. For two fields at the same position, we do the ANDs with the
3034 similar mask and compare the result of the ANDs.
3036 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3037 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3038 are the left and right operands of the comparison, respectively.
3040 If the optimization described above can be done, we return the resulting
3041 tree. Otherwise we return zero. */
3044 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3047 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3048 tree type = TREE_TYPE (lhs);
3049 tree signed_type, unsigned_type;
3050 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3051 enum machine_mode lmode, rmode, nmode;
3052 int lunsignedp, runsignedp;
3053 int lvolatilep = 0, rvolatilep = 0;
3054 tree linner, rinner = NULL_TREE;
3058 /* Get all the information about the extractions being done. If the bit size
3059 is the same as the size of the underlying object, we aren't doing an
3060 extraction at all and so can do nothing. We also don't want to
3061 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3062 then will no longer be able to replace it. */
3063 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3064 &lunsignedp, &lvolatilep);
3065 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3066 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3071 /* If this is not a constant, we can only do something if bit positions,
3072 sizes, and signedness are the same. */
3073 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3074 &runsignedp, &rvolatilep);
3076 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3077 || lunsignedp != runsignedp || offset != 0
3078 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3082 /* See if we can find a mode to refer to this field. We should be able to,
3083 but fail if we can't. */
3084 nmode = get_best_mode (lbitsize, lbitpos,
3085 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3086 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3087 TYPE_ALIGN (TREE_TYPE (rinner))),
3088 word_mode, lvolatilep || rvolatilep);
3089 if (nmode == VOIDmode)
3092 /* Set signed and unsigned types of the precision of this mode for the
3094 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3095 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3097 /* Compute the bit position and size for the new reference and our offset
3098 within it. If the new reference is the same size as the original, we
3099 won't optimize anything, so return zero. */
3100 nbitsize = GET_MODE_BITSIZE (nmode);
3101 nbitpos = lbitpos & ~ (nbitsize - 1);
3103 if (nbitsize == lbitsize)
/* For big-endian targets the bit position counts from the other end. */
3106 if (BYTES_BIG_ENDIAN)
3107 lbitpos = nbitsize - lbitsize - lbitpos;
3109 /* Make the mask to be used against the extracted field. */
3110 mask = build_int_cst (unsigned_type, -1);
3111 mask = force_fit_type (mask, 0, false, false);
3112 mask = fold_convert (unsigned_type, mask);
/* Shift left then right to leave LBITSIZE one-bits at position LBITPOS. */
3113 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3114 mask = const_binop (RSHIFT_EXPR, mask,
3115 size_int (nbitsize - lbitsize - lbitpos), 0);
3118 /* If not comparing with constant, just rework the comparison
3120 return build2 (code, compare_type,
3121 build2 (BIT_AND_EXPR, unsigned_type,
3122 make_bit_field_ref (linner, unsigned_type,
3123 nbitsize, nbitpos, 1),
3125 build2 (BIT_AND_EXPR, unsigned_type,
3126 make_bit_field_ref (rinner, unsigned_type,
3127 nbitsize, nbitpos, 1),
3130 /* Otherwise, we are handling the constant case. See if the constant is too
3131 big for the field. Warn and return a tree for 0 (false) if so. We do
3132 this not only for its own sake, but to avoid having to test for this
3133 error case below. If we didn't, we might generate wrong code.
3135 For unsigned fields, the constant shifted right by the field length should
3136 be all zero. For signed fields, the high-order bits should agree with
3141 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3142 fold_convert (unsigned_type, rhs),
3143 size_int (lbitsize), 0)))
3145 warning ("comparison is always %d due to width of bit-field",
3147 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed field: the bits above the field must be all zeros or all ones. */
3152 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3153 size_int (lbitsize - 1), 0);
3154 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3156 warning ("comparison is always %d due to width of bit-field",
3158 return constant_boolean_node (code == NE_EXPR, compare_type);
3162 /* Single-bit compares should always be against zero. */
3163 if (lbitsize == 1 && ! integer_zerop (rhs))
3165 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3166 rhs = fold_convert (type, integer_zero_node);
3169 /* Make a new bitfield reference, shift the constant over the
3170 appropriate number of bits and mask it with the computed mask
3171 (in case this was a signed field). If we changed it, make a new one. */
3172 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3175 TREE_SIDE_EFFECTS (lhs) = 1;
3176 TREE_THIS_VOLATILE (lhs) = 1;
3179 rhs = fold (const_binop (BIT_AND_EXPR,
3180 const_binop (LSHIFT_EXPR,
3181 fold_convert (unsigned_type, rhs),
3182 size_int (lbitpos), 0),
3185 return build2 (code, compare_type,
3186 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3190 /* Subroutine for fold_truthop: decode a field reference.
3192 If EXP is a comparison reference, we return the innermost reference.
3194 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3195 set to the starting bit number.
3197 If the innermost field can be completely contained in a mode-sized
3198 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3200 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3201 otherwise it is not changed.
3203 *PUNSIGNEDP is set to the signedness of the field.
3205 *PMASK is set to the mask used. This is either contained in a
3206 BIT_AND_EXPR or derived from the width of the field.
3208 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3210 Return 0 if this is not a component reference or is one that we can't
3211 do anything with. */
3214 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3215 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3216 int *punsignedp, int *pvolatilep,
3217 tree *pmask, tree *pand_mask)
3219 tree outer_type = 0;
3221 tree mask, inner, offset;
3223 unsigned int precision;
3225 /* All the optimizations using this function assume integer fields.
3226 There are problems with FP fields since the type_for_size call
3227 below can fail for, e.g., XFmode. */
3228 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3231 /* We are interested in the bare arrangement of bits, so strip everything
3232 that doesn't affect the machine mode. However, record the type of the
3233 outermost expression if it may matter below. */
3234 if (TREE_CODE (exp) == NOP_EXPR
3235 || TREE_CODE (exp) == CONVERT_EXPR
3236 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3237 outer_type = TREE_TYPE (exp);
/* Peel off an explicit AND mask, remembering it for the caller. */
3240 if (TREE_CODE (exp) == BIT_AND_EXPR)
3242 and_mask = TREE_OPERAND (exp, 1);
3243 exp = TREE_OPERAND (exp, 0);
3244 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3245 if (TREE_CODE (and_mask) != INTEGER_CST)
3249 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3250 punsignedp, pvolatilep);
3251 if ((inner == exp && and_mask == 0)
3252 || *pbitsize < 0 || offset != 0
3253 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3256 /* If the number of bits in the reference is the same as the bitsize of
3257 the outer type, then the outer type gives the signedness. Otherwise
3258 (in case of a small bitfield) the signedness is unchanged. */
3259 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3260 *punsignedp = TYPE_UNSIGNED (outer_type);
3262 /* Compute the mask to access the bitfield. */
3263 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3264 precision = TYPE_PRECISION (unsigned_type);
/* Start from all-ones, then shift to keep exactly *PBITSIZE low bits. */
3266 mask = build_int_cst (unsigned_type, -1);
3267 mask = force_fit_type (mask, 0, false, false);
3269 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3270 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3272 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3274 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3275 fold_convert (unsigned_type, and_mask), mask));
3278 *pand_mask = and_mask;
3282 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3286 all_ones_mask_p (tree mask, int size)
3288 tree type = TREE_TYPE (mask);
3289 unsigned int precision = TYPE_PRECISION (type);
/* Build an all-ones constant of the signed counterpart type ... */
3292 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3293 tmask = force_fit_type (tmask, 0, false, false);
/* ... and compare MASK to that value with the high PRECISION-SIZE bits
   cleared by a left shift followed by a right shift. */
3296 tree_int_cst_equal (mask,
3297 const_binop (RSHIFT_EXPR,
3298 const_binop (LSHIFT_EXPR, tmask,
3299 size_int (precision - size),
3301 size_int (precision - size), 0));
3304 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3305 represents the sign bit of EXP's type. If EXP represents a sign
3306 or zero extension, also test VAL against the unextended type.
3307 The return value is the (sub)expression whose sign bit is VAL,
3308 or NULL_TREE otherwise. */
3311 sign_bit_p (tree exp, tree val)
3313 unsigned HOST_WIDE_INT mask_lo, lo;
3314 HOST_WIDE_INT mask_hi, hi;
3318 /* Tree EXP must have an integral type. */
3319 t = TREE_TYPE (exp);
3320 if (! INTEGRAL_TYPE_P (t))
3323 /* Tree VAL must be an integer constant. */
3324 if (TREE_CODE (val) != INTEGER_CST
3325 || TREE_CONSTANT_OVERFLOW (val))
3328 width = TYPE_PRECISION (t);
/* Wide type: the sign bit lives in the high HOST_WIDE_INT word. */
3329 if (width > HOST_BITS_PER_WIDE_INT)
3331 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3334 mask_hi = ((unsigned HOST_WIDE_INT) -1
3335 >> (2 * HOST_BITS_PER_WIDE_INT - width));
/* Narrow type: the sign bit fits in the low word. */
3341 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3344 mask_lo = ((unsigned HOST_WIDE_INT) -1
3345 >> (HOST_BITS_PER_WIDE_INT - width));
3348 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3349 treat VAL as if it were unsigned. */
3350 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3351 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3354 /* Handle extension from a narrower type. */
3355 if (TREE_CODE (exp) == NOP_EXPR
3356 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3357 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3362 /* Subroutine for fold_truthop: determine if an operand is simple enough
3363 to be evaluated unconditionally. */
3366 simple_operand_p (tree exp)
3368 /* Strip any conversions that don't change the machine mode. */
3369 while ((TREE_CODE (exp) == NOP_EXPR
3370 || TREE_CODE (exp) == CONVERT_EXPR)
3371 && (TYPE_MODE (TREE_TYPE (exp))
3372 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3373 exp = TREE_OPERAND (exp, 0);
/* Constants are always simple; otherwise require a cheap, local,
   non-volatile declaration. */
3375 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3377 && ! TREE_ADDRESSABLE (exp)
3378 && ! TREE_THIS_VOLATILE (exp)
3379 && ! DECL_NONLOCAL (exp)
3380 /* Don't regard global variables as simple. They may be
3381 allocated in ways unknown to the compiler (shared memory,
3382 #pragma weak, etc). */
3383 && ! TREE_PUBLIC (exp)
3384 && ! DECL_EXTERNAL (exp)
3385 /* Loading a static variable is unduly expensive, but global
3386 registers aren't expensive. */
3387 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3390 /* The following functions are subroutines to fold_range_test and allow it to
3391 try to change a logical combination of comparisons into a range test.
3394 X == 2 || X == 3 || X == 4 || X == 5
3398 (unsigned) (X - 2) <= 3
3400 We describe each set of comparisons as being either inside or outside
3401 a range, using a variable named like IN_P, and then describe the
3402 range with a lower and upper bound. If one of the bounds is omitted,
3403 it represents either the highest or lowest value of the type.
3405 In the comments below, we represent a range by two numbers in brackets
3406 preceded by a "+" to designate being inside that range, or a "-" to
3407 designate being outside that range, so the condition can be inverted by
3408 flipping the prefix. An omitted bound is represented by a "-". For
3409 example, "- [-, 10]" means being outside the range starting at the lowest
3410 possible value and ending at 10, in other words, being greater than 10.
3411 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3414 We set up things so that the missing bounds are handled in a consistent
3415 manner so neither a missing bound nor "true" and "false" need to be
3416 handled using a special case. */
3418 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3419 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3420 and UPPER1_P are nonzero if the respective argument is an upper bound
3421 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3422 must be specified for a comparison. ARG1 will be converted to ARG0's
3423 type if both are specified. */
3426 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3427 tree arg1, int upper1_p)
3433 /* If neither arg represents infinity, do the normal operation.
3434 Else, if not a comparison, return infinity. Else handle the special
3435 comparison rules. Note that most of the cases below won't occur, but
3436 are handled for consistency. */
3438 if (arg0 != 0 && arg1 != 0)
3440 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3441 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
/* Only a fully-folded constant result is useful to the caller. */
3443 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3446 if (TREE_CODE_CLASS (code) != '<')
3449 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3450 for neither. In real maths, we cannot assume open ended ranges are
3451 the same. But, this is computer arithmetic, where numbers are finite.
3452 We can therefore make the transformation of any unbounded range with
3453 the value Z, Z being greater than any representable number. This permits
3454 us to treat unbounded ranges as equal. */
3455 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3456 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Compare the signed infinity markers according to CODE. */
3460 result = sgn0 == sgn1;
3463 result = sgn0 != sgn1;
3466 result = sgn0 < sgn1;
3469 result = sgn0 <= sgn1;
3472 result = sgn0 > sgn1;
3475 result = sgn0 >= sgn1;
3481 return constant_boolean_node (result, type);
3484 /* Given EXP, a logical expression, set the range it is testing into
3485 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3486 actually being tested. *PLOW and *PHIGH will be made of the same type
3487 as the returned expression. If EXP is not a comparison, we will most
3488 likely not be returning a useful value and range. */
3491 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3493 enum tree_code code;
3494 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3495 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3497 tree low, high, n_low, n_high;
3499 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3500 and see if we can refine the range. Some of the cases below may not
3501 happen, but it doesn't seem worth worrying about this. We "continue"
3502 the outer loop when we've changed something; otherwise we "break"
3503 the switch, which will "break" the while. */
3506 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3510 code = TREE_CODE (exp);
3511 exp_type = TREE_TYPE (exp);
/* Pick out ARG0/ARG1 depending on the arity of this tree code. */
3513 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3515 if (first_rtl_op (code) > 0)
3516 arg0 = TREE_OPERAND (exp, 0);
3517 if (TREE_CODE_CLASS (code) == '<'
3518 || TREE_CODE_CLASS (code) == '1'
3519 || TREE_CODE_CLASS (code) == '2')
3520 arg0_type = TREE_TYPE (arg0);
3521 if (TREE_CODE_CLASS (code) == '2'
3522 || TREE_CODE_CLASS (code) == '<'
3523 || (TREE_CODE_CLASS (code) == 'e'
3524 && TREE_CODE_LENGTH (code) > 1))
3525 arg1 = TREE_OPERAND (exp, 1);
3530 case TRUTH_NOT_EXPR:
3531 in_p = ! in_p, exp = arg0;
3534 case EQ_EXPR: case NE_EXPR:
3535 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3536 /* We can only do something if the range is testing for zero
3537 and if the second operand is an integer constant. Note that
3538 saying something is "in" the range we make is done by
3539 complementing IN_P since it will set in the initial case of
3540 being not equal to zero; "out" is leaving it alone. */
3541 if (low == 0 || high == 0
3542 || ! integer_zerop (low) || ! integer_zerop (high)
3543 || TREE_CODE (arg1) != INTEGER_CST)
3548 case NE_EXPR: /* - [c, c] */
3551 case EQ_EXPR: /* + [c, c] */
3552 in_p = ! in_p, low = high = arg1;
3554 case GT_EXPR: /* - [-, c] */
3555 low = 0, high = arg1;
3557 case GE_EXPR: /* + [c, -] */
3558 in_p = ! in_p, low = arg1, high = 0;
3560 case LT_EXPR: /* - [c, -] */
3561 low = arg1, high = 0;
3563 case LE_EXPR: /* + [-, c] */
3564 in_p = ! in_p, low = 0, high = arg1;
3570 /* If this is an unsigned comparison, we also know that EXP is
3571 greater than or equal to zero. We base the range tests we make
3572 on that fact, so we record it here so we can parse existing
3573 range tests. We test arg0_type since often the return type
3574 of, e.g. EQ_EXPR, is boolean. */
3575 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3577 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3579 fold_convert (arg0_type, integer_zero_node),
3583 in_p = n_in_p, low = n_low, high = n_high;
3585 /* If the high bound is missing, but we have a nonzero low
3586 bound, reverse the range so it goes from zero to the low bound
3588 if (high == 0 && low && ! integer_zerop (low))
3591 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3592 integer_one_node, 0);
3593 low = fold_convert (arg0_type, integer_zero_node);
3601 /* (-x) IN [a,b] -> x in [-b, -a] */
3602 n_low = range_binop (MINUS_EXPR, exp_type,
3603 fold_convert (exp_type, integer_zero_node),
3605 n_high = range_binop (MINUS_EXPR, exp_type,
3606 fold_convert (exp_type, integer_zero_node),
3608 low = n_low, high = n_high;
/* Rewrite ~x as -x - 1 so the negation case above can handle it. */
3614 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3615 fold_convert (exp_type, integer_one_node));
3618 case PLUS_EXPR: case MINUS_EXPR:
3619 if (TREE_CODE (arg1) != INTEGER_CST)
3622 /* If EXP is signed, any overflow in the computation is undefined,
3623 so we don't worry about it so long as our computations on
3624 the bounds don't overflow. For unsigned, overflow is defined
3625 and this is exactly the right thing. */
3626 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3627 arg0_type, low, 0, arg1, 0);
3628 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3629 arg0_type, high, 1, arg1, 0);
3630 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3631 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3634 /* Check for an unsigned range which has wrapped around the maximum
3635 value thus making n_high < n_low, and normalize it. */
3636 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3638 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3639 integer_one_node, 0);
3640 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3641 integer_one_node, 0);
3643 /* If the range is of the form +/- [ x+1, x ], we won't
3644 be able to normalize it. But then, it represents the
3645 whole range or the empty set, so make it
3647 if (tree_int_cst_equal (n_low, low)
3648 && tree_int_cst_equal (n_high, high))
3654 low = n_low, high = n_high;
3659 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3660 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3663 if (! INTEGRAL_TYPE_P (arg0_type)
3664 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3665 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3668 n_low = low, n_high = high;
3671 n_low = fold_convert (arg0_type, n_low);
3674 n_high = fold_convert (arg0_type, n_high);
3677 /* If we're converting arg0 from an unsigned type, to exp,
3678 a signed type, we will be doing the comparison as unsigned.
3679 The tests above have already verified that LOW and HIGH
3682 So we have to ensure that we will handle large unsigned
3683 values the same way that the current signed bounds treat
3686 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3689 tree equiv_type = lang_hooks.types.type_for_mode
3690 (TYPE_MODE (arg0_type), 1);
3692 /* A range without an upper bound is, naturally, unbounded.
3693 Since convert would have cropped a very large value, use
3694 the max value for the destination type. */
3696 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3697 : TYPE_MAX_VALUE (arg0_type);
3699 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3700 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3701 fold_convert (arg0_type,
3703 fold_convert (arg0_type,
3704 integer_one_node)));
3706 /* If the low bound is specified, "and" the range with the
3707 range for which the original unsigned value will be
3711 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3712 1, n_low, n_high, 1,
3713 fold_convert (arg0_type,
3718 in_p = (n_in_p == in_p);
3722 /* Otherwise, "or" the range with the range of the input
3723 that will be interpreted as negative. */
3724 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3725 0, n_low, n_high, 1,
3726 fold_convert (arg0_type,
3731 in_p = (in_p != n_in_p);
3736 low = n_low, high = n_high;
3746 /* If EXP is a constant, we can evaluate whether this is true or false. */
3747 if (TREE_CODE (exp) == INTEGER_CST)
3749 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3751 && integer_onep (range_binop (LE_EXPR, integer_type_node,
/* Hand the final range back to the caller. */
3757 *pin_p = in_p, *plow = low, *phigh = high;
3761 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3762 type, TYPE, return an expression to test if EXP is in (or out of, depending
3763 on IN_P) the range. Return 0 if the test couldn't be created. */
/* NOTE(review): this extract has elided lines (the embedded original line
   numbers jump, e.g. 3768 -> 3773), so closing braces, declarations and
   some statements are missing here.  Comments below describe only the
   visible logic; verify against the full source before editing.  */
3766 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3768 tree etype = TREE_TYPE (exp);
/* An "out of range" test is built as the inversion of the "in range" test.  */
3773 value = build_range_check (type, exp, 1, low, high);
3775 return invert_truthvalue (value);
/* No bounds at all: the test is trivially true.  */
3780 if (low == 0 && high == 0)
3781 return fold_convert (type, integer_one_node);
/* One-sided ranges degenerate to a single comparison.  */
3784 return fold (build2 (LE_EXPR, type, exp, high));
3787 return fold (build2 (GE_EXPR, type, exp, low));
/* A range with equal bounds is an equality test.  */
3789 if (operand_equal_p (low, high, 0))
3790 return fold (build2 (EQ_EXPR, type, exp, low));
/* [0, high]: redo the check in the unsigned variant of ETYPE so the
   lower bound becomes implicit (unsigned values are always >= 0).  */
3792 if (integer_zerop (low))
3794 if (! TYPE_UNSIGNED (etype))
3796 etype = lang_hooks.types.unsigned_type (etype);
3797 high = fold_convert (etype, high);
3798 exp = fold_convert (etype, exp);
3800 return build_range_check (type, exp, 1, 0, high);
3803 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3804 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3806 unsigned HOST_WIDE_INT lo;
/* Compute the (hi, lo) double-word value of the signed maximum for
   PREC bits so it can be compared against HIGH below.  */
3810 prec = TYPE_PRECISION (etype);
3811 if (prec <= HOST_BITS_PER_WIDE_INT)
3814 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3818 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3819 lo = (unsigned HOST_WIDE_INT) -1;
3822 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3824 if (TYPE_UNSIGNED (etype))
3826 etype = lang_hooks.types.signed_type (etype);
3827 exp = fold_convert (etype, exp);
3829 return fold (build2 (GT_EXPR, type, exp,
3830 fold_convert (etype, integer_zero_node)));
/* General case: rewrite LOW <= EXP <= HIGH as (EXP - LOW) <= (HIGH - LOW),
   which is a single unsigned comparison after the subtraction.  */
3834 value = const_binop (MINUS_EXPR, high, low, 0);
/* HIGH - LOW overflowed in a signed type: try the computation again after
   moving the bounds into the corresponding unsigned type, but only when
   the type's wrap-around behavior makes that safe (checked below).  */
3835 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3837 tree utype, minv, maxv;
3839 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3840 for the type in question, as we rely on this here. */
3841 switch (TREE_CODE (etype))
3846 utype = lang_hooks.types.unsigned_type (etype);
3847 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3848 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3849 integer_one_node, 1);
3850 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3851 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3855 high = fold_convert (etype, high);
3856 low = fold_convert (etype, low);
3857 exp = fold_convert (etype, exp);
3858 value = const_binop (MINUS_EXPR, high, low, 0);
/* Emit the single-comparison form: 0 <= (EXP - LOW) <= (HIGH - LOW).  */
3866 if (value != 0 && ! TREE_OVERFLOW (value))
3867 return build_range_check (type,
3868 fold (build2 (MINUS_EXPR, etype, exp, low)),
3869 1, fold_convert (etype, integer_zero_node),
3875 /* Given two ranges, see if we can merge them into one. Return 1 if we
3876 can, 0 if we can't. Set the output range into the specified parameters. */
/* NOTE(review): lines are elided in this extract (embedded original line
   numbers jump); some declarations, braces and statements are missing.
   Comments describe only the visible logic.  */
3879 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3880 tree high0, int in1_p, tree low1, tree high1)
/* A bound of 0 means "unbounded" on that side; two absent bounds compare
   equal, otherwise defer to range_binop on the constants.  */
3888 int lowequal = ((low0 == 0 && low1 == 0)
3889 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3890 low0, 0, low1, 0)));
3891 int highequal = ((high0 == 0 && high1 == 0)
3892 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3893 high0, 1, high1, 1)));
3895 /* Make range 0 be the range that starts first, or ends last if they
3896 start at the same value. Swap them if it isn't. */
3897 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3900 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3901 high1, 1, high0, 1))))
3903 temp = in0_p, in0_p = in1_p, in1_p = temp;
3904 tem = low0, low0 = low1, low1 = tem;
3905 tem = high0, high0 = high1, high1 = tem;
3908 /* Now flag two cases, whether the ranges are disjoint or whether the
3909 second range is totally subsumed in the first. Note that the tests
3910 below are simplified by the ones above. */
3911 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3912 high0, 1, low1, 0));
3913 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3914 high1, 1, high0, 1));
3916 /* We now have four cases, depending on whether we are including or
3917 excluding the two ranges. */
/* Case 1 (presumably in0_p && in1_p — the guard line is elided):
   intersection of two included ranges.  */
3920 /* If they don't overlap, the result is false. If the second range
3921 is a subset it is the result. Otherwise, the range is from the start
3922 of the second to the end of the first. */
3924 in_p = 0, low = high = 0;
3926 in_p = 1, low = low1, high = high1;
3928 in_p = 1, low = low1, high = high0;
/* Case 2: include range 0, exclude range 1 (set difference).  */
3931 else if (in0_p && ! in1_p)
3933 /* If they don't overlap, the result is the first range. If they are
3934 equal, the result is false. If the second range is a subset of the
3935 first, and the ranges begin at the same place, we go from just after
3936 the end of the first range to the end of the second. If the second
3937 range is not a subset of the first, or if it is a subset and both
3938 ranges end at the same place, the range starts at the start of the
3939 first range and ends just before the second range.
3940 Otherwise, we can't describe this as a single range. */
3942 in_p = 1, low = low0, high = high0;
3943 else if (lowequal && highequal)
3944 in_p = 0, low = high = 0;
3945 else if (subset && lowequal)
3947 in_p = 1, high = high0;
3948 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3949 integer_one_node, 0);
3951 else if (! subset || highequal)
3953 in_p = 1, low = low0;
3954 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3955 integer_one_node, 0);
/* Case 3: exclude range 0, include range 1.  */
3961 else if (! in0_p && in1_p)
3963 /* If they don't overlap, the result is the second range. If the second
3964 is a subset of the first, the result is false. Otherwise,
3965 the range starts just after the first range and ends at the
3966 end of the second. */
3968 in_p = 1, low = low1, high = high1;
3969 else if (subset || highequal)
3970 in_p = 0, low = high = 0;
3973 in_p = 1, high = high1;
3974 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3975 integer_one_node, 0);
/* Case 4: both ranges excluded (union of exclusions).  */
3981 /* The case where we are excluding both ranges. Here the complex case
3982 is if they don't overlap. In that case, the only time we have a
3983 range is if they are adjacent. If the second is a subset of the
3984 first, the result is the first. Otherwise, the range to exclude
3985 starts at the beginning of the first range and ends at the end of the
/* Adjacency test: high0 + 1 == low1 means the two excluded ranges form
   one contiguous excluded range.  */
3989 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3990 range_binop (PLUS_EXPR, NULL_TREE,
3992 integer_one_node, 1),
3994 in_p = 0, low = low0, high = high1;
3997 /* Canonicalize - [min, x] into - [-, x]. */
3998 if (low0 && TREE_CODE (low0) == INTEGER_CST)
3999 switch (TREE_CODE (TREE_TYPE (low0)))
/* Skip types whose precision doesn't fill the mode: MIN_VALUE of the
   mode would not be the type's true minimum.  */
4002 if (TYPE_PRECISION (TREE_TYPE (low0))
4003 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4008 if (tree_int_cst_equal (low0,
4009 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4013 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4014 && integer_zerop (low0))
4021 /* Canonicalize - [x, max] into - [x, -]. */
4022 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4023 switch (TREE_CODE (TREE_TYPE (high1)))
4026 if (TYPE_PRECISION (TREE_TYPE (high1))
4027 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4032 if (tree_int_cst_equal (high1,
4033 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4037 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4038 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4040 integer_one_node, 1)))
4047 /* The ranges might be also adjacent between the maximum and
4048 minimum values of the given type. For
4049 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4050 return + [x + 1, y - 1]. */
4051 if (low0 == 0 && high1 == 0)
4053 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4054 integer_one_node, 1);
4055 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4056 integer_one_node, 0);
4057 if (low == 0 || high == 0)
4067 in_p = 0, low = low0, high = high0;
4069 in_p = 0, low = low0, high = high1;
/* Publish the merged range through the output parameters.  */
4072 *pin_p = in_p, *plow = low, *phigh = high;
4077 /* Subroutine of fold, looking inside expressions of the form
4078 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4079 of the COND_EXPR. This function is being used also to optimize
4080 A op B ? C : A, by reversing the comparison first.
4082 Return a folded expression whose code is not a COND_EXPR
4083 anymore, or NULL_TREE if no folding opportunity is found. */
/* NOTE(review): this extract drops lines (embedded original numbering
   jumps, e.g. 4091 -> 4097), so several switch/case labels, braces and
   guards are missing.  Comments below annotate only the visible code.  */
4086 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
/* ARG0 is the comparison; split it into its code and two operands.  */
4088 enum tree_code comp_code = TREE_CODE (arg0);
4089 tree arg00 = TREE_OPERAND (arg0, 0);
4090 tree arg01 = TREE_OPERAND (arg0, 1);
4091 tree arg1_type = TREE_TYPE (arg1);
4097 /* If we have A op 0 ? A : -A, consider applying the following
4100 A == 0? A : -A same as -A
4101 A != 0? A : -A same as A
4102 A >= 0? A : -A same as abs (A)
4103 A > 0? A : -A same as abs (A)
4104 A <= 0? A : -A same as -abs (A)
4105 A < 0? A : -A same as -abs (A)
4107 None of these transformations work for modes with signed
4108 zeros. If A is +/-0, the first two transformations will
4109 change the sign of the result (from +0 to -0, or vice
4110 versa). The last four will fix the sign of the result,
4111 even though the original expressions could be positive or
4112 negative, depending on the sign of A.
4114 Note that all these transformations are correct if A is
4115 NaN, since the two alternatives (A and -A) are also NaNs. */
4116 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4117 ? real_zerop (arg01)
4118 : integer_zerop (arg01))
4119 && TREE_CODE (arg2) == NEGATE_EXPR
4120 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
/* The following returns correspond to the table above; the case labels
   selecting among them are elided in this extract.  */
4124 tem = fold_convert (arg1_type, arg1);
4125 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4127 return pedantic_non_lvalue (fold_convert (type, arg1));
/* abs() requires a signed type: convert first if ARG1 is unsigned.  */
4130 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4131 arg1 = fold_convert (lang_hooks.types.signed_type
4132 (TREE_TYPE (arg1)), arg1);
4133 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4134 return pedantic_non_lvalue (fold_convert (type, tem));
4137 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4138 arg1 = fold_convert (lang_hooks.types.signed_type
4139 (TREE_TYPE (arg1)), arg1);
4140 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4141 return negate_expr (fold_convert (type, tem));
4146 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4147 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4148 both transformations are correct when A is NaN: A != 0
4149 is then true, and A == 0 is false. */
4151 if (integer_zerop (arg01) && integer_zerop (arg2))
4153 if (comp_code == NE_EXPR)
4154 return pedantic_non_lvalue (fold_convert (type, arg1));
4155 else if (comp_code == EQ_EXPR)
4156 return fold_convert (type, integer_zero_node);
4159 /* Try some transformations of A op B ? A : B.
4161 A == B? A : B same as B
4162 A != B? A : B same as A
4163 A >= B? A : B same as max (A, B)
4164 A > B? A : B same as max (B, A)
4165 A <= B? A : B same as min (A, B)
4166 A < B? A : B same as min (B, A)
4168 As above, these transformations don't work in the presence
4169 of signed zeros. For example, if A and B are zeros of
4170 opposite sign, the first two transformations will change
4171 the sign of the result. In the last four, the original
4172 expressions give different results for (A=+0, B=-0) and
4173 (A=-0, B=+0), but the transformed expressions do not.
4175 The first two transformations are correct if either A or B
4176 is a NaN. In the first transformation, the condition will
4177 be false, and B will indeed be chosen. In the case of the
4178 second transformation, the condition A != B will be true,
4179 and A will be chosen.
4181 The conversions to max() and min() are not correct if B is
4182 a number and A is not. The conditions in the original
4183 expressions will be false, so all four give B. The min()
4184 and max() versions would give a NaN instead. */
4185 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4187 tree comp_op0 = arg00;
4188 tree comp_op1 = arg01;
4189 tree comp_type = TREE_TYPE (comp_op0);
4191 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4192 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
/* EQ/NE cases (selectors elided): pick the arm directly.  */
4202 return pedantic_non_lvalue (fold_convert (type, arg2));
4204 return pedantic_non_lvalue (fold_convert (type, arg1));
4207 /* In C++ a ?: expression can be an lvalue, so put the
4208 operand which will be used if they are equal first
4209 so that we can convert this back to the
4210 corresponding COND_EXPR. */
/* MIN_EXPR rewrite — only when NaNs need not be honored, per the
   long comment above.  */
4211 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4213 comp_op0 = fold_convert (comp_type, comp_op0);
4214 comp_op1 = fold_convert (comp_type, comp_op1);
4215 tem = fold (build2 (MIN_EXPR, comp_type,
4216 (comp_code == LE_EXPR
4217 ? comp_op0 : comp_op1),
4218 (comp_code == LE_EXPR
4219 ? comp_op1 : comp_op0)));
4220 return pedantic_non_lvalue (fold_convert (type, tem));
/* MAX_EXPR rewrite, symmetric to the MIN case.  */
4225 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4227 comp_op0 = fold_convert (comp_type, comp_op0);
4228 comp_op1 = fold_convert (comp_type, comp_op1);
4229 tem = fold (build2 (MAX_EXPR, comp_type,
4230 (comp_code == GE_EXPR
4231 ? comp_op0 : comp_op1),
4232 (comp_code == GE_EXPR
4233 ? comp_op1 : comp_op0)));
/* NOTE(review): line 4234 rebuilds TEM unconditionally, discarding the
   operand ordering chosen at 4229-4233; in context this is likely a
   separate case's body with its selector elided — confirm against the
   full source.  */
4234 tem = fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1));
4235 return pedantic_non_lvalue (fold_convert (type, tem));
4243 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4244 we might still be able to simplify this. For example,
4245 if C1 is one less or one more than C2, this might have started
4246 out as a MIN or MAX and been transformed by this function.
4247 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4249 if (INTEGRAL_TYPE_P (type)
4250 && TREE_CODE (arg01) == INTEGER_CST
4251 && TREE_CODE (arg2) == INTEGER_CST)
4255 /* We can replace A with C1 in this case. */
4256 arg1 = fold_convert (type, arg01);
4257 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
/* The TYPE_MAX/MIN_VALUE guards below avoid forming C2 +/- 1 when it
   would wrap past the type's extremes.  */
4260 /* If C1 is C2 + 1, this is min(A, C2). */
4261 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4263 && operand_equal_p (arg01,
4264 const_binop (PLUS_EXPR, arg2,
4265 integer_one_node, 0),
4267 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4268 type, arg1, arg2)));
4272 /* If C1 is C2 - 1, this is min(A, C2). */
4273 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4275 && operand_equal_p (arg01,
4276 const_binop (MINUS_EXPR, arg2,
4277 integer_one_node, 0),
4279 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4280 type, arg1, arg2)));
4284 /* If C1 is C2 - 1, this is max(A, C2). */
4285 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4287 && operand_equal_p (arg01,
4288 const_binop (MINUS_EXPR, arg2,
4289 integer_one_node, 0),
4291 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4292 type, arg1, arg2)));
4296 /* If C1 is C2 + 1, this is max(A, C2). */
4297 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4299 && operand_equal_p (arg01,
4300 const_binop (PLUS_EXPR, arg2,
4301 integer_one_node, 0),
4303 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4304 type, arg1, arg2)));
/* Default: prefer a non-short-circuit range test when branches are
   expensive (BRANCH_COST >= 2); targets may override this macro.  */
4317 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
4318 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4321 /* EXP is some logical combination of boolean tests. See if we can
4322 merge it into some range test. Return the new tree if so. */
/* NOTE(review): interior lines are elided in this extract (embedded
   original numbering jumps); some declarations, arguments, and braces
   are missing.  Comments describe only the visible logic.  */
4325 fold_range_test (tree exp)
4327 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4328 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4329 int in0_p, in1_p, in_p;
4330 tree low0, low1, low, high0, high1, high;
/* Decompose each side of EXP into a (expression, in_p, low, high)
   range description via make_range.  */
4331 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4332 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4335 /* If this is an OR operation, invert both sides; we will invert
4336 again at the end. */
4338 in0_p = ! in0_p, in1_p = ! in1_p;
4340 /* If both expressions are the same, if we can merge the ranges, and we
4341 can build the range test, return it or it inverted. If one of the
4342 ranges is always true or always false, consider it to be the same
4343 expression as the other. */
4344 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4345 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4347 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4349 : rhs != 0 ? rhs : integer_zero_node,
4351 return or_op ? invert_truthvalue (tem) : tem;
4353 /* On machines where the branch cost is expensive, if this is a
4354 short-circuited branch and the underlying object on both sides
4355 is the same, make a non-short-circuit operation. */
4356 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4357 && lhs != 0 && rhs != 0
4358 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4359 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4360 && operand_equal_p (lhs, rhs, 0))
4362 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4363 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4364 which cases we can't do this. */
4365 if (simple_operand_p (lhs))
4366 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4367 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4368 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4369 TREE_OPERAND (exp, 1));
4371 else if (lang_hooks.decls.global_bindings_p () == 0
4372 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Share one evaluation of the common operand across both checks.  */
4374 tree common = save_expr (lhs);
4376 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4377 or_op ? ! in0_p : in0_p,
4379 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4380 or_op ? ! in1_p : in1_p,
4382 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4383 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4384 TREE_TYPE (exp), lhs, rhs);
4391 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4392 bit value. Arrange things so the extra bits will be set to zero if and
4393 only if C is signed-extended to its full width. If MASK is nonzero,
4394 it is an INTEGER_CST that should be AND'ed with the extra bits. */
/* NOTE(review): a few lines are elided in this extract (embedded original
   numbering jumps, e.g. the body after the early test at 4403 and the
   guard around the MASK AND at 4424); verify against the full source.  */
4397 unextend (tree c, int p, int unsignedp, tree mask)
4399 tree type = TREE_TYPE (c);
4400 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Already full width, or unsigned: no sign-extension correction needed
   (the early-out body is elided here).  */
4403 if (p == modesize || unsignedp)
4406 /* We work by getting just the sign bit into the low-order bit, then
4407 into the high-order bit, then sign-extend. We then XOR that value
/* Isolate bit P-1 (the sign bit of the P-bit value) in the low bit.  */
4409 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4410 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4412 /* We must use a signed type in order to get an arithmetic right shift.
4413 However, we must also avoid introducing accidental overflows, so that
4414 a subsequent call to integer_zerop will work. Hence we must
4415 do the type conversion here. At this point, the constant is either
4416 zero or one, and the conversion to a signed type can never overflow.
4417 We could get an overflow if this conversion is done anywhere else. */
4418 if (TYPE_UNSIGNED (type))
4419 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
/* Shift the sign bit to the top, then arithmetic-shift back so it is
   replicated through bits P-1 .. modesize-1.  */
4421 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4422 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* Restrict the correction to the caller-supplied extra-bit MASK.  */
4424 temp = const_binop (BIT_AND_EXPR, temp,
4425 fold_convert (TREE_TYPE (c), mask), 0);
4426 /* If necessary, convert the type back to match the type of C. */
4427 if (TYPE_UNSIGNED (type))
4428 temp = fold_convert (type, temp);
/* XOR flips the extension bits exactly when C was sign-extended,
   giving zeros there iff C sign-extends to full width.  */
4430 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4433 /* Find ways of folding logical expressions of LHS and RHS:
4434 Try to merge two comparisons to the same innermost item.
4435 Look for range tests like "ch >= '0' && ch <= '9'".
4436 Look for combinations of simple terms on machines with expensive branches
4437 and evaluate the RHS unconditionally.
4439 For example, if we have p->a == 2 && p->b == 4 and we can make an
4440 object large enough to span both A and B, we can do this with a comparison
4441 against the object ANDed with the a mask.
4443 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4444 operations to do this with one comparison.
4446 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4447 function and the one above.
4449 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4450 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4452 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4455 We return the simplified tree or 0 if no optimization is possible. */
4458 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4460 /* If this is the "or" of two comparisons, we can do something if
4461 the comparisons are NE_EXPR. If this is the "and", we can do something
4462 if the comparisons are EQ_EXPR. I.e.,
4463 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4465 WANTED_CODE is this operation code. For single bit fields, we can
4466 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4467 comparison for one-bit fields. */
4469 enum tree_code wanted_code;
4470 enum tree_code lcode, rcode;
4471 tree ll_arg, lr_arg, rl_arg, rr_arg;
4472 tree ll_inner, lr_inner, rl_inner, rr_inner;
4473 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4474 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4475 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4476 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4477 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4478 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4479 enum machine_mode lnmode, rnmode;
4480 tree ll_mask, lr_mask, rl_mask, rr_mask;
4481 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4482 tree l_const, r_const;
4483 tree lntype, rntype, result;
4484 int first_bit, end_bit;
4487 /* Start by getting the comparison codes. Fail if anything is volatile.
4488 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4489 it were surrounded with a NE_EXPR. */
4491 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4494 lcode = TREE_CODE (lhs);
4495 rcode = TREE_CODE (rhs);
4497 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4499 lhs = build2 (NE_EXPR, truth_type, lhs,
4500 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4504 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4506 rhs = build2 (NE_EXPR, truth_type, rhs,
4507 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4511 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4514 ll_arg = TREE_OPERAND (lhs, 0);
4515 lr_arg = TREE_OPERAND (lhs, 1);
4516 rl_arg = TREE_OPERAND (rhs, 0);
4517 rr_arg = TREE_OPERAND (rhs, 1);
4519 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4520 if (simple_operand_p (ll_arg)
4521 && simple_operand_p (lr_arg))
4524 if (operand_equal_p (ll_arg, rl_arg, 0)
4525 && operand_equal_p (lr_arg, rr_arg, 0))
4527 result = combine_comparisons (code, lcode, rcode,
4528 truth_type, ll_arg, lr_arg);
4532 else if (operand_equal_p (ll_arg, rr_arg, 0)
4533 && operand_equal_p (lr_arg, rl_arg, 0))
4535 result = combine_comparisons (code, lcode,
4536 swap_tree_comparison (rcode),
4537 truth_type, ll_arg, lr_arg);
4543 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4544 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4546 /* If the RHS can be evaluated unconditionally and its operands are
4547 simple, it wins to evaluate the RHS unconditionally on machines
4548 with expensive branches. In this case, this isn't a comparison
4549 that can be merged. Avoid doing this if the RHS is a floating-point
4550 comparison since those can trap. */
4552 if (BRANCH_COST >= 2
4553 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4554 && simple_operand_p (rl_arg)
4555 && simple_operand_p (rr_arg))
4557 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4558 if (code == TRUTH_OR_EXPR
4559 && lcode == NE_EXPR && integer_zerop (lr_arg)
4560 && rcode == NE_EXPR && integer_zerop (rr_arg)
4561 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4562 return build2 (NE_EXPR, truth_type,
4563 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4565 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4567 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4568 if (code == TRUTH_AND_EXPR
4569 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4570 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4571 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4572 return build2 (EQ_EXPR, truth_type,
4573 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4575 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4577 return build2 (code, truth_type, lhs, rhs);
4580 /* See if the comparisons can be merged. Then get all the parameters for
4583 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4584 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4588 ll_inner = decode_field_reference (ll_arg,
4589 &ll_bitsize, &ll_bitpos, &ll_mode,
4590 &ll_unsignedp, &volatilep, &ll_mask,
4592 lr_inner = decode_field_reference (lr_arg,
4593 &lr_bitsize, &lr_bitpos, &lr_mode,
4594 &lr_unsignedp, &volatilep, &lr_mask,
4596 rl_inner = decode_field_reference (rl_arg,
4597 &rl_bitsize, &rl_bitpos, &rl_mode,
4598 &rl_unsignedp, &volatilep, &rl_mask,
4600 rr_inner = decode_field_reference (rr_arg,
4601 &rr_bitsize, &rr_bitpos, &rr_mode,
4602 &rr_unsignedp, &volatilep, &rr_mask,
4605 /* It must be true that the inner operation on the lhs of each
4606 comparison must be the same if we are to be able to do anything.
4607 Then see if we have constants. If not, the same must be true for
4609 if (volatilep || ll_inner == 0 || rl_inner == 0
4610 || ! operand_equal_p (ll_inner, rl_inner, 0))
4613 if (TREE_CODE (lr_arg) == INTEGER_CST
4614 && TREE_CODE (rr_arg) == INTEGER_CST)
4615 l_const = lr_arg, r_const = rr_arg;
4616 else if (lr_inner == 0 || rr_inner == 0
4617 || ! operand_equal_p (lr_inner, rr_inner, 0))
4620 l_const = r_const = 0;
4622 /* If either comparison code is not correct for our logical operation,
4623 fail. However, we can convert a one-bit comparison against zero into
4624 the opposite comparison against that bit being set in the field. */
4626 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4627 if (lcode != wanted_code)
4629 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4631 /* Make the left operand unsigned, since we are only interested
4632 in the value of one bit. Otherwise we are doing the wrong
4641 /* This is analogous to the code for l_const above. */
4642 if (rcode != wanted_code)
4644 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4653 /* After this point all optimizations will generate bit-field
4654 references, which we might not want. */
4655 if (! lang_hooks.can_use_bit_fields_p ())
4658 /* See if we can find a mode that contains both fields being compared on
4659 the left. If we can't, fail. Otherwise, update all constants and masks
4660 to be relative to a field of that size. */
4661 first_bit = MIN (ll_bitpos, rl_bitpos);
4662 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4663 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4664 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4666 if (lnmode == VOIDmode)
4669 lnbitsize = GET_MODE_BITSIZE (lnmode);
4670 lnbitpos = first_bit & ~ (lnbitsize - 1);
4671 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4672 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4674 if (BYTES_BIG_ENDIAN)
4676 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4677 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4680 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4681 size_int (xll_bitpos), 0);
4682 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4683 size_int (xrl_bitpos), 0);
4687 l_const = fold_convert (lntype, l_const);
4688 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4689 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4690 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4691 fold (build1 (BIT_NOT_EXPR,
4695 warning ("comparison is always %d", wanted_code == NE_EXPR);
4697 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4702 r_const = fold_convert (lntype, r_const);
4703 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4704 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4705 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4706 fold (build1 (BIT_NOT_EXPR,
4710 warning ("comparison is always %d", wanted_code == NE_EXPR);
4712 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4716 /* If the right sides are not constant, do the same for it. Also,
4717 disallow this optimization if a size or signedness mismatch occurs
4718 between the left and right sides. */
4721 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4722 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4723 /* Make sure the two fields on the right
4724 correspond to the left without being swapped. */
4725 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4728 first_bit = MIN (lr_bitpos, rr_bitpos);
4729 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4730 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4731 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4733 if (rnmode == VOIDmode)
4736 rnbitsize = GET_MODE_BITSIZE (rnmode);
4737 rnbitpos = first_bit & ~ (rnbitsize - 1);
4738 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4739 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4741 if (BYTES_BIG_ENDIAN)
4743 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4744 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4747 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4748 size_int (xlr_bitpos), 0);
4749 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4750 size_int (xrr_bitpos), 0);
4752 /* Make a mask that corresponds to both fields being compared.
4753 Do this for both items being compared. If the operands are the
4754 same size and the bits being compared are in the same position
4755 then we can do this by masking both and comparing the masked
4757 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4758 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4759 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4761 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4762 ll_unsignedp || rl_unsignedp);
4763 if (! all_ones_mask_p (ll_mask, lnbitsize))
4764 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4766 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4767 lr_unsignedp || rr_unsignedp);
4768 if (! all_ones_mask_p (lr_mask, rnbitsize))
4769 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4771 return build2 (wanted_code, truth_type, lhs, rhs);
4774 /* There is still another way we can do something: If both pairs of
4775 fields being compared are adjacent, we may be able to make a wider
4776 field containing them both.
4778 Note that we still must mask the lhs/rhs expressions. Furthermore,
4779 the mask must be shifted to account for the shift done by
4780 make_bit_field_ref. */
4781 if ((ll_bitsize + ll_bitpos == rl_bitpos
4782 && lr_bitsize + lr_bitpos == rr_bitpos)
4783 || (ll_bitpos == rl_bitpos + rl_bitsize
4784 && lr_bitpos == rr_bitpos + rr_bitsize))
4788 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4789 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4790 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4791 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4793 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4794 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4795 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4796 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4798 /* Convert to the smaller type before masking out unwanted bits. */
4800 if (lntype != rntype)
4802 if (lnbitsize > rnbitsize)
4804 lhs = fold_convert (rntype, lhs);
4805 ll_mask = fold_convert (rntype, ll_mask);
4808 else if (lnbitsize < rnbitsize)
4810 rhs = fold_convert (lntype, rhs);
4811 lr_mask = fold_convert (lntype, lr_mask);
4816 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4817 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4819 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4820 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4822 return build2 (wanted_code, truth_type, lhs, rhs);
4828 /* Handle the case of comparisons with constants. If there is something in
4829 common between the masks, those bits of the constants must be the same.
4830 If not, the condition is always false. Test for this to avoid generating
4831 incorrect code below. */
4832 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4833 if (! integer_zerop (result)
4834 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4835 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4837 if (wanted_code == NE_EXPR)
4839 warning ("`or' of unmatched not-equal tests is always 1");
4840 return constant_boolean_node (true, truth_type);
4844 warning ("`and' of mutually exclusive equal-tests is always 0");
4845 return constant_boolean_node (false, truth_type);
4849 /* Construct the expression we will return. First get the component
4850 reference we will make. Unless the mask is all ones the width of
4851 that field, perform the mask operation. Then compare with the
4853 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4854 ll_unsignedp || rl_unsignedp);
4856 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4857 if (! all_ones_mask_p (ll_mask, lnbitsize))
4858 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4860 return build2 (wanted_code, truth_type, result,
4861 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4864 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  (NOTE(review): the remainder of this header comment is not
   visible in this copy; the body compares TREE_OPERAND (t, 0), a MIN/MAX
   expression, against the integer constant TREE_OPERAND (t, 1).)  */
4868 optimize_minmax_comparison (tree t)
4870 tree type = TREE_TYPE (t);
4871 tree arg0 = TREE_OPERAND (t, 0);
4872 enum tree_code op_code;
4873 tree comp_const = TREE_OPERAND (t, 1);
4875 int consts_equal, consts_lt;
4878 STRIP_SIGN_NOPS (arg0);
4880 op_code = TREE_CODE (arg0);
4881 minmax_const = TREE_OPERAND (arg0, 1);
/* Relationship between the MIN/MAX constant and the comparison constant
   drives every case below.  */
4882 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4883 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4884 inner = TREE_OPERAND (arg0, 0);
4886 /* If something does not permit us to optimize, return the original tree. */
4887 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4888 || TREE_CODE (comp_const) != INTEGER_CST
4889 || TREE_CONSTANT_OVERFLOW (comp_const)
4890 || TREE_CODE (minmax_const) != INTEGER_CST
4891 || TREE_CONSTANT_OVERFLOW (minmax_const))
4894 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4895 and GT_EXPR, doing the rest with recursive calls using logical
   operations (the comment is truncated in this copy).  */
4897 switch (TREE_CODE (t))
4899 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* NE/LT/LE: invert, optimize the inverted comparison, invert back.  */
4901 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
/* GE is handled as (EQ || GT); an elided case label presumably precedes
   this -- TODO confirm against a complete copy.  */
4905 fold (build2 (TRUTH_ORIF_EXPR, type,
4906 optimize_minmax_comparison
4907 (build2 (EQ_EXPR, type, arg0, comp_const)),
4908 optimize_minmax_comparison
4909 (build2 (GT_EXPR, type, arg0, comp_const))));
/* EQ_EXPR cases.  In the example comments the MIN/MAX constant is 0.  */
4912 if (op_code == MAX_EXPR && consts_equal)
4913 /* MAX (X, 0) == 0 -> X <= 0 */
4914 return fold (build2 (LE_EXPR, type, inner, comp_const));
4916 else if (op_code == MAX_EXPR && consts_lt)
4917 /* MAX (X, 0) == 5 -> X == 5 */
4918 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4920 else if (op_code == MAX_EXPR)
4921 /* MAX (X, 0) == -1 -> false */
4922 return omit_one_operand (type, integer_zero_node, inner);
4924 else if (consts_equal)
4925 /* MIN (X, 0) == 0 -> X >= 0 */
4926 return fold (build2 (GE_EXPR, type, inner, comp_const));
4929 /* MIN (X, 0) == 5 -> false */
4930 return omit_one_operand (type, integer_zero_node, inner);
4933 /* MIN (X, 0) == -1 -> X == -1 */
4934 return fold (build2 (EQ_EXPR, type, inner, comp_const));
/* GT_EXPR cases.  */
4937 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4938 /* MAX (X, 0) > 0 -> X > 0
4939 MAX (X, 0) > 5 -> X > 5 */
4940 return fold (build2 (GT_EXPR, type, inner, comp_const));
4942 else if (op_code == MAX_EXPR)
4943 /* MAX (X, 0) > -1 -> true */
4944 return omit_one_operand (type, integer_one_node, inner);
4946 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4947 /* MIN (X, 0) > 0 -> false
4948 MIN (X, 0) > 5 -> false */
4949 return omit_one_operand (type, integer_zero_node, inner);
4952 /* MIN (X, 0) > -1 -> X > -1 */
4953 return fold (build2 (GT_EXPR, type, inner, comp_const));
4960 /* T is an integer expression that is being multiplied, divided, or taken a
4961 modulus (CODE says which and what kind of divide or modulus) by a
4962 constant C. See if we can eliminate that operation by folding it with
4963 other operations already in T. WIDE_TYPE, if non-null, is a type that
4964 should be used for the computation if wider than our type.
4966 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4967 (X * 2) + (Y * 4). We must, however, be assured that either the original
4968 expression would not overflow or that overflow is undefined for the type
4969 in the language in question.
4971 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4972 the machine has a multiply-accumulate insn or that this is part of an
4973 addressing calculation.
4975 If we return a non-null expression, it is an equivalent form of the
4976 original computation, but need not be in the original type. */
4979 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4981 /* To avoid exponential search depth, refuse to allow recursion past
4982 three levels. Beyond that (1) it's highly unlikely that we'll find
4983 something interesting and (2) we've probably processed it before
4984 when we built the inner expression. */
/* NOTE(review): the depth-counter increment/limit check and the matching
   decrement around this call are not visible in this copy -- confirm
   against a complete copy before editing.  */
4993 ret = extract_muldiv_1 (t, c, code, wide_type);
/* Worker for extract_muldiv: one level of the recursive simplification.
   Dispatches on TREE_CODE (t) and tries to fold the outer multiply /
   divide / modulus by C into T's own operation.  Returns an equivalent
   tree (possibly in CTYPE rather than T's type) or a null value when no
   simplification applies.  */
5000 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5002 tree type = TREE_TYPE (t);
5003 enum tree_code tcode = TREE_CODE (t);
/* Compute in WIDE_TYPE when it is strictly wider than T's type.  */
5004 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5005 > GET_MODE_SIZE (TYPE_MODE (type)))
5006 ? wide_type : type);
5008 int same_p = tcode == code;
5009 tree op0 = NULL_TREE, op1 = NULL_TREE;
5011 /* Don't deal with constants of zero here; they confuse the code below. */
5012 if (integer_zerop (c))
/* Fetch operands for unary ('1') and binary ('2') tree codes.  */
5015 if (TREE_CODE_CLASS (tcode) == '1')
5016 op0 = TREE_OPERAND (t, 0);
5018 if (TREE_CODE_CLASS (tcode) == '2')
5019 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5021 /* Note that we need not handle conditional operations here since fold
5022 already handles those cases. So just do arithmetic here. */
5026 /* For a constant, we can always simplify if we are a multiply
5027 or (for divide and modulus) if it is a multiple of our constant. */
5028 if (code == MULT_EXPR
5029 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5030 return const_binop (code, fold_convert (ctype, t),
5031 fold_convert (ctype, c), 0);
5034 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5035 /* If op0 is an expression ... */
5036 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
5037 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
5038 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
5039 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
5040 /* ... and is unsigned, and its type is smaller than ctype,
5041 then we cannot pass through as widening. */
5042 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5043 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5044 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5045 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5046 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5047 /* ... or this is a truncation (t is narrower than op0),
5048 then we cannot pass through this narrowing. */
5049 || (GET_MODE_SIZE (TYPE_MODE (type))
5050 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5051 /* ... or signedness changes for division or modulus,
5052 then we cannot pass through this conversion. */
5053 || (code != MULT_EXPR
5054 && (TYPE_UNSIGNED (ctype)
5055 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5058 /* Pass the constant down and see if we can make a simplification. If
5059 we can, replace this expression with the inner simplification for
5060 possible later conversion to our or some other type. */
5061 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5062 && TREE_CODE (t2) == INTEGER_CST
5063 && ! TREE_CONSTANT_OVERFLOW (t2)
5064 && (0 != (t1 = extract_muldiv (op0, t2, code,
5066 ? ctype : NULL_TREE))))
5070 case NEGATE_EXPR: case ABS_EXPR:
/* -X and |X|: distribute the operation into the operand.  */
5071 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5072 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5075 case MIN_EXPR: case MAX_EXPR:
5076 /* If widening the type changes the signedness, then we can't perform
5077 this optimization as that changes the result. */
5078 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5081 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5082 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5083 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* Multiplying/dividing by a negative C swaps MIN and MAX.  */
5085 if (tree_int_cst_sgn (c) < 0)
5086 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5088 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5089 fold_convert (ctype, t2)));
5093 case LSHIFT_EXPR: case RSHIFT_EXPR:
5094 /* If the second operand is constant, this is a multiplication
5095 or floor division, by a power of two, so we can treat it that
5096 way unless the multiplier or divisor overflows. Signed
5097 left-shift overflow is implementation-defined rather than
5098 undefined in C90, so do not convert signed left shift into
   multiplication (the comment is truncated in this copy).  */
5100 if (TREE_CODE (op1) == INTEGER_CST
5101 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5102 /* const_binop may not detect overflow correctly,
5103 so check for it explicitly here. */
5104 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5105 && TREE_INT_CST_HIGH (op1) == 0
5106 && 0 != (t1 = fold_convert (ctype,
5107 const_binop (LSHIFT_EXPR,
5110 && ! TREE_OVERFLOW (t1))
/* Re-express the shift as MULT/FLOOR_DIV by 2**op1 and recurse.  */
5111 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5112 ? MULT_EXPR : FLOOR_DIV_EXPR,
5113 ctype, fold_convert (ctype, op0), t1),
5114 c, code, wide_type);
5117 case PLUS_EXPR: case MINUS_EXPR:
5118 /* See if we can eliminate the operation on both sides. If we can, we
5119 can return a new PLUS or MINUS. If we can't, the only remaining
5120 cases where we can do anything are if the second operand is a
   constant (the comment is truncated in this copy).  */
5122 t1 = extract_muldiv (op0, c, code, wide_type);
5123 t2 = extract_muldiv (op1, c, code, wide_type);
5124 if (t1 != 0 && t2 != 0
5125 && (code == MULT_EXPR
5126 /* If not multiplication, we can only do this if both operands
5127 are divisible by c. */
5128 || (multiple_of_p (ctype, op0, c)
5129 && multiple_of_p (ctype, op1, c))))
5130 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5131 fold_convert (ctype, t2)));
5133 /* If this was a subtraction, negate OP1 and set it to be an addition.
5134 This simplifies the logic below. */
5135 if (tcode == MINUS_EXPR)
5136 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5138 if (TREE_CODE (op1) != INTEGER_CST)
5141 /* If either OP1 or C are negative, this optimization is not safe for
5142 some of the division and remainder types while for others we need
5143 to change the code. */
5144 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5146 if (code == CEIL_DIV_EXPR)
5147 code = FLOOR_DIV_EXPR;
5148 else if (code == FLOOR_DIV_EXPR)
5149 code = CEIL_DIV_EXPR;
5150 else if (code != MULT_EXPR
5151 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5155 /* If it's a multiply or a division/modulus operation of a multiple
5156 of our constant, do the operation and verify it doesn't overflow. */
5157 if (code == MULT_EXPR
5158 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5160 op1 = const_binop (code, fold_convert (ctype, op1),
5161 fold_convert (ctype, c), 0);
5162 /* We allow the constant to overflow with wrapping semantics. */
5164 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5170 /* If we have an unsigned type that is not a sizetype, we cannot widen
5171 the operation since it will change the result if the original
5172 computation overflowed. */
5173 if (TYPE_UNSIGNED (ctype)
5174 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5178 /* If we were able to eliminate our operation from the first side,
5179 apply our operation to the second side and reform the PLUS. */
5180 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5181 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5183 /* The last case is if we are a multiply. In that case, we can
5184 apply the distributive law to commute the multiply and addition
5185 if the multiplication of the constants doesn't overflow. */
5186 if (code == MULT_EXPR)
5187 return fold (build2 (tcode, ctype,
5188 fold (build2 (code, ctype,
5189 fold_convert (ctype, op0),
5190 fold_convert (ctype, c))),
5196 /* We have a special case here if we are doing something like
5197 (C * 8) % 4 since we know that's zero. */
5198 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5199 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5200 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5201 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5202 return omit_one_operand (type, integer_zero_node, op0);
5204 /* ... fall through ... */
5206 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5207 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5208 /* If we can extract our operation from the LHS, do so and return a
5209 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5210 do something only if the second operand is a constant. */
5212 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5213 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5214 fold_convert (ctype, op1)));
5215 else if (tcode == MULT_EXPR && code == MULT_EXPR
5216 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5217 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5218 fold_convert (ctype, t1)));
5219 else if (TREE_CODE (op1) != INTEGER_CST)
5222 /* If these are the same operation types, we can associate them
5223 assuming no overflow. */
5225 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5226 fold_convert (ctype, c), 0))
5227 && ! TREE_OVERFLOW (t1))
5228 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5230 /* If these operations "cancel" each other, we have the main
5231 optimizations of this pass, which occur when either constant is a
5232 multiple of the other, in which case we replace this with either an
5233 operation or CODE or TCODE.
5235 If we have an unsigned type that is not a sizetype, we cannot do
5236 this since it will change the result if the original computation
   overflowed (the comment is truncated in this copy).  */
5238 if ((! TYPE_UNSIGNED (ctype)
5239 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5241 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5242 || (tcode == MULT_EXPR
5243 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5244 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
/* op1 a multiple of c: keep TCODE with constant op1/c ...  */
5246 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5247 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5248 fold_convert (ctype,
5249 const_binop (TRUNC_DIV_EXPR,
/* ... or c a multiple of op1: apply CODE with constant c/op1.  */
5251 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5252 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5253 fold_convert (ctype,
5254 const_binop (TRUNC_DIV_EXPR,
5266 /* Return a node which has the indicated constant VALUE (either 0 or
5267 1), and is of the indicated TYPE.  Shared nodes are reused for the
   common integer/boolean types; other types go through the language
   hook or a fresh integer constant. */
5270 constant_boolean_node (int value, tree type)
5272 if (type == integer_type_node)
5273 return value ? integer_one_node : integer_zero_node;
5274 else if (type == boolean_type_node)
5275 return value ? boolean_true_node : boolean_false_node;
5276 else if (TREE_CODE (type) == BOOLEAN_TYPE)
/* Language-specific boolean types get their own truth-value nodes.  */
5277 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5278 : integer_zero_node);
5280 return build_int_cst (type, value);
5283 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5284 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5285 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5286 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5287 COND is the first argument to CODE; otherwise (as in the example
5288 given here), it is the second argument. TYPE is the type of the
5289 original expression. Return NULL_TREE if no simplification is
   possible (the comment is truncated in this copy).  */
5293 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5294 tree cond, tree arg, int cond_first_p)
5296 tree test, true_value, false_value;
5297 tree lhs = NULL_TREE;
5298 tree rhs = NULL_TREE;
5300 /* This transformation is only worthwhile if we don't have to wrap
5301 arg in a SAVE_EXPR, and the operation can be simplified on at least
5302 one of the branches once it is pushed inside the COND_EXPR. */
5303 if (!TREE_CONSTANT (arg))
5306 if (TREE_CODE (cond) == COND_EXPR)
5308 test = TREE_OPERAND (cond, 0);
5309 true_value = TREE_OPERAND (cond, 1);
5310 false_value = TREE_OPERAND (cond, 2);
5311 /* If this operand throws an expression, then it does not make
5312 sense to try to perform a logical or arithmetic operation
   involving it (the comment is truncated in this copy). */
5314 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5316 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* Otherwise COND is a comparison: treat it as (cond ? true : false).  */
5321 tree testtype = TREE_TYPE (cond);
5323 true_value = constant_boolean_node (true, testtype);
5324 false_value = constant_boolean_node (false, testtype);
/* Apply the operation to each arm, respecting operand order.  */
5328 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5329 : build2 (code, type, arg, true_value))
5331 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5332 : build2 (code, type, arg, false_value));
5334 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5335 return fold_convert (type, test);
5339 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5341 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5342 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5343 ADDEND is the same as X.
5345 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5346 and finite. The problematic cases are when X is zero, and its mode
5347 has signed zeros. In the case of rounding towards -infinity,
5348 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5349 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5352 fold_real_zero_addition_p (tree type, tree addend, int negate)
5354 if (!real_zerop (addend))
5357 /* Don't allow the fold with -fsignaling-nans. */
5358 if (HONOR_SNANS (TYPE_MODE (type)))
5361 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5362 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5365 /* Treat x + -0 as x - 0 and x - -0 as x + 0.  (The negation of NEGATE
   on this path is on a line not visible in this copy.)  */
5366 if (TREE_CODE (addend) == REAL_CST
5367 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5370 /* The mode has signed zeros, and we have to honor their sign.
5371 In this situation, there is only one case we can return true for.
5372 X - 0 is the same as X unless rounding towards -infinity is
5374 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5377 /* Subroutine of fold() that checks comparisons of built-in math
5378 functions against real constants.
5380 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5381 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5382 is the type of the result and ARG0 and ARG1 are the operands of the
5383 comparison. ARG1 must be a TREE_REAL_CST.
5385 The function returns the constant folded tree if a simplification
5386 can be made, and NULL_TREE otherwise. */
5389 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5390 tree type, tree arg0, tree arg1)
/* Only sqrt-family builtins are handled in the visible portion.  */
5394 if (BUILTIN_SQRT_P (fcode))
5396 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5397 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5399 c = TREE_REAL_CST (arg1);
5400 if (REAL_VALUE_NEGATIVE (c))
5402 /* sqrt(x) cmp y is always false for cmp in {==, <, <=}, if y is
   negative. */
5403 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5404 return omit_one_operand (type, integer_zero_node, arg);
5406 /* sqrt(x) > y is always true, if y is negative and we
5407 don't care about NaNs, i.e. negative values of x. */
5408 if (code == NE_EXPR || !HONOR_NANS (mode))
5409 return omit_one_operand (type, integer_one_node, arg);
5411 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5412 return fold (build2 (GE_EXPR, type, arg,
5413 build_real (TREE_TYPE (arg), dconst0)));
5415 else if (code == GT_EXPR || code == GE_EXPR)
/* Square the bound: sqrt(x) > c  <=>  x > c*c (NaN/Inf caveats below). */
5419 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5420 real_convert (&c2, mode, &c2);
5422 if (REAL_VALUE_ISINF (c2))
5424 /* sqrt(x) > y is x == +Inf, when y is very large. */
5425 if (HONOR_INFINITIES (mode))
5426 return fold (build2 (EQ_EXPR, type, arg,
5427 build_real (TREE_TYPE (arg), c2)));
5429 /* sqrt(x) > y is always false, when y is very large
5430 and we don't care about infinities. */
5431 return omit_one_operand (type, integer_zero_node, arg);
5434 /* sqrt(x) > c is the same as x > c*c. */
5435 return fold (build2 (code, type, arg,
5436 build_real (TREE_TYPE (arg), c2)));
5438 else if (code == LT_EXPR || code == LE_EXPR)
5442 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5443 real_convert (&c2, mode, &c2);
5445 if (REAL_VALUE_ISINF (c2))
5447 /* sqrt(x) < y is always true, when y is a very large
5448 value and we don't care about NaNs or Infinities. */
5449 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5450 return omit_one_operand (type, integer_one_node, arg);
5452 /* sqrt(x) < y is x != +Inf when y is very large and we
5453 don't care about NaNs. */
5454 if (! HONOR_NANS (mode))
5455 return fold (build2 (NE_EXPR, type, arg,
5456 build_real (TREE_TYPE (arg), c2)));
5458 /* sqrt(x) < y is x >= 0 when y is very large and we
5459 don't care about Infinities. */
5460 if (! HONOR_INFINITIES (mode))
5461 return fold (build2 (GE_EXPR, type, arg,
5462 build_real (TREE_TYPE (arg), dconst0)));
5464 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5465 if (lang_hooks.decls.global_bindings_p () != 0
5466 || CONTAINS_PLACEHOLDER_P (arg))
/* save_expr makes ARG evaluate once across both sub-comparisons.  */
5469 arg = save_expr (arg);
5470 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5471 fold (build2 (GE_EXPR, type, arg,
5472 build_real (TREE_TYPE (arg),
5474 fold (build2 (NE_EXPR, type, arg,
5475 build_real (TREE_TYPE (arg),
5479 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5480 if (! HONOR_NANS (mode))
5481 return fold (build2 (code, type, arg,
5482 build_real (TREE_TYPE (arg), c2)));
5484 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5485 if (lang_hooks.decls.global_bindings_p () == 0
5486 && ! CONTAINS_PLACEHOLDER_P (arg))
5488 arg = save_expr (arg);
5489 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5490 fold (build2 (GE_EXPR, type, arg,
5491 build_real (TREE_TYPE (arg),
5493 fold (build2 (code, type, arg,
5494 build_real (TREE_TYPE (arg),
5503 /* Subroutine of fold() that optimizes comparisons against Infinities,
5504 either +Inf or -Inf.
5506 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5507 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5508 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5510 The function returns the constant folded tree if a simplification
5511 can be made, and NULL_TREE otherwise. */
5514 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5516 enum machine_mode mode;
5517 REAL_VALUE_TYPE max;
5521 mode = TYPE_MODE (TREE_TYPE (arg0));
5523 /* For negative infinity swap the sense of the comparison. */
5524 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5526 code = swap_tree_comparison (code);
5531 /* x > +Inf is always false, if we ignore sNaNs. */
5532 if (HONOR_SNANS (mode))
5534 return omit_one_operand (type, integer_zero_node, arg0);
5537 /* x <= +Inf is always true, if we don't care about NaNs. */
5538 if (! HONOR_NANS (mode))
5539 return omit_one_operand (type, integer_one_node, arg0);
5541 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5542 if (lang_hooks.decls.global_bindings_p () == 0
5543 && ! CONTAINS_PLACEHOLDER_P (arg0))
/* save_expr so ARG0 is evaluated only once in x == x.  */
5545 arg0 = save_expr (arg0);
5546 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5552 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5553 real_maxval (&max, neg, mode);
5554 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5555 arg0, build_real (TREE_TYPE (arg0), max)));
5558 /* x < +Inf is always equal to x <= DBL_MAX. */
5559 real_maxval (&max, neg, mode);
5560 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5561 arg0, build_real (TREE_TYPE (arg0), max)));
5564 /* x != +Inf is always equal to !(x > DBL_MAX). */
5565 real_maxval (&max, neg, mode);
5566 if (! HONOR_NANS (mode))
5567 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5568 arg0, build_real (TREE_TYPE (arg0), max)));
5570 /* The transformation below creates non-gimple code and thus is
5571 not appropriate if we are in gimple form. */
5575 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5576 arg0, build_real (TREE_TYPE (arg0), max)));
5577 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5586 /* Subroutine of fold() that optimizes comparisons of a division by
5587 a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2 (the comment is truncated in this copy).
5590 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5591 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5592 are the operands of the comparison. ARG1 must be an INTEGER_CST
   (the body reads it with TREE_INT_CST_LOW/HIGH).
5594 The function returns the constant folded tree if a simplification
5595 can be made, and NULL_TREE otherwise. */
5598 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5600 tree prod, tmp, hi, lo;
5601 tree arg00 = TREE_OPERAND (arg0, 0);
5602 tree arg01 = TREE_OPERAND (arg0, 1);
5603 unsigned HOST_WIDE_INT lpart;
5604 HOST_WIDE_INT hpart;
5607 /* We have to do this the hard way to detect unsigned overflow.
5608 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5609 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5610 TREE_INT_CST_HIGH (arg01),
5611 TREE_INT_CST_LOW (arg1),
5612 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5613 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5614 prod = force_fit_type (prod, -1, overflow, false);
/* Compute [lo, hi], the range of ARG00 values for which ARG00/ARG01
   equals ARG1.  The sign cases differ because division truncates
   toward zero.  */
5616 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5618 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5621 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5622 overflow = add_double (TREE_INT_CST_LOW (prod),
5623 TREE_INT_CST_HIGH (prod),
5624 TREE_INT_CST_LOW (tmp),
5625 TREE_INT_CST_HIGH (tmp),
5627 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5628 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5629 TREE_CONSTANT_OVERFLOW (prod));
5631 else if (tree_int_cst_sgn (arg01) >= 0)
5633 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5634 switch (tree_int_cst_sgn (arg1))
5637 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5642 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5647 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Negative divisor: the range is built with the opposite offsets.  */
5657 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5658 switch (tree_int_cst_sgn (arg1))
5661 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5666 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5671 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Rewrite the comparison as a range check on ARG00, degrading to a
   single bound (or a constant) when LO or HI overflowed the type.  */
5683 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5684 return omit_one_operand (type, integer_zero_node, arg00);
5685 if (TREE_OVERFLOW (hi))
5686 return fold (build2 (GE_EXPR, type, arg00, lo));
5687 if (TREE_OVERFLOW (lo))
5688 return fold (build2 (LE_EXPR, type, arg00, hi));
5689 return build_range_check (type, arg00, 1, lo, hi);
5692 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5693 return omit_one_operand (type, integer_one_node, arg00);
5694 if (TREE_OVERFLOW (hi))
5695 return fold (build2 (LT_EXPR, type, arg00, lo));
5696 if (TREE_OVERFLOW (lo))
5697 return fold (build2 (GT_EXPR, type, arg00, hi));
5698 return build_range_check (type, arg00, 0, lo, hi);
5701 if (TREE_OVERFLOW (lo))
5702 return omit_one_operand (type, integer_zero_node, arg00);
5703 return fold (build2 (LT_EXPR, type, arg00, lo));
5706 if (TREE_OVERFLOW (hi))
5707 return omit_one_operand (type, integer_one_node, arg00);
5708 return fold (build2 (LE_EXPR, type, arg00, hi));
5711 if (TREE_OVERFLOW (hi))
5712 return omit_one_operand (type, integer_zero_node, arg00);
5713 return fold (build2 (GT_EXPR, type, arg00, hi));
5716 if (TREE_OVERFLOW (lo))
5717 return omit_one_operand (type, integer_one_node, arg00);
5718 return fold (build2 (GE_EXPR, type, arg00, lo));
5728 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5729 equality/inequality test, then return a simplified form of
5730 the test using shifts and logical operations. Otherwise return
5731 NULL. TYPE is the desired result type. */
5734 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5737 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5739 if (code == TRUTH_NOT_EXPR)
5741 code = TREE_CODE (arg0);
5742 if (code != NE_EXPR && code != EQ_EXPR)
5745 /* Extract the arguments of the EQ/NE. */
5746 arg1 = TREE_OPERAND (arg0, 1);
5747 arg0 = TREE_OPERAND (arg0, 0);
5749 /* This requires us to invert the code. */
5750 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5753 /* If this is testing a single bit, we can optimize the test. */
5754 if ((code == NE_EXPR || code == EQ_EXPR)
5755 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5756 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5758 tree inner = TREE_OPERAND (arg0, 0);
5759 tree type = TREE_TYPE (arg0);
5760 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5761 enum machine_mode operand_mode = TYPE_MODE (type);
5763 tree signed_type, unsigned_type, intermediate_type;
5766 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5767 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5768 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5769 if (arg00 != NULL_TREE
5770 /* This is only a win if casting to a signed type is cheap,
5771 i.e. when arg00's type is not a partial mode. */
5772 && TYPE_PRECISION (TREE_TYPE (arg00))
5773 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5775 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5776 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5777 result_type, fold_convert (stype, arg00),
5778 fold_convert (stype, integer_zero_node)));
5781 /* Otherwise we have (A & C) != 0 where C is a single bit,
5782 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5783 Similarly for (A & C) == 0. */
5785 /* If INNER is a right shift of a constant and it plus BITNUM does
5786 not overflow, adjust BITNUM and INNER. */
5787 if (TREE_CODE (inner) == RSHIFT_EXPR
5788 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5789 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5790 && bitnum < TYPE_PRECISION (type)
5791 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5792 bitnum - TYPE_PRECISION (type)))
/* Fold (X >> S) test of bit N into an X test of bit N+S.  */
5794 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5795 inner = TREE_OPERAND (inner, 0);
5798 /* If we are going to be able to omit the AND below, we must do our
5799 operations as unsigned. If we must use the AND, we have a choice.
5800 Normally unsigned is faster, but for some machines signed is. */
5801 #ifdef LOAD_EXTEND_OP
5802 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
/* NOTE(review): the #else arm assigning ops_unsigned and #endif are on
   lines not visible in this copy.  */
5807 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5808 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5809 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5810 inner = fold_convert (intermediate_type, inner);
/* Shift the tested bit down to position 0.  */
5813 inner = build2 (RSHIFT_EXPR, intermediate_type,
5814 inner, size_int (bitnum));
/* For ==, flip the result bit with XOR 1.  */
5816 if (code == EQ_EXPR)
5817 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5818 inner, integer_one_node));
5820 /* Put the AND last so it can combine with more things. */
5821 inner = build2 (BIT_AND_EXPR, intermediate_type,
5822 inner, integer_one_node);
5824 /* Make sure to return the proper type. */
5825 inner = fold_convert (result_type, inner);
5832 /* Check whether we are allowed to reorder operands arg0 and arg1,
5833 such that the evaluation of arg1 occurs before arg0. */
/* NOTE(review): the return values of the two early-out `if`s are
   elided in this extract; presumably reordering is always allowed
   when -fevaluation-order is off or either operand is constant --
   confirm against the complete file.  */
5836 reorder_operands_p (tree arg0, tree arg1)
5838 if (! flag_evaluation_order)
5840 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
/* With a mandated evaluation order and no constant operand,
   reordering is safe only when neither operand has side effects.  */
5842 return ! TREE_SIDE_EFFECTS (arg0)
5843 && ! TREE_SIDE_EFFECTS (arg1);
5846 /* Test whether it is preferable two swap two operands, ARG0 and
5847 ARG1, for example because ARG0 is an integer constant and ARG1
5848 isn't. If REORDER is true, only recommend swapping if we can
5849 evaluate the operands in reverse order. */
/* NOTE(review): every `return` line of the constant-ranking ladder
   below is elided in this extract.  The visible pattern ranks
   operands so that "more constant" operands sort to the right
   (arg1), canonicalizing commutative expressions -- confirm the
   exact true/false returns against the complete file.  */
5852 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
/* Look through sign-preserving conversions before classifying.  */
5854 STRIP_SIGN_NOPS (arg0);
5855 STRIP_SIGN_NOPS (arg1);
/* Ranking ladder: INTEGER_CST, REAL_CST, COMPLEX_CST, then any
   other TREE_CONSTANT node.  A constant arg1 means "don't swap";
   a constant arg0 (with non-constant arg1) means "swap".  */
5857 if (TREE_CODE (arg1) == INTEGER_CST)
5859 if (TREE_CODE (arg0) == INTEGER_CST)
5862 if (TREE_CODE (arg1) == REAL_CST)
5864 if (TREE_CODE (arg0) == REAL_CST)
5867 if (TREE_CODE (arg1) == COMPLEX_CST)
5869 if (TREE_CODE (arg0) == COMPLEX_CST)
5872 if (TREE_CONSTANT (arg1))
5874 if (TREE_CONSTANT (arg0))
/* When REORDER is set under -fevaluation-order, refuse the swap if
   either operand has side effects (evaluation order would change).  */
5880 if (reorder && flag_evaluation_order
5881 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5889 if (reorder && flag_evaluation_order
5890 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5898 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5899 for commutative and comparison operators. Ensuring a canonical
5900 form allows the optimizers to find additional redundancies without
5901 having to explicitly check for both orderings. */
5902 if (TREE_CODE (arg0) == SSA_NAME
5903 && TREE_CODE (arg1) == SSA_NAME
5904 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5910 /* Perform constant folding and related simplification of EXPR.
5911 The related simplifications include x*1 => x, x*0 => 0, etc.,
5912 and application of the associative law.
5913 NOP_EXPR conversions may be removed freely (as long as we
5914 are careful not to change the type of the overall expression).
5915 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5916 but we can constant-fold them if they have constant operands. */
5918 #ifdef ENABLE_FOLD_CHECKING
5919 # define fold(x) fold_1 (x)
5920 static tree fold_1 (tree);
5926 const tree t = expr;
5927 const tree type = TREE_TYPE (expr);
5928 tree t1 = NULL_TREE;
5930 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5931 enum tree_code code = TREE_CODE (t);
5932 int kind = TREE_CODE_CLASS (code);
5934 /* WINS will be nonzero when the switch is done
5935 if all operands are constant. */
5938 /* Return right away if a constant. */
5942 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5946 /* Special case for conversion ops that can have fixed point args. */
5947 arg0 = TREE_OPERAND (t, 0);
5949 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5951 STRIP_SIGN_NOPS (arg0);
5953 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5954 subop = TREE_REALPART (arg0);
5958 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5959 && TREE_CODE (subop) != REAL_CST)
5960 /* Note that TREE_CONSTANT isn't enough:
5961 static var addresses are constant but we can't
5962 do arithmetic on them. */
5965 else if (IS_EXPR_CODE_CLASS (kind))
5967 int len = first_rtl_op (code);
5969 for (i = 0; i < len; i++)
5971 tree op = TREE_OPERAND (t, i);
5975 continue; /* Valid for CALL_EXPR, at least. */
5977 /* Strip any conversions that don't change the mode. This is
5978 safe for every expression, except for a comparison expression
5979 because its signedness is derived from its operands. So, in
5980 the latter case, only strip conversions that don't change the
5983 Note that this is done as an internal manipulation within the
5984 constant folder, in order to find the simplest representation
5985 of the arguments so that their form can be studied. In any
5986 cases, the appropriate type conversions should be put back in
5987 the tree that will get out of the constant folder. */
5989 STRIP_SIGN_NOPS (op);
5993 if (TREE_CODE (op) == COMPLEX_CST)
5994 subop = TREE_REALPART (op);
5998 if (TREE_CODE (subop) != INTEGER_CST
5999 && TREE_CODE (subop) != REAL_CST)
6000 /* Note that TREE_CONSTANT isn't enough:
6001 static var addresses are constant but we can't
6002 do arithmetic on them. */
6012 /* If this is a commutative operation, and ARG0 is a constant, move it
6013 to ARG1 to reduce the number of tests below. */
6014 if (commutative_tree_code (code)
6015 && tree_swap_operands_p (arg0, arg1, true))
6016 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6017 TREE_OPERAND (t, 0)));
6019 /* Now WINS is set as described above,
6020 ARG0 is the first operand of EXPR,
6021 and ARG1 is the second operand (if it has more than one operand).
6023 First check for cases where an arithmetic operation is applied to a
6024 compound, conditional, or comparison operation. Push the arithmetic
6025 operation inside the compound or conditional to see if any folding
6026 can then be done. Convert comparison to conditional for this purpose.
6027 The also optimizes non-constant cases that used to be done in
6030 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6031 one of the operands is a comparison and the other is a comparison, a
6032 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6033 code below would make the expression more complex. Change it to a
6034 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6035 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6037 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6038 || code == EQ_EXPR || code == NE_EXPR)
6039 && ((truth_value_p (TREE_CODE (arg0))
6040 && (truth_value_p (TREE_CODE (arg1))
6041 || (TREE_CODE (arg1) == BIT_AND_EXPR
6042 && integer_onep (TREE_OPERAND (arg1, 1)))))
6043 || (truth_value_p (TREE_CODE (arg1))
6044 && (truth_value_p (TREE_CODE (arg0))
6045 || (TREE_CODE (arg0) == BIT_AND_EXPR
6046 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6048 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6049 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6051 type, fold_convert (boolean_type_node, arg0),
6052 fold_convert (boolean_type_node, arg1)));
6054 if (code == EQ_EXPR)
6055 tem = invert_truthvalue (tem);
6060 if (TREE_CODE_CLASS (code) == '1')
6062 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6063 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6064 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6065 else if (TREE_CODE (arg0) == COND_EXPR)
6067 tree arg01 = TREE_OPERAND (arg0, 1);
6068 tree arg02 = TREE_OPERAND (arg0, 2);
6069 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6070 arg01 = fold (build1 (code, type, arg01));
6071 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6072 arg02 = fold (build1 (code, type, arg02));
6073 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6076 /* If this was a conversion, and all we did was to move into
6077 inside the COND_EXPR, bring it back out. But leave it if
6078 it is a conversion from integer to integer and the
6079 result precision is no wider than a word since such a
6080 conversion is cheap and may be optimized away by combine,
6081 while it couldn't if it were outside the COND_EXPR. Then return
6082 so we don't get into an infinite recursion loop taking the
6083 conversion out and then back in. */
6085 if ((code == NOP_EXPR || code == CONVERT_EXPR
6086 || code == NON_LVALUE_EXPR)
6087 && TREE_CODE (tem) == COND_EXPR
6088 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6089 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6090 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6091 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6092 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6093 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6094 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6096 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6097 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
6098 tem = build1 (code, type,
6100 TREE_TYPE (TREE_OPERAND
6101 (TREE_OPERAND (tem, 1), 0)),
6102 TREE_OPERAND (tem, 0),
6103 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6104 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6107 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6109 if (TREE_CODE (type) == BOOLEAN_TYPE)
6111 arg0 = copy_node (arg0);
6112 TREE_TYPE (arg0) = type;
6115 else if (TREE_CODE (type) != INTEGER_TYPE)
6116 return fold (build3 (COND_EXPR, type, arg0,
6117 fold (build1 (code, type,
6119 fold (build1 (code, type,
6120 integer_zero_node))));
6123 else if (TREE_CODE_CLASS (code) == '<'
6124 && TREE_CODE (arg0) == COMPOUND_EXPR)
6125 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6126 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6127 else if (TREE_CODE_CLASS (code) == '<'
6128 && TREE_CODE (arg1) == COMPOUND_EXPR)
6129 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6130 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6131 else if (TREE_CODE_CLASS (code) == '2'
6132 || TREE_CODE_CLASS (code) == '<')
6134 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6135 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6136 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6138 if (TREE_CODE (arg1) == COMPOUND_EXPR
6139 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6140 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6141 fold (build2 (code, type,
6142 arg0, TREE_OPERAND (arg1, 1))));
6144 if (TREE_CODE (arg0) == COND_EXPR
6145 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6147 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6148 /*cond_first_p=*/1);
6149 if (tem != NULL_TREE)
6153 if (TREE_CODE (arg1) == COND_EXPR
6154 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
6156 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6157 /*cond_first_p=*/0);
6158 if (tem != NULL_TREE)
6166 return fold (DECL_INITIAL (t));
6171 case FIX_TRUNC_EXPR:
6173 case FIX_FLOOR_EXPR:
6174 case FIX_ROUND_EXPR:
6175 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6176 return TREE_OPERAND (t, 0);
6178 /* Handle cases of two conversions in a row. */
6179 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6180 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6182 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6183 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6184 int inside_int = INTEGRAL_TYPE_P (inside_type);
6185 int inside_ptr = POINTER_TYPE_P (inside_type);
6186 int inside_float = FLOAT_TYPE_P (inside_type);
6187 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6188 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6189 int inter_int = INTEGRAL_TYPE_P (inter_type);
6190 int inter_ptr = POINTER_TYPE_P (inter_type);
6191 int inter_float = FLOAT_TYPE_P (inter_type);
6192 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6193 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6194 int final_int = INTEGRAL_TYPE_P (type);
6195 int final_ptr = POINTER_TYPE_P (type);
6196 int final_float = FLOAT_TYPE_P (type);
6197 unsigned int final_prec = TYPE_PRECISION (type);
6198 int final_unsignedp = TYPE_UNSIGNED (type);
6200 /* In addition to the cases of two conversions in a row
6201 handled below, if we are converting something to its own
6202 type via an object of identical or wider precision, neither
6203 conversion is needed. */
6204 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6205 && ((inter_int && final_int) || (inter_float && final_float))
6206 && inter_prec >= final_prec)
6207 return fold (build1 (code, type,
6208 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6210 /* Likewise, if the intermediate and final types are either both
6211 float or both integer, we don't need the middle conversion if
6212 it is wider than the final type and doesn't change the signedness
6213 (for integers). Avoid this if the final type is a pointer
6214 since then we sometimes need the inner conversion. Likewise if
6215 the outer has a precision not equal to the size of its mode. */
6216 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6217 || (inter_float && inside_float))
6218 && inter_prec >= inside_prec
6219 && (inter_float || inter_unsignedp == inside_unsignedp)
6220 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6221 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6223 return fold (build1 (code, type,
6224 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6226 /* If we have a sign-extension of a zero-extended value, we can
6227 replace that by a single zero-extension. */
6228 if (inside_int && inter_int && final_int
6229 && inside_prec < inter_prec && inter_prec < final_prec
6230 && inside_unsignedp && !inter_unsignedp)
6231 return fold (build1 (code, type,
6232 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6234 /* Two conversions in a row are not needed unless:
6235 - some conversion is floating-point (overstrict for now), or
6236 - the intermediate type is narrower than both initial and
6238 - the intermediate type and innermost type differ in signedness,
6239 and the outermost type is wider than the intermediate, or
6240 - the initial type is a pointer type and the precisions of the
6241 intermediate and final types differ, or
6242 - the final type is a pointer type and the precisions of the
6243 initial and intermediate types differ. */
6244 if (! inside_float && ! inter_float && ! final_float
6245 && (inter_prec > inside_prec || inter_prec > final_prec)
6246 && ! (inside_int && inter_int
6247 && inter_unsignedp != inside_unsignedp
6248 && inter_prec < final_prec)
6249 && ((inter_unsignedp && inter_prec > inside_prec)
6250 == (final_unsignedp && final_prec > inter_prec))
6251 && ! (inside_ptr && inter_prec != final_prec)
6252 && ! (final_ptr && inside_prec != inter_prec)
6253 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6254 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6256 return fold (build1 (code, type,
6257 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6260 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6261 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6262 /* Detect assigning a bitfield. */
6263 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6264 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6266 /* Don't leave an assignment inside a conversion
6267 unless assigning a bitfield. */
6268 tree prev = TREE_OPERAND (t, 0);
6269 tem = copy_node (t);
6270 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6271 /* First do the assignment, then return converted constant. */
6272 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6273 TREE_NO_WARNING (tem) = 1;
6274 TREE_USED (tem) = 1;
6278 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6279 constants (if x has signed type, the sign bit cannot be set
6280 in c). This folds extension into the BIT_AND_EXPR. */
6281 if (INTEGRAL_TYPE_P (type)
6282 && TREE_CODE (type) != BOOLEAN_TYPE
6283 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6284 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6286 tree and = TREE_OPERAND (t, 0);
6287 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6290 if (TYPE_UNSIGNED (TREE_TYPE (and))
6291 || (TYPE_PRECISION (type)
6292 <= TYPE_PRECISION (TREE_TYPE (and))))
6294 else if (TYPE_PRECISION (TREE_TYPE (and1))
6295 <= HOST_BITS_PER_WIDE_INT
6296 && host_integerp (and1, 1))
6298 unsigned HOST_WIDE_INT cst;
6300 cst = tree_low_cst (and1, 1);
6301 cst &= (HOST_WIDE_INT) -1
6302 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6303 change = (cst == 0);
6304 #ifdef LOAD_EXTEND_OP
6306 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6309 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6310 and0 = fold_convert (uns, and0);
6311 and1 = fold_convert (uns, and1);
6316 return fold (build2 (BIT_AND_EXPR, type,
6317 fold_convert (type, and0),
6318 fold_convert (type, and1)));
6321 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6322 T2 being pointers to types of the same size. */
6323 if (POINTER_TYPE_P (TREE_TYPE (t))
6324 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
6325 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6326 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6328 tree arg00 = TREE_OPERAND (arg0, 0);
6329 tree t0 = TREE_TYPE (t);
6330 tree t1 = TREE_TYPE (arg00);
6331 tree tt0 = TREE_TYPE (t0);
6332 tree tt1 = TREE_TYPE (t1);
6333 tree s0 = TYPE_SIZE (tt0);
6334 tree s1 = TYPE_SIZE (tt1);
6336 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6337 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6338 TREE_OPERAND (arg0, 1));
6341 tem = fold_convert_const (code, type, arg0);
6342 return tem ? tem : t;
6344 case VIEW_CONVERT_EXPR:
6345 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6346 return build1 (VIEW_CONVERT_EXPR, type,
6347 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6351 if (TREE_CODE (arg0) == CONSTRUCTOR
6352 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6354 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6356 return TREE_VALUE (m);
6361 if (TREE_CONSTANT (t) != wins)
6363 tem = copy_node (t);
6364 TREE_CONSTANT (tem) = wins;
6365 TREE_INVARIANT (tem) = wins;
6371 if (negate_expr_p (arg0))
6372 return fold_convert (type, negate_expr (arg0));
6376 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6377 return fold_abs_const (arg0, type);
6378 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6379 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6380 /* Convert fabs((double)float) into (double)fabsf(float). */
6381 else if (TREE_CODE (arg0) == NOP_EXPR
6382 && TREE_CODE (type) == REAL_TYPE)
6384 tree targ0 = strip_float_extensions (arg0);
6386 return fold_convert (type, fold (build1 (ABS_EXPR,
6390 else if (tree_expr_nonnegative_p (arg0))
6395 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6396 return fold_convert (type, arg0);
6397 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6398 return build2 (COMPLEX_EXPR, type,
6399 TREE_OPERAND (arg0, 0),
6400 negate_expr (TREE_OPERAND (arg0, 1)));
6401 else if (TREE_CODE (arg0) == COMPLEX_CST)
6402 return build_complex (type, TREE_REALPART (arg0),
6403 negate_expr (TREE_IMAGPART (arg0)));
6404 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6405 return fold (build2 (TREE_CODE (arg0), type,
6406 fold (build1 (CONJ_EXPR, type,
6407 TREE_OPERAND (arg0, 0))),
6408 fold (build1 (CONJ_EXPR, type,
6409 TREE_OPERAND (arg0, 1)))));
6410 else if (TREE_CODE (arg0) == CONJ_EXPR)
6411 return TREE_OPERAND (arg0, 0);
6415 if (TREE_CODE (arg0) == INTEGER_CST)
6416 return fold_not_const (arg0, type);
6417 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6418 return TREE_OPERAND (arg0, 0);
6422 /* A + (-B) -> A - B */
6423 if (TREE_CODE (arg1) == NEGATE_EXPR)
6424 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6425 /* (-A) + B -> B - A */
6426 if (TREE_CODE (arg0) == NEGATE_EXPR
6427 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6428 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6429 if (! FLOAT_TYPE_P (type))
6431 if (integer_zerop (arg1))
6432 return non_lvalue (fold_convert (type, arg0));
6434 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6435 with a constant, and the two constants have no bits in common,
6436 we should treat this as a BIT_IOR_EXPR since this may produce more
6438 if (TREE_CODE (arg0) == BIT_AND_EXPR
6439 && TREE_CODE (arg1) == BIT_AND_EXPR
6440 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6441 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6442 && integer_zerop (const_binop (BIT_AND_EXPR,
6443 TREE_OPERAND (arg0, 1),
6444 TREE_OPERAND (arg1, 1), 0)))
6446 code = BIT_IOR_EXPR;
6450 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6451 (plus (plus (mult) (mult)) (foo)) so that we can
6452 take advantage of the factoring cases below. */
6453 if ((TREE_CODE (arg0) == PLUS_EXPR
6454 && TREE_CODE (arg1) == MULT_EXPR)
6455 || (TREE_CODE (arg1) == PLUS_EXPR
6456 && TREE_CODE (arg0) == MULT_EXPR))
6458 tree parg0, parg1, parg, marg;
6460 if (TREE_CODE (arg0) == PLUS_EXPR)
6461 parg = arg0, marg = arg1;
6463 parg = arg1, marg = arg0;
6464 parg0 = TREE_OPERAND (parg, 0);
6465 parg1 = TREE_OPERAND (parg, 1);
6469 if (TREE_CODE (parg0) == MULT_EXPR
6470 && TREE_CODE (parg1) != MULT_EXPR)
6471 return fold (build2 (PLUS_EXPR, type,
6472 fold (build2 (PLUS_EXPR, type,
6473 fold_convert (type, parg0),
6474 fold_convert (type, marg))),
6475 fold_convert (type, parg1)));
6476 if (TREE_CODE (parg0) != MULT_EXPR
6477 && TREE_CODE (parg1) == MULT_EXPR)
6478 return fold (build2 (PLUS_EXPR, type,
6479 fold (build2 (PLUS_EXPR, type,
6480 fold_convert (type, parg1),
6481 fold_convert (type, marg))),
6482 fold_convert (type, parg0)));
6485 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6487 tree arg00, arg01, arg10, arg11;
6488 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6490 /* (A * C) + (B * C) -> (A+B) * C.
6491 We are most concerned about the case where C is a constant,
6492 but other combinations show up during loop reduction. Since
6493 it is not difficult, try all four possibilities. */
6495 arg00 = TREE_OPERAND (arg0, 0);
6496 arg01 = TREE_OPERAND (arg0, 1);
6497 arg10 = TREE_OPERAND (arg1, 0);
6498 arg11 = TREE_OPERAND (arg1, 1);
6501 if (operand_equal_p (arg01, arg11, 0))
6502 same = arg01, alt0 = arg00, alt1 = arg10;
6503 else if (operand_equal_p (arg00, arg10, 0))
6504 same = arg00, alt0 = arg01, alt1 = arg11;
6505 else if (operand_equal_p (arg00, arg11, 0))
6506 same = arg00, alt0 = arg01, alt1 = arg10;
6507 else if (operand_equal_p (arg01, arg10, 0))
6508 same = arg01, alt0 = arg00, alt1 = arg11;
6510 /* No identical multiplicands; see if we can find a common
6511 power-of-two factor in non-power-of-two multiplies. This
6512 can help in multi-dimensional array access. */
6513 else if (TREE_CODE (arg01) == INTEGER_CST
6514 && TREE_CODE (arg11) == INTEGER_CST
6515 && TREE_INT_CST_HIGH (arg01) == 0
6516 && TREE_INT_CST_HIGH (arg11) == 0)
6518 HOST_WIDE_INT int01, int11, tmp;
6519 int01 = TREE_INT_CST_LOW (arg01);
6520 int11 = TREE_INT_CST_LOW (arg11);
6522 /* Move min of absolute values to int11. */
6523 if ((int01 >= 0 ? int01 : -int01)
6524 < (int11 >= 0 ? int11 : -int11))
6526 tmp = int01, int01 = int11, int11 = tmp;
6527 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6528 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6531 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6533 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6534 build_int_cst (NULL_TREE,
6542 return fold (build2 (MULT_EXPR, type,
6543 fold (build2 (PLUS_EXPR, type,
6550 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6551 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6552 return non_lvalue (fold_convert (type, arg0));
6554 /* Likewise if the operands are reversed. */
6555 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6556 return non_lvalue (fold_convert (type, arg1));
6558 /* Convert X + -C into X - C. */
6559 if (TREE_CODE (arg1) == REAL_CST
6560 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6562 tem = fold_negate_const (arg1, type);
6563 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6564 return fold (build2 (MINUS_EXPR, type,
6565 fold_convert (type, arg0),
6566 fold_convert (type, tem)));
6569 /* Convert x+x into x*2.0. */
6570 if (operand_equal_p (arg0, arg1, 0)
6571 && SCALAR_FLOAT_TYPE_P (type))
6572 return fold (build2 (MULT_EXPR, type, arg0,
6573 build_real (type, dconst2)));
6575 /* Convert x*c+x into x*(c+1). */
6576 if (flag_unsafe_math_optimizations
6577 && TREE_CODE (arg0) == MULT_EXPR
6578 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6579 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6580 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6584 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6585 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6586 return fold (build2 (MULT_EXPR, type, arg1,
6587 build_real (type, c)));
6590 /* Convert x+x*c into x*(c+1). */
6591 if (flag_unsafe_math_optimizations
6592 && TREE_CODE (arg1) == MULT_EXPR
6593 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6594 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6595 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6599 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6600 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6601 return fold (build2 (MULT_EXPR, type, arg0,
6602 build_real (type, c)));
6605 /* Convert x*c1+x*c2 into x*(c1+c2). */
6606 if (flag_unsafe_math_optimizations
6607 && TREE_CODE (arg0) == MULT_EXPR
6608 && TREE_CODE (arg1) == MULT_EXPR
6609 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6610 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6611 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6612 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6613 && operand_equal_p (TREE_OPERAND (arg0, 0),
6614 TREE_OPERAND (arg1, 0), 0))
6616 REAL_VALUE_TYPE c1, c2;
6618 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6619 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6620 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6621 return fold (build2 (MULT_EXPR, type,
6622 TREE_OPERAND (arg0, 0),
6623 build_real (type, c1)));
6625 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6626 if (flag_unsafe_math_optimizations
6627 && TREE_CODE (arg1) == PLUS_EXPR
6628 && TREE_CODE (arg0) != MULT_EXPR)
6630 tree tree10 = TREE_OPERAND (arg1, 0);
6631 tree tree11 = TREE_OPERAND (arg1, 1);
6632 if (TREE_CODE (tree11) == MULT_EXPR
6633 && TREE_CODE (tree10) == MULT_EXPR)
6636 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6637 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6640 /* Convert (b*c + d*e) + a into b*c + (d*e +a) */
6641 if (flag_unsafe_math_optimizations
6642 && TREE_CODE (arg0) == PLUS_EXPR
6643 && TREE_CODE (arg1) != MULT_EXPR)
6645 tree tree00 = TREE_OPERAND (arg0, 0);
6646 tree tree01 = TREE_OPERAND (arg0, 1);
6647 if (TREE_CODE (tree01) == MULT_EXPR
6648 && TREE_CODE (tree00) == MULT_EXPR)
6651 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6652 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6658 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6659 is a rotate of A by C1 bits. */
6660 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6661 is a rotate of A by B bits. */
6663 enum tree_code code0, code1;
6664 code0 = TREE_CODE (arg0);
6665 code1 = TREE_CODE (arg1);
6666 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6667 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6668 && operand_equal_p (TREE_OPERAND (arg0, 0),
6669 TREE_OPERAND (arg1, 0), 0)
6670 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6672 tree tree01, tree11;
6673 enum tree_code code01, code11;
6675 tree01 = TREE_OPERAND (arg0, 1);
6676 tree11 = TREE_OPERAND (arg1, 1);
6677 STRIP_NOPS (tree01);
6678 STRIP_NOPS (tree11);
6679 code01 = TREE_CODE (tree01);
6680 code11 = TREE_CODE (tree11);
6681 if (code01 == INTEGER_CST
6682 && code11 == INTEGER_CST
6683 && TREE_INT_CST_HIGH (tree01) == 0
6684 && TREE_INT_CST_HIGH (tree11) == 0
6685 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6686 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6687 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6688 code0 == LSHIFT_EXPR ? tree01 : tree11);
6689 else if (code11 == MINUS_EXPR)
6691 tree tree110, tree111;
6692 tree110 = TREE_OPERAND (tree11, 0);
6693 tree111 = TREE_OPERAND (tree11, 1);
6694 STRIP_NOPS (tree110);
6695 STRIP_NOPS (tree111);
6696 if (TREE_CODE (tree110) == INTEGER_CST
6697 && 0 == compare_tree_int (tree110,
6699 (TREE_TYPE (TREE_OPERAND
6701 && operand_equal_p (tree01, tree111, 0))
6702 return build2 ((code0 == LSHIFT_EXPR
6705 type, TREE_OPERAND (arg0, 0), tree01);
6707 else if (code01 == MINUS_EXPR)
6709 tree tree010, tree011;
6710 tree010 = TREE_OPERAND (tree01, 0);
6711 tree011 = TREE_OPERAND (tree01, 1);
6712 STRIP_NOPS (tree010);
6713 STRIP_NOPS (tree011);
6714 if (TREE_CODE (tree010) == INTEGER_CST
6715 && 0 == compare_tree_int (tree010,
6717 (TREE_TYPE (TREE_OPERAND
6719 && operand_equal_p (tree11, tree011, 0))
6720 return build2 ((code0 != LSHIFT_EXPR
6723 type, TREE_OPERAND (arg0, 0), tree11);
6729 /* In most languages, can't associate operations on floats through
6730 parentheses. Rather than remember where the parentheses were, we
6731 don't associate floats at all, unless the user has specified
6732 -funsafe-math-optimizations. */
6735 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6737 tree var0, con0, lit0, minus_lit0;
6738 tree var1, con1, lit1, minus_lit1;
6740 /* Split both trees into variables, constants, and literals. Then
6741 associate each group together, the constants with literals,
6742 then the result with variables. This increases the chances of
6743 literals being recombined later and of generating relocatable
6744 expressions for the sum of a constant and literal. */
6745 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6746 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6747 code == MINUS_EXPR);
6749 /* Only do something if we found more than two objects. Otherwise,
6750 nothing has changed and we risk infinite recursion. */
6751 if (2 < ((var0 != 0) + (var1 != 0)
6752 + (con0 != 0) + (con1 != 0)
6753 + (lit0 != 0) + (lit1 != 0)
6754 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6756 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6757 if (code == MINUS_EXPR)
6760 var0 = associate_trees (var0, var1, code, type);
6761 con0 = associate_trees (con0, con1, code, type);
6762 lit0 = associate_trees (lit0, lit1, code, type);
6763 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6765 /* Preserve the MINUS_EXPR if the negative part of the literal is
6766 greater than the positive part. Otherwise, the multiplicative
6767 folding code (i.e extract_muldiv) may be fooled in case
6768 unsigned constants are subtracted, like in the following
6769 example: ((X*2 + 4) - 8U)/2. */
6770 if (minus_lit0 && lit0)
6772 if (TREE_CODE (lit0) == INTEGER_CST
6773 && TREE_CODE (minus_lit0) == INTEGER_CST
6774 && tree_int_cst_lt (lit0, minus_lit0))
6776 minus_lit0 = associate_trees (minus_lit0, lit0,
6782 lit0 = associate_trees (lit0, minus_lit0,
6790 return fold_convert (type,
6791 associate_trees (var0, minus_lit0,
6795 con0 = associate_trees (con0, minus_lit0,
6797 return fold_convert (type,
6798 associate_trees (var0, con0,
6803 con0 = associate_trees (con0, lit0, code, type);
6804 return fold_convert (type, associate_trees (var0, con0,
6811 t1 = const_binop (code, arg0, arg1, 0);
6812 if (t1 != NULL_TREE)
6814 /* The return value should always have
6815 the same type as the original expression. */
6816 if (TREE_TYPE (t1) != type)
6817 t1 = fold_convert (type, t1);
6824 /* A - (-B) -> A + B */
6825 if (TREE_CODE (arg1) == NEGATE_EXPR)
6826 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6827 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6828 if (TREE_CODE (arg0) == NEGATE_EXPR
6829 && (FLOAT_TYPE_P (type)
6830 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6831 && negate_expr_p (arg1)
6832 && reorder_operands_p (arg0, arg1))
6833 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6834 TREE_OPERAND (arg0, 0)));
6836 if (! FLOAT_TYPE_P (type))
6838 if (! wins && integer_zerop (arg0))
6839 return negate_expr (fold_convert (type, arg1));
6840 if (integer_zerop (arg1))
6841 return non_lvalue (fold_convert (type, arg0));
6843 /* Fold A - (A & B) into ~B & A. */
6844 if (!TREE_SIDE_EFFECTS (arg0)
6845 && TREE_CODE (arg1) == BIT_AND_EXPR)
6847 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6848 return fold (build2 (BIT_AND_EXPR, type,
6849 fold (build1 (BIT_NOT_EXPR, type,
6850 TREE_OPERAND (arg1, 0))),
6852 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6853 return fold (build2 (BIT_AND_EXPR, type,
6854 fold (build1 (BIT_NOT_EXPR, type,
6855 TREE_OPERAND (arg1, 1))),
6859 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6860 any power of 2 minus 1. */
6861 if (TREE_CODE (arg0) == BIT_AND_EXPR
6862 && TREE_CODE (arg1) == BIT_AND_EXPR
6863 && operand_equal_p (TREE_OPERAND (arg0, 0),
6864 TREE_OPERAND (arg1, 0), 0))
6866 tree mask0 = TREE_OPERAND (arg0, 1);
6867 tree mask1 = TREE_OPERAND (arg1, 1);
6868 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6870 if (operand_equal_p (tem, mask1, 0))
6872 tem = fold (build2 (BIT_XOR_EXPR, type,
6873 TREE_OPERAND (arg0, 0), mask1));
6874 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6879 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6880 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6881 return non_lvalue (fold_convert (type, arg0));
6883 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6884 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6885 (-ARG1 + ARG0) reduces to -ARG1. */
6886 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6887 return negate_expr (fold_convert (type, arg1));
6889 /* Fold &x - &x. This can happen from &x.foo - &x.
6890 This is unsafe for certain floats even in non-IEEE formats.
6891 In IEEE, it is unsafe because it does wrong for NaNs.
6892 Also note that operand_equal_p is always false if an operand
6895 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6896 && operand_equal_p (arg0, arg1, 0))
6897 return fold_convert (type, integer_zero_node);
6899 /* A - B -> A + (-B) if B is easily negatable. */
6900 if (!wins && negate_expr_p (arg1)
6901 && ((FLOAT_TYPE_P (type)
6902 /* Avoid this transformation if B is a positive REAL_CST. */
6903 && (TREE_CODE (arg1) != REAL_CST
6904 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
6905 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6906 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6908 if (TREE_CODE (arg0) == MULT_EXPR
6909 && TREE_CODE (arg1) == MULT_EXPR
6910 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6912 /* (A * C) - (B * C) -> (A-B) * C. */
6913 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6914 TREE_OPERAND (arg1, 1), 0))
6915 return fold (build2 (MULT_EXPR, type,
6916 fold (build2 (MINUS_EXPR, type,
6917 TREE_OPERAND (arg0, 0),
6918 TREE_OPERAND (arg1, 0))),
6919 TREE_OPERAND (arg0, 1)));
6920 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6921 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6922 TREE_OPERAND (arg1, 0), 0))
6923 return fold (build2 (MULT_EXPR, type,
6924 TREE_OPERAND (arg0, 0),
6925 fold (build2 (MINUS_EXPR, type,
6926 TREE_OPERAND (arg0, 1),
6927 TREE_OPERAND (arg1, 1)))));
6933 /* (-A) * (-B) -> A * B */
6934 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6935 return fold (build2 (MULT_EXPR, type,
6936 TREE_OPERAND (arg0, 0),
6937 negate_expr (arg1)));
6938 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6939 return fold (build2 (MULT_EXPR, type,
6941 TREE_OPERAND (arg1, 0)));
6943 if (! FLOAT_TYPE_P (type))
6945 if (integer_zerop (arg1))
6946 return omit_one_operand (type, arg1, arg0);
6947 if (integer_onep (arg1))
6948 return non_lvalue (fold_convert (type, arg0));
6950 /* (a * (1 << b)) is (a << b) */
6951 if (TREE_CODE (arg1) == LSHIFT_EXPR
6952 && integer_onep (TREE_OPERAND (arg1, 0)))
6953 return fold (build2 (LSHIFT_EXPR, type, arg0,
6954 TREE_OPERAND (arg1, 1)));
6955 if (TREE_CODE (arg0) == LSHIFT_EXPR
6956 && integer_onep (TREE_OPERAND (arg0, 0)))
6957 return fold (build2 (LSHIFT_EXPR, type, arg1,
6958 TREE_OPERAND (arg0, 1)));
6960 if (TREE_CODE (arg1) == INTEGER_CST
6961 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6962 fold_convert (type, arg1),
6964 return fold_convert (type, tem);
6969 /* Maybe fold x * 0 to 0. The expressions aren't the same
6970 when x is NaN, since x * 0 is also NaN. Nor are they the
6971 same in modes with signed zeros, since multiplying a
6972 negative value by 0 gives -0, not +0. */
6973 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6974 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6975 && real_zerop (arg1))
6976 return omit_one_operand (type, arg1, arg0);
6977 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6978 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6979 && real_onep (arg1))
6980 return non_lvalue (fold_convert (type, arg0));
6982 /* Transform x * -1.0 into -x. */
6983 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6984 && real_minus_onep (arg1))
6985 return fold_convert (type, negate_expr (arg0));
6987 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6988 if (flag_unsafe_math_optimizations
6989 && TREE_CODE (arg0) == RDIV_EXPR
6990 && TREE_CODE (arg1) == REAL_CST
6991 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6993 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6996 return fold (build2 (RDIV_EXPR, type, tem,
6997 TREE_OPERAND (arg0, 1)));
7000 if (flag_unsafe_math_optimizations)
7002 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7003 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7005 /* Optimizations of root(...)*root(...). */
7006 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7008 tree rootfn, arg, arglist;
7009 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7010 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7012 /* Optimize sqrt(x)*sqrt(x) as x. */
7013 if (BUILTIN_SQRT_P (fcode0)
7014 && operand_equal_p (arg00, arg10, 0)
7015 && ! HONOR_SNANS (TYPE_MODE (type)))
7018 /* Optimize root(x)*root(y) as root(x*y). */
7019 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7020 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7021 arglist = build_tree_list (NULL_TREE, arg);
7022 return build_function_call_expr (rootfn, arglist);
7025 /* Optimize expN(x)*expN(y) as expN(x+y). */
7026 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7028 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7029 tree arg = build2 (PLUS_EXPR, type,
7030 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7031 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7032 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7033 return build_function_call_expr (expfn, arglist);
7036 /* Optimizations of pow(...)*pow(...). */
7037 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7038 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7039 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7041 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7042 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7044 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7045 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7048 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7049 if (operand_equal_p (arg01, arg11, 0))
7051 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7052 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7053 tree arglist = tree_cons (NULL_TREE, fold (arg),
7054 build_tree_list (NULL_TREE,
7056 return build_function_call_expr (powfn, arglist);
7059 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7060 if (operand_equal_p (arg00, arg10, 0))
7062 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7063 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7064 tree arglist = tree_cons (NULL_TREE, arg00,
7065 build_tree_list (NULL_TREE,
7067 return build_function_call_expr (powfn, arglist);
7071 /* Optimize tan(x)*cos(x) as sin(x). */
7072 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7073 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7074 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7075 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7076 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7077 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7078 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7079 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7081 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7083 if (sinfn != NULL_TREE)
7084 return build_function_call_expr (sinfn,
7085 TREE_OPERAND (arg0, 1));
7088 /* Optimize x*pow(x,c) as pow(x,c+1). */
7089 if (fcode1 == BUILT_IN_POW
7090 || fcode1 == BUILT_IN_POWF
7091 || fcode1 == BUILT_IN_POWL)
7093 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7094 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7096 if (TREE_CODE (arg11) == REAL_CST
7097 && ! TREE_CONSTANT_OVERFLOW (arg11)
7098 && operand_equal_p (arg0, arg10, 0))
7100 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7104 c = TREE_REAL_CST (arg11);
7105 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7106 arg = build_real (type, c);
7107 arglist = build_tree_list (NULL_TREE, arg);
7108 arglist = tree_cons (NULL_TREE, arg0, arglist);
7109 return build_function_call_expr (powfn, arglist);
7113 /* Optimize pow(x,c)*x as pow(x,c+1). */
7114 if (fcode0 == BUILT_IN_POW
7115 || fcode0 == BUILT_IN_POWF
7116 || fcode0 == BUILT_IN_POWL)
7118 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7119 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7121 if (TREE_CODE (arg01) == REAL_CST
7122 && ! TREE_CONSTANT_OVERFLOW (arg01)
7123 && operand_equal_p (arg1, arg00, 0))
7125 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7129 c = TREE_REAL_CST (arg01);
7130 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7131 arg = build_real (type, c);
7132 arglist = build_tree_list (NULL_TREE, arg);
7133 arglist = tree_cons (NULL_TREE, arg1, arglist);
7134 return build_function_call_expr (powfn, arglist);
7138 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7140 && operand_equal_p (arg0, arg1, 0))
7142 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7146 tree arg = build_real (type, dconst2);
7147 tree arglist = build_tree_list (NULL_TREE, arg);
7148 arglist = tree_cons (NULL_TREE, arg0, arglist);
7149 return build_function_call_expr (powfn, arglist);
7158 if (integer_all_onesp (arg1))
7159 return omit_one_operand (type, arg1, arg0);
7160 if (integer_zerop (arg1))
7161 return non_lvalue (fold_convert (type, arg0));
7162 if (operand_equal_p (arg0, arg1, 0))
7163 return non_lvalue (fold_convert (type, arg0));
7166 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7167 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7169 t1 = build_int_cst (type, -1);
7170 t1 = force_fit_type (t1, 0, false, false);
7171 return omit_one_operand (type, t1, arg1);
7175 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7176 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7178 t1 = build_int_cst (type, -1);
7179 t1 = force_fit_type (t1, 0, false, false);
7180 return omit_one_operand (type, t1, arg0);
7183 t1 = distribute_bit_expr (code, type, arg0, arg1);
7184 if (t1 != NULL_TREE)
7187 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7189 This results in more efficient code for machines without a NAND
7190 instruction. Combine will canonicalize to the first form
7191 which will allow use of NAND instructions provided by the
7192 backend if they exist. */
7193 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7194 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7196 return fold (build1 (BIT_NOT_EXPR, type,
7197 build2 (BIT_AND_EXPR, type,
7198 TREE_OPERAND (arg0, 0),
7199 TREE_OPERAND (arg1, 0))));
7202 /* See if this can be simplified into a rotate first. If that
7203 is unsuccessful continue in the association code. */
7207 if (integer_zerop (arg1))
7208 return non_lvalue (fold_convert (type, arg0));
7209 if (integer_all_onesp (arg1))
7210 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7211 if (operand_equal_p (arg0, arg1, 0))
7212 return omit_one_operand (type, integer_zero_node, arg0);
7215 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7216 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7218 t1 = build_int_cst (type, -1);
7219 t1 = force_fit_type (t1, 0, false, false);
7220 return omit_one_operand (type, t1, arg1);
7224 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7225 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7227 t1 = build_int_cst (type, -1);
7228 t1 = force_fit_type (t1, 0, false, false);
7229 return omit_one_operand (type, t1, arg0);
7232 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7233 with a constant, and the two constants have no bits in common,
7234 we should treat this as a BIT_IOR_EXPR since this may produce more
7236 if (TREE_CODE (arg0) == BIT_AND_EXPR
7237 && TREE_CODE (arg1) == BIT_AND_EXPR
7238 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7239 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7240 && integer_zerop (const_binop (BIT_AND_EXPR,
7241 TREE_OPERAND (arg0, 1),
7242 TREE_OPERAND (arg1, 1), 0)))
7244 code = BIT_IOR_EXPR;
7248 /* See if this can be simplified into a rotate first. If that
7249 is unsuccessful continue in the association code. */
7253 if (integer_all_onesp (arg1))
7254 return non_lvalue (fold_convert (type, arg0));
7255 if (integer_zerop (arg1))
7256 return omit_one_operand (type, arg1, arg0);
7257 if (operand_equal_p (arg0, arg1, 0))
7258 return non_lvalue (fold_convert (type, arg0));
7260 /* ~X & X is always zero. */
7261 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7262 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7263 return omit_one_operand (type, integer_zero_node, arg1);
7265 /* X & ~X is always zero. */
7266 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7267 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7268 return omit_one_operand (type, integer_zero_node, arg0);
7270 t1 = distribute_bit_expr (code, type, arg0, arg1);
7271 if (t1 != NULL_TREE)
7273 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7274 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7275 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7278 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7280 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7281 && (~TREE_INT_CST_LOW (arg1)
7282 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7283 return fold_convert (type, TREE_OPERAND (arg0, 0));
7286 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7288 This results in more efficient code for machines without a NOR
7289 instruction. Combine will canonicalize to the first form
7290 which will allow use of NOR instructions provided by the
7291 backend if they exist. */
7292 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7293 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7295 return fold (build1 (BIT_NOT_EXPR, type,
7296 build2 (BIT_IOR_EXPR, type,
7297 TREE_OPERAND (arg0, 0),
7298 TREE_OPERAND (arg1, 0))));
7304 /* Don't touch a floating-point divide by zero unless the mode
7305 of the constant can represent infinity. */
7306 if (TREE_CODE (arg1) == REAL_CST
7307 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7308 && real_zerop (arg1))
7311 /* (-A) / (-B) -> A / B */
7312 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7313 return fold (build2 (RDIV_EXPR, type,
7314 TREE_OPERAND (arg0, 0),
7315 negate_expr (arg1)));
7316 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7317 return fold (build2 (RDIV_EXPR, type,
7319 TREE_OPERAND (arg1, 0)));
7321 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7322 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7323 && real_onep (arg1))
7324 return non_lvalue (fold_convert (type, arg0));
7326 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7327 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7328 && real_minus_onep (arg1))
7329 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7331 /* If ARG1 is a constant, we can convert this to a multiply by the
7332 reciprocal. This does not have the same rounding properties,
7333 so only do this if -funsafe-math-optimizations. We can actually
7334 always safely do it if ARG1 is a power of two, but it's hard to
7335 tell if it is or not in a portable manner. */
7336 if (TREE_CODE (arg1) == REAL_CST)
7338 if (flag_unsafe_math_optimizations
7339 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7341 return fold (build2 (MULT_EXPR, type, arg0, tem));
7342 /* Find the reciprocal if optimizing and the result is exact. */
7346 r = TREE_REAL_CST (arg1);
7347 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7349 tem = build_real (type, r);
7350 return fold (build2 (MULT_EXPR, type, arg0, tem));
7354 /* Convert A/B/C to A/(B*C). */
7355 if (flag_unsafe_math_optimizations
7356 && TREE_CODE (arg0) == RDIV_EXPR)
7357 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7358 fold (build2 (MULT_EXPR, type,
7359 TREE_OPERAND (arg0, 1), arg1))));
7361 /* Convert A/(B/C) to (A/B)*C. */
7362 if (flag_unsafe_math_optimizations
7363 && TREE_CODE (arg1) == RDIV_EXPR)
7364 return fold (build2 (MULT_EXPR, type,
7365 fold (build2 (RDIV_EXPR, type, arg0,
7366 TREE_OPERAND (arg1, 0))),
7367 TREE_OPERAND (arg1, 1)));
7369 /* Convert C1/(X*C2) into (C1/C2)/X. */
7370 if (flag_unsafe_math_optimizations
7371 && TREE_CODE (arg1) == MULT_EXPR
7372 && TREE_CODE (arg0) == REAL_CST
7373 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7375 tree tem = const_binop (RDIV_EXPR, arg0,
7376 TREE_OPERAND (arg1, 1), 0);
7378 return fold (build2 (RDIV_EXPR, type, tem,
7379 TREE_OPERAND (arg1, 0)));
7382 if (flag_unsafe_math_optimizations)
7384 enum built_in_function fcode = builtin_mathfn_code (arg1);
7385 /* Optimize x/expN(y) into x*expN(-y). */
7386 if (BUILTIN_EXPONENT_P (fcode))
7388 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7389 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7390 tree arglist = build_tree_list (NULL_TREE,
7391 fold_convert (type, arg));
7392 arg1 = build_function_call_expr (expfn, arglist);
7393 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7396 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7397 if (fcode == BUILT_IN_POW
7398 || fcode == BUILT_IN_POWF
7399 || fcode == BUILT_IN_POWL)
7401 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7402 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7403 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7404 tree neg11 = fold_convert (type, negate_expr (arg11));
7405 tree arglist = tree_cons(NULL_TREE, arg10,
7406 build_tree_list (NULL_TREE, neg11));
7407 arg1 = build_function_call_expr (powfn, arglist);
7408 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7412 if (flag_unsafe_math_optimizations)
7414 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7415 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7417 /* Optimize sin(x)/cos(x) as tan(x). */
7418 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7419 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7420 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7421 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7422 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7424 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7426 if (tanfn != NULL_TREE)
7427 return build_function_call_expr (tanfn,
7428 TREE_OPERAND (arg0, 1));
7431 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7432 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7433 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7434 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7435 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7436 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7438 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7440 if (tanfn != NULL_TREE)
7442 tree tmp = TREE_OPERAND (arg0, 1);
7443 tmp = build_function_call_expr (tanfn, tmp);
7444 return fold (build2 (RDIV_EXPR, type,
7445 build_real (type, dconst1), tmp));
7449 /* Optimize pow(x,c)/x as pow(x,c-1). */
7450 if (fcode0 == BUILT_IN_POW
7451 || fcode0 == BUILT_IN_POWF
7452 || fcode0 == BUILT_IN_POWL)
7454 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7455 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7456 if (TREE_CODE (arg01) == REAL_CST
7457 && ! TREE_CONSTANT_OVERFLOW (arg01)
7458 && operand_equal_p (arg1, arg00, 0))
7460 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7464 c = TREE_REAL_CST (arg01);
7465 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7466 arg = build_real (type, c);
7467 arglist = build_tree_list (NULL_TREE, arg);
7468 arglist = tree_cons (NULL_TREE, arg1, arglist);
7469 return build_function_call_expr (powfn, arglist);
7475 case TRUNC_DIV_EXPR:
7476 case ROUND_DIV_EXPR:
7477 case FLOOR_DIV_EXPR:
7479 case EXACT_DIV_EXPR:
7480 if (integer_onep (arg1))
7481 return non_lvalue (fold_convert (type, arg0));
7482 if (integer_zerop (arg1))
7485 if (!TYPE_UNSIGNED (type)
7486 && TREE_CODE (arg1) == INTEGER_CST
7487 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7488 && TREE_INT_CST_HIGH (arg1) == -1)
7489 return fold_convert (type, negate_expr (arg0));
7491 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7492 operation, EXACT_DIV_EXPR.
7494 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7495 At one time others generated faster code, it's not clear if they do
7496 after the last round to changes to the DIV code in expmed.c. */
7497 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7498 && multiple_of_p (type, arg0, arg1))
7499 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7501 if (TREE_CODE (arg1) == INTEGER_CST
7502 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7504 return fold_convert (type, tem);
7509 case FLOOR_MOD_EXPR:
7510 case ROUND_MOD_EXPR:
7511 case TRUNC_MOD_EXPR:
7512 if (integer_onep (arg1))
7513 return omit_one_operand (type, integer_zero_node, arg0);
7514 if (integer_zerop (arg1))
7517 /* X % -1 is zero. */
7518 if (!TYPE_UNSIGNED (type)
7519 && TREE_CODE (arg1) == INTEGER_CST
7520 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7521 && TREE_INT_CST_HIGH (arg1) == -1)
7522 return omit_one_operand (type, integer_zero_node, arg0);
7524 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7525 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7526 if (code == TRUNC_MOD_EXPR
7527 && TYPE_UNSIGNED (type)
7528 && integer_pow2p (arg1))
7530 unsigned HOST_WIDE_INT high, low;
7534 l = tree_log2 (arg1);
7535 if (l >= HOST_BITS_PER_WIDE_INT)
7537 high = ((unsigned HOST_WIDE_INT) 1
7538 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7544 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7547 mask = build_int_cst_wide (type, low, high);
7548 return fold (build2 (BIT_AND_EXPR, type,
7549 fold_convert (type, arg0), mask));
7552 /* X % -C is the same as X % C. */
7553 if (code == TRUNC_MOD_EXPR
7554 && !TYPE_UNSIGNED (type)
7555 && TREE_CODE (arg1) == INTEGER_CST
7556 && TREE_INT_CST_HIGH (arg1) < 0
7558 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7559 && !sign_bit_p (arg1, arg1))
7560 return fold (build2 (code, type, fold_convert (type, arg0),
7561 fold_convert (type, negate_expr (arg1))));
7563 /* X % -Y is the same as X % Y. */
7564 if (code == TRUNC_MOD_EXPR
7565 && !TYPE_UNSIGNED (type)
7566 && TREE_CODE (arg1) == NEGATE_EXPR
7568 return fold (build2 (code, type, fold_convert (type, arg0),
7569 fold_convert (type, TREE_OPERAND (arg1, 0))));
7571 if (TREE_CODE (arg1) == INTEGER_CST
7572 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7574 return fold_convert (type, tem);
7580 if (integer_all_onesp (arg0))
7581 return omit_one_operand (type, arg0, arg1);
7585 /* Optimize -1 >> x for arithmetic right shifts. */
7586 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7587 return omit_one_operand (type, arg0, arg1);
7588 /* ... fall through ... */
7592 if (integer_zerop (arg1))
7593 return non_lvalue (fold_convert (type, arg0));
7594 if (integer_zerop (arg0))
7595 return omit_one_operand (type, arg0, arg1);
7597 /* Since negative shift count is not well-defined,
7598 don't try to compute it in the compiler. */
7599 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7601 /* Rewrite an LROTATE_EXPR by a constant into an
7602 RROTATE_EXPR by a new constant. */
7603 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7605 tree tem = build_int_cst (NULL_TREE,
7606 GET_MODE_BITSIZE (TYPE_MODE (type)));
7607 tem = fold_convert (TREE_TYPE (arg1), tem);
7608 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7609 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7612 /* If we have a rotate of a bit operation with the rotate count and
7613 the second operand of the bit operation both constant,
7614 permute the two operations. */
7615 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7616 && (TREE_CODE (arg0) == BIT_AND_EXPR
7617 || TREE_CODE (arg0) == BIT_IOR_EXPR
7618 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7619 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7620 return fold (build2 (TREE_CODE (arg0), type,
7621 fold (build2 (code, type,
7622 TREE_OPERAND (arg0, 0), arg1)),
7623 fold (build2 (code, type,
7624 TREE_OPERAND (arg0, 1), arg1))));
7626 /* Two consecutive rotates adding up to the width of the mode can
7628 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7629 && TREE_CODE (arg0) == RROTATE_EXPR
7630 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7631 && TREE_INT_CST_HIGH (arg1) == 0
7632 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7633 && ((TREE_INT_CST_LOW (arg1)
7634 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7635 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7636 return TREE_OPERAND (arg0, 0);
7641 if (operand_equal_p (arg0, arg1, 0))
7642 return omit_one_operand (type, arg0, arg1);
7643 if (INTEGRAL_TYPE_P (type)
7644 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7645 return omit_one_operand (type, arg1, arg0);
7649 if (operand_equal_p (arg0, arg1, 0))
7650 return omit_one_operand (type, arg0, arg1);
7651 if (INTEGRAL_TYPE_P (type)
7652 && TYPE_MAX_VALUE (type)
7653 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7654 return omit_one_operand (type, arg1, arg0);
7657 case TRUTH_NOT_EXPR:
7658 /* The argument to invert_truthvalue must have Boolean type. */
7659 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7660 arg0 = fold_convert (boolean_type_node, arg0);
7662 /* Note that the operand of this must be an int
7663 and its values must be 0 or 1.
7664 ("true" is a fixed value perhaps depending on the language,
7665 but we don't handle values other than 1 correctly yet.) */
7666 tem = invert_truthvalue (arg0);
7667 /* Avoid infinite recursion. */
7668 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7670 tem = fold_single_bit_test (code, arg0, arg1, type);
7675 return fold_convert (type, tem);
7677 case TRUTH_ANDIF_EXPR:
7678 /* Note that the operands of this must be ints
7679 and their values must be 0 or 1.
7680 ("true" is a fixed value perhaps depending on the language.) */
7681 /* If first arg is constant zero, return it. */
7682 if (integer_zerop (arg0))
7683 return fold_convert (type, arg0);
7684 case TRUTH_AND_EXPR:
7685 /* If either arg is constant true, drop it. */
7686 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7687 return non_lvalue (fold_convert (type, arg1));
7688 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7689 /* Preserve sequence points. */
7690 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7691 return non_lvalue (fold_convert (type, arg0));
7692 /* If second arg is constant zero, result is zero, but first arg
7693 must be evaluated. */
7694 if (integer_zerop (arg1))
7695 return omit_one_operand (type, arg1, arg0);
7696 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7697 case will be handled here. */
7698 if (integer_zerop (arg0))
7699 return omit_one_operand (type, arg0, arg1);
7701 /* !X && X is always false. */
7702 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7703 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7704 return omit_one_operand (type, integer_zero_node, arg1);
7705 /* X && !X is always false. */
7706 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7707 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7708 return omit_one_operand (type, integer_zero_node, arg0);
7711 /* We only do these simplifications if we are optimizing. */
7715 /* Check for things like (A || B) && (A || C). We can convert this
7716 to A || (B && C). Note that either operator can be any of the four
7717 truth and/or operations and the transformation will still be
7718 valid. Also note that we only care about order for the
7719 ANDIF and ORIF operators. If B contains side effects, this
7720 might change the truth-value of A. */
7721 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7722 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7723 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7724 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7725 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7726 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7728 tree a00 = TREE_OPERAND (arg0, 0);
7729 tree a01 = TREE_OPERAND (arg0, 1);
7730 tree a10 = TREE_OPERAND (arg1, 0);
7731 tree a11 = TREE_OPERAND (arg1, 1);
7732 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7733 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7734 && (code == TRUTH_AND_EXPR
7735 || code == TRUTH_OR_EXPR));
7737 if (operand_equal_p (a00, a10, 0))
7738 return fold (build2 (TREE_CODE (arg0), type, a00,
7739 fold (build2 (code, type, a01, a11))));
7740 else if (commutative && operand_equal_p (a00, a11, 0))
7741 return fold (build2 (TREE_CODE (arg0), type, a00,
7742 fold (build2 (code, type, a01, a10))));
7743 else if (commutative && operand_equal_p (a01, a10, 0))
7744 return fold (build2 (TREE_CODE (arg0), type, a01,
7745 fold (build2 (code, type, a00, a11))));
7747 /* This case is tricky because we must either have commutative
7748 operators or else A10 must not have side-effects. */
7750 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7751 && operand_equal_p (a01, a11, 0))
7752 return fold (build2 (TREE_CODE (arg0), type,
7753 fold (build2 (code, type, a00, a10)),
7757 /* See if we can build a range comparison. */
7758 if (0 != (tem = fold_range_test (t)))
7761 /* Check for the possibility of merging component references. If our
7762 lhs is another similar operation, try to merge its rhs with our
7763 rhs. Then try to merge our lhs and rhs. */
7764 if (TREE_CODE (arg0) == code
7765 && 0 != (tem = fold_truthop (code, type,
7766 TREE_OPERAND (arg0, 1), arg1)))
7767 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7769 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7774 case TRUTH_ORIF_EXPR:
7775 /* Note that the operands of this must be ints
7776 and their values must be 0 or true.
7777 ("true" is a fixed value perhaps depending on the language.) */
7778 /* If first arg is constant true, return it. */
7779 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7780 return fold_convert (type, arg0);
7782 /* If either arg is constant zero, drop it. */
7783 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7784 return non_lvalue (fold_convert (type, arg1));
7785 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7786 /* Preserve sequence points. */
7787 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7788 return non_lvalue (fold_convert (type, arg0));
7789 /* If second arg is constant true, result is true, but we must
7790 evaluate first arg. */
7791 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7792 return omit_one_operand (type, arg1, arg0);
7793 /* Likewise for first arg, but note this only occurs here for
7795 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7796 return omit_one_operand (type, arg0, arg1);
7798 /* !X || X is always true. */
7799 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7800 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7801 return omit_one_operand (type, integer_one_node, arg1);
7802 /* X || !X is always true. */
7803 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7804 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7805 return omit_one_operand (type, integer_one_node, arg0);
7809 case TRUTH_XOR_EXPR:
7810 /* If the second arg is constant zero, drop it. */
7811 if (integer_zerop (arg1))
7812 return non_lvalue (fold_convert (type, arg0));
7813 /* If the second arg is constant true, this is a logical inversion. */
7814 if (integer_onep (arg1))
7815 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7816 /* Identical arguments cancel to zero. */
7817 if (operand_equal_p (arg0, arg1, 0))
7818 return omit_one_operand (type, integer_zero_node, arg0);
7820 /* !X ^ X is always true. */
7821 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7822 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7823 return omit_one_operand (type, integer_one_node, arg1);
7825 /* X ^ !X is always true. */
7826 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7827 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7828 return omit_one_operand (type, integer_one_node, arg0);
7838 /* If one arg is a real or integer constant, put it last. */
7839 if (tree_swap_operands_p (arg0, arg1, true))
7840 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7842 /* If this is an equality comparison of the address of a non-weak
7843 object against zero, then we know the result. */
7844 if ((code == EQ_EXPR || code == NE_EXPR)
7845 && TREE_CODE (arg0) == ADDR_EXPR
7846 && DECL_P (TREE_OPERAND (arg0, 0))
7847 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7848 && integer_zerop (arg1))
7849 return constant_boolean_node (code != EQ_EXPR, type);
7851 /* If this is an equality comparison of the address of two non-weak,
7852 unaliased symbols neither of which are extern (since we do not
7853 have access to attributes for externs), then we know the result. */
7854 if ((code == EQ_EXPR || code == NE_EXPR)
7855 && TREE_CODE (arg0) == ADDR_EXPR
7856 && DECL_P (TREE_OPERAND (arg0, 0))
7857 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7858 && ! lookup_attribute ("alias",
7859 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7860 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7861 && TREE_CODE (arg1) == ADDR_EXPR
7862 && DECL_P (TREE_OPERAND (arg1, 0))
7863 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7864 && ! lookup_attribute ("alias",
7865 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7866 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7867 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7868 ? code == EQ_EXPR : code != EQ_EXPR,
7871 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7873 tree targ0 = strip_float_extensions (arg0);
7874 tree targ1 = strip_float_extensions (arg1);
7875 tree newtype = TREE_TYPE (targ0);
7877 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7878 newtype = TREE_TYPE (targ1);
7880 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7881 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7882 return fold (build2 (code, type, fold_convert (newtype, targ0),
7883 fold_convert (newtype, targ1)));
7885 /* (-a) CMP (-b) -> b CMP a */
7886 if (TREE_CODE (arg0) == NEGATE_EXPR
7887 && TREE_CODE (arg1) == NEGATE_EXPR)
7888 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7889 TREE_OPERAND (arg0, 0)));
7891 if (TREE_CODE (arg1) == REAL_CST)
7893 REAL_VALUE_TYPE cst;
7894 cst = TREE_REAL_CST (arg1);
7896 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7897 if (TREE_CODE (arg0) == NEGATE_EXPR)
7899 fold (build2 (swap_tree_comparison (code), type,
7900 TREE_OPERAND (arg0, 0),
7901 build_real (TREE_TYPE (arg1),
7902 REAL_VALUE_NEGATE (cst))));
7904 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7905 /* a CMP (-0) -> a CMP 0 */
7906 if (REAL_VALUE_MINUS_ZERO (cst))
7907 return fold (build2 (code, type, arg0,
7908 build_real (TREE_TYPE (arg1), dconst0)));
7910 /* x != NaN is always true, other ops are always false. */
7911 if (REAL_VALUE_ISNAN (cst)
7912 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7914 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7915 return omit_one_operand (type, tem, arg0);
7918 /* Fold comparisons against infinity. */
7919 if (REAL_VALUE_ISINF (cst))
7921 tem = fold_inf_compare (code, type, arg0, arg1);
7922 if (tem != NULL_TREE)
7927 /* If this is a comparison of a real constant with a PLUS_EXPR
7928 or a MINUS_EXPR of a real constant, we can convert it into a
7929 comparison with a revised real constant as long as no overflow
7930 occurs when unsafe_math_optimizations are enabled. */
7931 if (flag_unsafe_math_optimizations
7932 && TREE_CODE (arg1) == REAL_CST
7933 && (TREE_CODE (arg0) == PLUS_EXPR
7934 || TREE_CODE (arg0) == MINUS_EXPR)
7935 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7936 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7937 ? MINUS_EXPR : PLUS_EXPR,
7938 arg1, TREE_OPERAND (arg0, 1), 0))
7939 && ! TREE_CONSTANT_OVERFLOW (tem))
7940 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7942 /* Likewise, we can simplify a comparison of a real constant with
7943 a MINUS_EXPR whose first operand is also a real constant, i.e.
7944 (c1 - x) < c2 becomes x > c1-c2. */
7945 if (flag_unsafe_math_optimizations
7946 && TREE_CODE (arg1) == REAL_CST
7947 && TREE_CODE (arg0) == MINUS_EXPR
7948 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7949 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7951 && ! TREE_CONSTANT_OVERFLOW (tem))
7952 return fold (build2 (swap_tree_comparison (code), type,
7953 TREE_OPERAND (arg0, 1), tem));
7955 /* Fold comparisons against built-in math functions. */
7956 if (TREE_CODE (arg1) == REAL_CST
7957 && flag_unsafe_math_optimizations
7958 && ! flag_errno_math)
7960 enum built_in_function fcode = builtin_mathfn_code (arg0);
7962 if (fcode != END_BUILTINS)
7964 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7965 if (tem != NULL_TREE)
7971 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7972 if (TREE_CONSTANT (arg1)
7973 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7974 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7975 /* This optimization is invalid for ordered comparisons
7976 if CONST+INCR overflows or if foo+incr might overflow.
7977 This optimization is invalid for floating point due to rounding.
7978 For pointer types we assume overflow doesn't happen. */
7979 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7980 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7981 && (code == EQ_EXPR || code == NE_EXPR))))
7983 tree varop, newconst;
7985 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7987 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7988 arg1, TREE_OPERAND (arg0, 1)));
7989 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7990 TREE_OPERAND (arg0, 0),
7991 TREE_OPERAND (arg0, 1));
7995 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7996 arg1, TREE_OPERAND (arg0, 1)));
7997 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7998 TREE_OPERAND (arg0, 0),
7999 TREE_OPERAND (arg0, 1));
8003 /* If VAROP is a reference to a bitfield, we must mask
8004 the constant by the width of the field. */
8005 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8006 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8007 && host_integerp (DECL_SIZE (TREE_OPERAND
8008 (TREE_OPERAND (varop, 0), 1)), 1))
8010 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8011 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8012 tree folded_compare, shift;
8014 /* First check whether the comparison would come out
8015 always the same. If we don't do that we would
8016 change the meaning with the masking. */
8017 folded_compare = fold (build2 (code, type,
8018 TREE_OPERAND (varop, 0), arg1));
8019 if (integer_zerop (folded_compare)
8020 || integer_onep (folded_compare))
8021 return omit_one_operand (type, folded_compare, varop);
8023 shift = build_int_cst (NULL_TREE,
8024 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8025 shift = fold_convert (TREE_TYPE (varop), shift);
8026 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8028 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8032 return fold (build2 (code, type, varop, newconst));
8035 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8036 This transformation affects the cases which are handled in later
8037 optimizations involving comparisons with non-negative constants. */
8038 if (TREE_CODE (arg1) == INTEGER_CST
8039 && TREE_CODE (arg0) != INTEGER_CST
8040 && tree_int_cst_sgn (arg1) > 0)
8045 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8046 return fold (build2 (GT_EXPR, type, arg0, arg1));
8049 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8050 return fold (build2 (LE_EXPR, type, arg0, arg1));
8057 /* Comparisons with the highest or lowest possible integer of
8058 the specified size will have known values.
8060 This is quite similar to fold_relational_hi_lo; however, my
8061 attempts to share the code have been nothing but trouble.
8062 I give up for now. */
8064 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8066 if (TREE_CODE (arg1) == INTEGER_CST
8067 && ! TREE_CONSTANT_OVERFLOW (arg1)
8068 && width <= HOST_BITS_PER_WIDE_INT
8069 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8070 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8072 unsigned HOST_WIDE_INT signed_max;
8073 unsigned HOST_WIDE_INT max, min;
8075 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8077 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8079 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8085 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8088 if (TREE_INT_CST_HIGH (arg1) == 0
8089 && TREE_INT_CST_LOW (arg1) == max)
8093 return omit_one_operand (type, integer_zero_node, arg0);
8096 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8099 return omit_one_operand (type, integer_one_node, arg0);
8102 return fold (build2 (NE_EXPR, type, arg0, arg1));
8104 /* The GE_EXPR and LT_EXPR cases above are not normally
8105 reached because of previous transformations. */
8110 else if (TREE_INT_CST_HIGH (arg1) == 0
8111 && TREE_INT_CST_LOW (arg1) == max - 1)
8115 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8116 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8118 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8119 return fold (build2 (NE_EXPR, type, arg0, arg1));
8123 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8124 && TREE_INT_CST_LOW (arg1) == min)
8128 return omit_one_operand (type, integer_zero_node, arg0);
8131 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8134 return omit_one_operand (type, integer_one_node, arg0);
8137 return fold (build2 (NE_EXPR, type, arg0, arg1));
8142 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8143 && TREE_INT_CST_LOW (arg1) == min + 1)
8147 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8148 return fold (build2 (NE_EXPR, type, arg0, arg1));
8150 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8151 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8156 else if (!in_gimple_form
8157 && TREE_INT_CST_HIGH (arg1) == 0
8158 && TREE_INT_CST_LOW (arg1) == signed_max
8159 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8160 /* signed_type does not work on pointer types. */
8161 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8163 /* The following case also applies to X < signed_max+1
8164 and X >= signed_max+1 because of previous transformations. */
8165 if (code == LE_EXPR || code == GT_EXPR)
8168 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8169 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8171 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8172 type, fold_convert (st0, arg0),
8173 fold_convert (st1, integer_zero_node)));
8179 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8180 a MINUS_EXPR of a constant, we can convert it into a comparison with
8181 a revised constant as long as no overflow occurs. */
8182 if ((code == EQ_EXPR || code == NE_EXPR)
8183 && TREE_CODE (arg1) == INTEGER_CST
8184 && (TREE_CODE (arg0) == PLUS_EXPR
8185 || TREE_CODE (arg0) == MINUS_EXPR)
8186 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8187 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8188 ? MINUS_EXPR : PLUS_EXPR,
8189 arg1, TREE_OPERAND (arg0, 1), 0))
8190 && ! TREE_CONSTANT_OVERFLOW (tem))
8191 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8193 /* Similarly for a NEGATE_EXPR. */
8194 else if ((code == EQ_EXPR || code == NE_EXPR)
8195 && TREE_CODE (arg0) == NEGATE_EXPR
8196 && TREE_CODE (arg1) == INTEGER_CST
8197 && 0 != (tem = negate_expr (arg1))
8198 && TREE_CODE (tem) == INTEGER_CST
8199 && ! TREE_CONSTANT_OVERFLOW (tem))
8200 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8202 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8203 for !=. Don't do this for ordered comparisons due to overflow. */
8204 else if ((code == NE_EXPR || code == EQ_EXPR)
8205 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8206 return fold (build2 (code, type,
8207 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8209 /* If we are widening one operand of an integer comparison,
8210 see if the other operand is similarly being widened. Perhaps we
8211 can do the comparison in the narrower type. */
8212 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8213 && TREE_CODE (arg0) == NOP_EXPR
8214 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
8215 && (code == EQ_EXPR || code == NE_EXPR
8216 || TYPE_UNSIGNED (TREE_TYPE (arg0))
8217 == TYPE_UNSIGNED (TREE_TYPE (tem)))
8218 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
8219 && (TREE_TYPE (t1) == TREE_TYPE (tem)
8220 || (TREE_CODE (t1) == INTEGER_CST
8221 && int_fits_type_p (t1, TREE_TYPE (tem)))))
8222 return fold (build2 (code, type, tem,
8223 fold_convert (TREE_TYPE (tem), t1)));
8225 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8226 constant, we can simplify it. */
8227 else if (TREE_CODE (arg1) == INTEGER_CST
8228 && (TREE_CODE (arg0) == MIN_EXPR
8229 || TREE_CODE (arg0) == MAX_EXPR)
8230 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8231 return optimize_minmax_comparison (t);
8233 /* If we are comparing an ABS_EXPR with a constant, we can
8234 convert all the cases into explicit comparisons, but they may
8235 well not be faster than doing the ABS and one comparison.
8236 But ABS (X) <= C is a range comparison, which becomes a subtraction
8237 and a comparison, and is probably faster. */
8238 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8239 && TREE_CODE (arg0) == ABS_EXPR
8240 && ! TREE_SIDE_EFFECTS (arg0)
8241 && (0 != (tem = negate_expr (arg1)))
8242 && TREE_CODE (tem) == INTEGER_CST
8243 && ! TREE_CONSTANT_OVERFLOW (tem))
8244 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8245 build2 (GE_EXPR, type,
8246 TREE_OPERAND (arg0, 0), tem),
8247 build2 (LE_EXPR, type,
8248 TREE_OPERAND (arg0, 0), arg1)));
8250 /* If this is an EQ or NE comparison with zero and ARG0 is
8251 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8252 two operations, but the latter can be done in one less insn
8253 on machines that have only two-operand insns or on which a
8254 constant cannot be the first operand. */
8255 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8256 && TREE_CODE (arg0) == BIT_AND_EXPR)
8258 tree arg00 = TREE_OPERAND (arg0, 0);
8259 tree arg01 = TREE_OPERAND (arg0, 1);
8260 if (TREE_CODE (arg00) == LSHIFT_EXPR
8261 && integer_onep (TREE_OPERAND (arg00, 0)))
8263 fold (build2 (code, type,
8264 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8265 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8266 arg01, TREE_OPERAND (arg00, 1)),
8267 fold_convert (TREE_TYPE (arg0),
8270 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8271 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8273 fold (build2 (code, type,
8274 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8275 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8276 arg00, TREE_OPERAND (arg01, 1)),
8277 fold_convert (TREE_TYPE (arg0),
8282 /* If this is an NE or EQ comparison of zero against the result of a
8283 signed MOD operation whose second operand is a power of 2, make
8284 the MOD operation unsigned since it is simpler and equivalent. */
8285 if ((code == NE_EXPR || code == EQ_EXPR)
8286 && integer_zerop (arg1)
8287 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8288 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8289 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8290 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8291 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8292 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8294 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8295 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8296 fold_convert (newtype,
8297 TREE_OPERAND (arg0, 0)),
8298 fold_convert (newtype,
8299 TREE_OPERAND (arg0, 1))));
8301 return fold (build2 (code, type, newmod,
8302 fold_convert (newtype, arg1)));
8305 /* If this is an NE comparison of zero with an AND of one, remove the
8306 comparison since the AND will give the correct value. */
8307 if (code == NE_EXPR && integer_zerop (arg1)
8308 && TREE_CODE (arg0) == BIT_AND_EXPR
8309 && integer_onep (TREE_OPERAND (arg0, 1)))
8310 return fold_convert (type, arg0);
8312 /* If we have (A & C) == C where C is a power of 2, convert this into
8313 (A & C) != 0. Similarly for NE_EXPR. */
8314 if ((code == EQ_EXPR || code == NE_EXPR)
8315 && TREE_CODE (arg0) == BIT_AND_EXPR
8316 && integer_pow2p (TREE_OPERAND (arg0, 1))
8317 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8318 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8319 arg0, fold_convert (TREE_TYPE (arg0),
8320 integer_zero_node)));
8322 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8323 2, then fold the expression into shifts and logical operations. */
8324 tem = fold_single_bit_test (code, arg0, arg1, type);
8328 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8329 Similarly for NE_EXPR. */
8330 if ((code == EQ_EXPR || code == NE_EXPR)
8331 && TREE_CODE (arg0) == BIT_AND_EXPR
8332 && TREE_CODE (arg1) == INTEGER_CST
8333 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8336 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8337 arg1, build1 (BIT_NOT_EXPR,
8338 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8339 TREE_OPERAND (arg0, 1))));
8340 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8341 if (integer_nonzerop (dandnotc))
8342 return omit_one_operand (type, rslt, arg0);
8345 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8346 Similarly for NE_EXPR. */
8347 if ((code == EQ_EXPR || code == NE_EXPR)
8348 && TREE_CODE (arg0) == BIT_IOR_EXPR
8349 && TREE_CODE (arg1) == INTEGER_CST
8350 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8353 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8354 TREE_OPERAND (arg0, 1),
8355 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
8356 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8357 if (integer_nonzerop (candnotd))
8358 return omit_one_operand (type, rslt, arg0);
8361 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8362 and similarly for >= into !=. */
8363 if ((code == LT_EXPR || code == GE_EXPR)
8364 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8365 && TREE_CODE (arg1) == LSHIFT_EXPR
8366 && integer_onep (TREE_OPERAND (arg1, 0)))
8367 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8368 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8369 TREE_OPERAND (arg1, 1)),
8370 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8372 else if ((code == LT_EXPR || code == GE_EXPR)
8373 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8374 && (TREE_CODE (arg1) == NOP_EXPR
8375 || TREE_CODE (arg1) == CONVERT_EXPR)
8376 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8377 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8379 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8380 fold_convert (TREE_TYPE (arg0),
8381 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8382 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8384 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8386 /* Simplify comparison of something with itself. (For IEEE
8387 floating-point, we can only do some of these simplifications.) */
8388 if (operand_equal_p (arg0, arg1, 0))
8393 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8394 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8395 return constant_boolean_node (1, type);
8400 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8401 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8402 return constant_boolean_node (1, type);
8403 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8406 /* For NE, we can only do this simplification if integer
8407 or we don't honor IEEE floating point NaNs. */
8408 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8409 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8411 /* ... fall through ... */
8414 return constant_boolean_node (0, type);
8420 /* If we are comparing an expression that just has comparisons
8421 of two integer values, arithmetic expressions of those comparisons,
8422 and constants, we can simplify it. There are only three cases
8423 to check: the two values can either be equal, the first can be
8424 greater, or the second can be greater. Fold the expression for
8425 those three values. Since each value must be 0 or 1, we have
8426 eight possibilities, each of which corresponds to the constant 0
8427 or 1 or one of the six possible comparisons.
8429 This handles common cases like (a > b) == 0 but also handles
8430 expressions like ((x > y) - (y > x)) > 0, which supposedly
8431 occur in macroized code. */
8433 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8435 tree cval1 = 0, cval2 = 0;
8438 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8439 /* Don't handle degenerate cases here; they should already
8440 have been handled anyway. */
8441 && cval1 != 0 && cval2 != 0
8442 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8443 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8444 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8445 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8446 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8447 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8448 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8450 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8451 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8453 /* We can't just pass T to eval_subst in case cval1 or cval2
8454 was the same as ARG1. */
8457 = fold (build2 (code, type,
8458 eval_subst (arg0, cval1, maxval,
8462 = fold (build2 (code, type,
8463 eval_subst (arg0, cval1, maxval,
8467 = fold (build2 (code, type,
8468 eval_subst (arg0, cval1, minval,
8472 /* All three of these results should be 0 or 1. Confirm they
8473 are. Then use those values to select the proper code
8476 if ((integer_zerop (high_result)
8477 || integer_onep (high_result))
8478 && (integer_zerop (equal_result)
8479 || integer_onep (equal_result))
8480 && (integer_zerop (low_result)
8481 || integer_onep (low_result)))
8483 /* Make a 3-bit mask with the high-order bit being the
8484 value for `>', the next for '=', and the low for '<'. */
8485 switch ((integer_onep (high_result) * 4)
8486 + (integer_onep (equal_result) * 2)
8487 + integer_onep (low_result))
8491 return omit_one_operand (type, integer_zero_node, arg0);
8512 return omit_one_operand (type, integer_one_node, arg0);
8515 tem = build2 (code, type, cval1, cval2);
8517 return save_expr (tem);
8524 /* If this is a comparison of a field, we may be able to simplify it. */
8525 if (((TREE_CODE (arg0) == COMPONENT_REF
8526 && lang_hooks.can_use_bit_fields_p ())
8527 || TREE_CODE (arg0) == BIT_FIELD_REF)
8528 && (code == EQ_EXPR || code == NE_EXPR)
8529 /* Handle the constant case even without -O
8530 to make sure the warnings are given. */
8531 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8533 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8538 /* If this is a comparison of complex values and either or both sides
8539 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8540 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8541 This may prevent needless evaluations. */
8542 if ((code == EQ_EXPR || code == NE_EXPR)
8543 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8544 && (TREE_CODE (arg0) == COMPLEX_EXPR
8545 || TREE_CODE (arg1) == COMPLEX_EXPR
8546 || TREE_CODE (arg0) == COMPLEX_CST
8547 || TREE_CODE (arg1) == COMPLEX_CST))
8549 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8550 tree real0, imag0, real1, imag1;
8552 arg0 = save_expr (arg0);
8553 arg1 = save_expr (arg1);
8554 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8555 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8556 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8557 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8559 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8562 fold (build2 (code, type, real0, real1)),
8563 fold (build2 (code, type, imag0, imag1))));
8566 /* Optimize comparisons of strlen vs zero to a compare of the
8567 first character of the string vs zero. To wit,
8568 strlen(ptr) == 0 => *ptr == 0
8569 strlen(ptr) != 0 => *ptr != 0
8570 Other cases should reduce to one of these two (or a constant)
8571 due to the return value of strlen being unsigned. */
8572 if ((code == EQ_EXPR || code == NE_EXPR)
8573 && integer_zerop (arg1)
8574 && TREE_CODE (arg0) == CALL_EXPR)
8576 tree fndecl = get_callee_fndecl (arg0);
8580 && DECL_BUILT_IN (fndecl)
8581 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8582 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8583 && (arglist = TREE_OPERAND (arg0, 1))
8584 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8585 && ! TREE_CHAIN (arglist))
8586 return fold (build2 (code, type,
8587 build1 (INDIRECT_REF, char_type_node,
8588 TREE_VALUE (arglist)),
8589 fold_convert (char_type_node,
8590 integer_zero_node)));
8593 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8594 into a single range test. */
8595 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8596 && TREE_CODE (arg1) == INTEGER_CST
8597 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8598 && !integer_zerop (TREE_OPERAND (arg0, 1))
8599 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8600 && !TREE_OVERFLOW (arg1))
8602 t1 = fold_div_compare (code, type, arg0, arg1);
8603 if (t1 != NULL_TREE)
8607 if ((code == EQ_EXPR || code == NE_EXPR)
8608 && !TREE_SIDE_EFFECTS (arg0)
8609 && integer_zerop (arg1)
8610 && tree_expr_nonzero_p (arg0))
8611 return constant_boolean_node (code==NE_EXPR, type);
8613 t1 = fold_relational_const (code, type, arg0, arg1);
8614 return t1 == NULL_TREE ? t : t1;
8616 case UNORDERED_EXPR:
8624 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8626 t1 = fold_relational_const (code, type, arg0, arg1);
8627 if (t1 != NULL_TREE)
8631 /* If the first operand is NaN, the result is constant. */
8632 if (TREE_CODE (arg0) == REAL_CST
8633 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8634 && (code != LTGT_EXPR || ! flag_trapping_math))
8636 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8639 return omit_one_operand (type, t1, arg1);
8642 /* If the second operand is NaN, the result is constant. */
8643 if (TREE_CODE (arg1) == REAL_CST
8644 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
8645 && (code != LTGT_EXPR || ! flag_trapping_math))
8647 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8650 return omit_one_operand (type, t1, arg0);
8653 /* Simplify unordered comparison of something with itself. */
8654 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
8655 && operand_equal_p (arg0, arg1, 0))
8656 return constant_boolean_node (1, type);
8658 if (code == LTGT_EXPR
8659 && !flag_trapping_math
8660 && operand_equal_p (arg0, arg1, 0))
8661 return constant_boolean_node (0, type);
8663 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8665 tree targ0 = strip_float_extensions (arg0);
8666 tree targ1 = strip_float_extensions (arg1);
8667 tree newtype = TREE_TYPE (targ0);
8669 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8670 newtype = TREE_TYPE (targ1);
8672 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8673 return fold (build2 (code, type, fold_convert (newtype, targ0),
8674 fold_convert (newtype, targ1)));
8680 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8681 so all simple results must be passed through pedantic_non_lvalue. */
8682 if (TREE_CODE (arg0) == INTEGER_CST)
8684 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8685 /* Only optimize constant conditions when the selected branch
8686 has the same type as the COND_EXPR. This avoids optimizing
8687 away "c ? x : throw", where the throw has a void type. */
8688 if (! VOID_TYPE_P (TREE_TYPE (tem))
8689 || VOID_TYPE_P (type))
8690 return pedantic_non_lvalue (tem);
8693 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8694 return pedantic_omit_one_operand (type, arg1, arg0);
8696 /* If we have A op B ? A : C, we may be able to convert this to a
8697 simpler expression, depending on the operation and the values
8698 of B and C. Signed zeros prevent all of these transformations,
8699 for reasons given above each one.
8701 Also try swapping the arguments and inverting the conditional. */
8702 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8703 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8704 arg1, TREE_OPERAND (arg0, 1))
8705 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8707 tem = fold_cond_expr_with_comparison (type, arg0,
8708 TREE_OPERAND (t, 1),
8709 TREE_OPERAND (t, 2));
8714 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8715 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8716 TREE_OPERAND (t, 2),
8717 TREE_OPERAND (arg0, 1))
8718 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
8720 tem = invert_truthvalue (arg0);
8721 if (TREE_CODE_CLASS (TREE_CODE (tem)) == '<')
8723 tem = fold_cond_expr_with_comparison (type, tem,
8724 TREE_OPERAND (t, 2),
8725 TREE_OPERAND (t, 1));
8731 /* If the second operand is simpler than the third, swap them
8732 since that produces better jump optimization results. */
8733 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8734 TREE_OPERAND (t, 2), false))
8736 /* See if this can be inverted. If it can't, possibly because
8737 it was a floating-point inequality comparison, don't do
8739 tem = invert_truthvalue (arg0);
8741 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8742 return fold (build3 (code, type, tem,
8743 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8746 /* Convert A ? 1 : 0 to simply A. */
8747 if (integer_onep (TREE_OPERAND (t, 1))
8748 && integer_zerop (TREE_OPERAND (t, 2))
8749 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8750 call to fold will try to move the conversion inside
8751 a COND, which will recurse. In that case, the COND_EXPR
8752 is probably the best choice, so leave it alone. */
8753 && type == TREE_TYPE (arg0))
8754 return pedantic_non_lvalue (arg0);
8756 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8757 over COND_EXPR in cases such as floating point comparisons. */
8758 if (integer_zerop (TREE_OPERAND (t, 1))
8759 && integer_onep (TREE_OPERAND (t, 2))
8760 && truth_value_p (TREE_CODE (arg0)))
8761 return pedantic_non_lvalue (fold_convert (type,
8762 invert_truthvalue (arg0)));
8764 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
8765 if (TREE_CODE (arg0) == LT_EXPR
8766 && integer_zerop (TREE_OPERAND (arg0, 1))
8767 && integer_zerop (TREE_OPERAND (t, 2))
8768 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
8769 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
8770 TREE_TYPE (tem), tem, arg1)));
8772 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
8773 already handled above. */
8774 if (TREE_CODE (arg0) == BIT_AND_EXPR
8775 && integer_onep (TREE_OPERAND (arg0, 1))
8776 && integer_zerop (TREE_OPERAND (t, 2))
8777 && integer_pow2p (arg1))
8779 tree tem = TREE_OPERAND (arg0, 0);
8781 if (TREE_CODE (tem) == RSHIFT_EXPR
8782 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
8783 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
8784 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
8785 return fold (build2 (BIT_AND_EXPR, type,
8786 TREE_OPERAND (tem, 0), arg1));
8789 /* A & N ? N : 0 is simply A & N if N is a power of two. This
8790 is probably obsolete because the first operand should be a
8791 truth value (that's why we have the two cases above), but let's
8792 leave it in until we can confirm this for all front-ends. */
8793 if (integer_zerop (TREE_OPERAND (t, 2))
8794 && TREE_CODE (arg0) == NE_EXPR
8795 && integer_zerop (TREE_OPERAND (arg0, 1))
8796 && integer_pow2p (arg1)
8797 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8798 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8799 arg1, OEP_ONLY_CONST))
8800 return pedantic_non_lvalue (fold_convert (type,
8801 TREE_OPERAND (arg0, 0)));
8803 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8804 if (integer_zerop (TREE_OPERAND (t, 2))
8805 && truth_value_p (TREE_CODE (arg0))
8806 && truth_value_p (TREE_CODE (arg1)))
8807 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
8809 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8810 if (integer_onep (TREE_OPERAND (t, 2))
8811 && truth_value_p (TREE_CODE (arg0))
8812 && truth_value_p (TREE_CODE (arg1)))
8814 /* Only perform transformation if ARG0 is easily inverted. */
8815 tem = invert_truthvalue (arg0);
8816 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8817 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
8820 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
8821 if (integer_zerop (arg1)
8822 && truth_value_p (TREE_CODE (arg0))
8823 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8825 /* Only perform transformation if ARG0 is easily inverted. */
8826 tem = invert_truthvalue (arg0);
8827 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8828 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
8829 TREE_OPERAND (t, 2)));
8832 /* Convert A ? 1 : B into A || B if A and B are truth values. */
8833 if (integer_onep (arg1)
8834 && truth_value_p (TREE_CODE (arg0))
8835 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8836 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
8837 TREE_OPERAND (t, 2)));
8842 /* When pedantic, a compound expression can be neither an lvalue
8843 nor an integer constant expression. */
8844 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8846 /* Don't let (0, 0) be null pointer constant. */
8847 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8848 : fold_convert (type, arg1);
8849 return pedantic_non_lvalue (tem);
8853 return build_complex (type, arg0, arg1);
8857 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8859 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8860 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8861 TREE_OPERAND (arg0, 1));
8862 else if (TREE_CODE (arg0) == COMPLEX_CST)
8863 return TREE_REALPART (arg0);
8864 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8865 return fold (build2 (TREE_CODE (arg0), type,
8866 fold (build1 (REALPART_EXPR, type,
8867 TREE_OPERAND (arg0, 0))),
8868 fold (build1 (REALPART_EXPR, type,
8869 TREE_OPERAND (arg0, 1)))));
8873 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8874 return fold_convert (type, integer_zero_node);
8875 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8876 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8877 TREE_OPERAND (arg0, 0));
8878 else if (TREE_CODE (arg0) == COMPLEX_CST)
8879 return TREE_IMAGPART (arg0);
8880 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8881 return fold (build2 (TREE_CODE (arg0), type,
8882 fold (build1 (IMAGPART_EXPR, type,
8883 TREE_OPERAND (arg0, 0))),
8884 fold (build1 (IMAGPART_EXPR, type,
8885 TREE_OPERAND (arg0, 1)))));
8888 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8890 case CLEANUP_POINT_EXPR:
8891 if (! has_cleanups (arg0))
8892 return TREE_OPERAND (t, 0);
8895 enum tree_code code0 = TREE_CODE (arg0);
8896 int kind0 = TREE_CODE_CLASS (code0);
8897 tree arg00 = TREE_OPERAND (arg0, 0);
8900 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8901 return fold (build1 (code0, type,
8902 fold (build1 (CLEANUP_POINT_EXPR,
8903 TREE_TYPE (arg00), arg00))));
8905 if (kind0 == '<' || kind0 == '2'
8906 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8907 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8908 || code0 == TRUTH_XOR_EXPR)
8910 arg01 = TREE_OPERAND (arg0, 1);
8912 if (TREE_CONSTANT (arg00)
8913 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8914 && ! has_cleanups (arg00)))
8915 return fold (build2 (code0, type, arg00,
8916 fold (build1 (CLEANUP_POINT_EXPR,
8917 TREE_TYPE (arg01), arg01))));
8919 if (TREE_CONSTANT (arg01))
8920 return fold (build2 (code0, type,
8921 fold (build1 (CLEANUP_POINT_EXPR,
8922 TREE_TYPE (arg00), arg00)),
8930 /* Check for a built-in function. */
8931 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8932 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8934 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8936 tree tmp = fold_builtin (t, false);
8944 } /* switch (code) */
8947 #ifdef ENABLE_FOLD_CHECKING
/* Forward declarations for the fold-checking instrumentation below.  */
8950 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8951 static void fold_check_failed (tree, tree);
8952 void print_fold_checksum (tree);
8954 /* When --enable-checking=fold, compute a digest of expr before
8955 and after actual fold call to see if fold did not accidentally
8956 change original expr. */
/* NOTE(review): the header of the checking `fold' wrapper (return type,
   name, opening brace, and the ctx/ht declarations) is missing from this
   extract — only the body fragment below survives.  */
8963 unsigned char checksum_before[16], checksum_after[16];
/* Pointer-keyed hash table so shared subtrees are digested only once.  */
8966 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8967 md5_init_ctx (&ctx);
8968 fold_checksum_tree (expr, &ctx, ht);
8969 md5_finish_ctx (&ctx, checksum_before);
/* Run the real folder.  It must not mutate EXPR in place.  */
8972 ret = fold_1 (expr);
/* Digest EXPR a second time after folding.  */
8974 md5_init_ctx (&ctx);
8975 fold_checksum_tree (expr, &ctx, ht);
8976 md5_finish_ctx (&ctx, checksum_after);
/* Any difference between the two digests means fold_1 modified the
   original tree — abort via fold_check_failed.  */
8979 if (memcmp (checksum_before, checksum_after, 16))
8980 fold_check_failed (expr, ret);
/* Debugging helper: compute the MD5 digest of EXPR (via
   fold_checksum_tree) and print it to stderr as 32 hex digits.  */
8986 print_fold_checksum (tree expr)
8989 unsigned char checksum[16], cnt;
8992 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8993 md5_init_ctx (&ctx);
8994 fold_checksum_tree (expr, &ctx, ht);
8995 md5_finish_ctx (&ctx, checksum);
/* Emit the 16 digest bytes as two lowercase hex digits each.  */
8997 for (cnt = 0; cnt < 16; ++cnt)
8998 fprintf (stderr, "%02x", checksum[cnt]);
8999 putc ('\n', stderr);
/* Report an internal compiler error: the fold checker detected that
   fold_1 changed the tree it was handed.  Both parameters exist only
   for debugger inspection, hence ATTRIBUTE_UNUSED.  */
9003 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9005 internal_error ("fold check: original tree changed by fold");
/* Fold the bytes of EXPR, and of every tree reachable from it, into the
   MD5 context CTX.  HT records nodes already visited so shared subtrees
   contribute to the digest only once.  */
9009 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9012 enum tree_code code;
9013 char buf[sizeof (struct tree_decl)];
/* Sanity check that BUF is large enough to hold a scratch copy of any
   node kind we scrub below (exp, type, or decl).  */
9016 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
9017 > sizeof (struct tree_decl)
9018 || sizeof (struct tree_type) > sizeof (struct tree_decl))
9022 slot = htab_find_slot (ht, expr, INSERT);
9026 code = TREE_CODE (expr);
/* Some fields are legitimately mutated during folding; temporarily
   clear them (working on a copy in BUF) so they don't perturb the
   digest.  */
9027 if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
9029 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9030 memcpy (buf, expr, tree_size (expr))
9032 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9034 else if (TREE_CODE_CLASS (code) == 't'
9035 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
9037 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
9038 memcpy (buf, expr, tree_size (expr));
9040 TYPE_POINTER_TO (expr) = NULL;
9041 TYPE_REFERENCE_TO (expr) = NULL;
/* Digest the node's raw bytes, then recurse into its type.  */
9043 md5_process_bytes (expr, tree_size (expr), ctx);
9044 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
/* TREE_CHAIN is followed only for non-type, non-decl nodes here.  */
9045 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
9046 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
/* Walk the code-class-specific sub-structure.  NOTE(review): most case
   labels of this switch are missing from this extract; the bodies below
   correspond to string/complex/vector constants, lists, vectors,
   expressions, decls and types respectively.  */
9047 switch (TREE_CODE_CLASS (code))
9053 md5_process_bytes (TREE_STRING_POINTER (expr),
9054 TREE_STRING_LENGTH (expr), ctx);
9057 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9058 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9061 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9071 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9072 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9075 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9076 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
/* Expressions: digest each operand.  */
9088 len = first_rtl_op (code);
9089 for (i = 0; i < len; ++i)
9090 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
/* Declarations: digest every tree-valued field.  */
9093 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9094 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9095 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9096 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9097 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9098 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9099 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9100 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9101 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9102 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9103 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
/* Types: digest every tree-valued field.  */
9106 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9107 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9108 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9109 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9110 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9111 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
/* Only integral and scalar floating types carry min/max values.  */
9112 if (INTEGRAL_TYPE_P (expr)
9113 || SCALAR_FLOAT_TYPE_P (expr))
9115 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9116 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9118 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9119 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9120 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9129 /* Perform constant folding and related simplification of initializer
9130 expression EXPR. This behaves identically to "fold" but ignores
9131 potential run-time traps and exceptions that fold must preserve. */
9134 fold_initializer (tree expr)
/* Save the trap-sensitivity flags, fold with them disabled, then
   restore them.  Initializers are evaluated at compile time, so
   run-time trap semantics need not inhibit simplification.  */
9136 int saved_signaling_nans = flag_signaling_nans;
9137 int saved_trapping_math = flag_trapping_math;
9138 int saved_trapv = flag_trapv;
9141 flag_signaling_nans = 0;
9142 flag_trapping_math = 0;
/* NOTE(review): the statement clearing flag_trapv appears to be missing
   from this extract (flag_trapv is saved and restored here).  */
9145 result = fold (expr);
9147 flag_signaling_nans = saved_signaling_nans;
9148 flag_trapping_math = saved_trapping_math;
9149 flag_trapv = saved_trapv;
9154 /* Determine if first argument is a multiple of second argument. Return 0 if
9155 it is not, or we cannot easily determined it to be.
9157 An example of the sort of thing we care about (at this point; this routine
9158 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9159 fold cases do now) is discovering that
9161 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9167 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9169 This code also handles discovering that
9171 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9173 is a multiple of 8 so we don't have to worry about dealing with a
9176 Note that we *look* inside a SAVE_EXPR only to determine how it was
9177 calculated; it is not safe for fold to do much of anything else with the
9178 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9179 at run time. For example, the latter example above *cannot* be implemented
9180 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9181 evaluation time of the original SAVE_EXPR is not necessarily the same at
9182 the time the new expression is evaluated. The only optimization of this
9183 sort that would be valid is changing
9185 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9189 SAVE_EXPR (I) * SAVE_EXPR (J)
9191 (where the same SAVE_EXPR (J) is used in the original and the
9192 transformed version). */
9195 multiple_of_p (tree type, tree top, tree bottom)
/* Anything is trivially a multiple of itself.  */
9197 if (operand_equal_p (top, bottom, 0))
/* Only plain integer types are analyzed.  */
9200 if (TREE_CODE (type) != INTEGER_TYPE)
/* Dispatch on the structure of TOP.  NOTE(review): the case labels of
   this switch are missing from this extract; from the recursion shapes
   these arms handle MULT, PLUS/MINUS, LSHIFT, conversions, SAVE_EXPR
   and INTEGER_CST.  */
9203 switch (TREE_CODE (top))
/* A product is a multiple of BOTTOM if either factor is.  */
9206 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9207 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* A sum/difference is a multiple only if both operands are.  */
9211 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9212 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Left shift by a constant: rewrite as a multiplication by 1<<N and
   recurse, provided the shift count fits and does not overflow.  */
9215 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9219 op1 = TREE_OPERAND (top, 1);
9220 /* const_binop may not detect overflow correctly,
9221 so check for it explicitly here. */
9222 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9223 > TREE_INT_CST_LOW (op1)
9224 && TREE_INT_CST_HIGH (op1) == 0
9225 && 0 != (t1 = fold_convert (type,
9226 const_binop (LSHIFT_EXPR,
9229 && ! TREE_OVERFLOW (t1))
9230 return multiple_of_p (type, t1, bottom);
9235 /* Can't handle conversions from non-integral or wider integral type. */
9236 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9237 || (TYPE_PRECISION (type)
9238 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9241 /* .. fall through ... */
/* Conversions/SAVE_EXPR: look through to the underlying operand.  */
9244 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* INTEGER_CST: decide by exact modulus, but refuse when negative
   values would be misinterpreted in an unsigned TYPE.  */
9247 if (TREE_CODE (bottom) != INTEGER_CST
9248 || (TYPE_UNSIGNED (type)
9249 && (tree_int_cst_sgn (top) < 0
9250 || tree_int_cst_sgn (bottom) < 0)))
9252 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9260 /* Return true if `t' is known to be non-negative. */
9263 tree_expr_nonnegative_p (tree t)
/* Structural, conservative analysis: dispatch on the tree code and
   recurse into operands.  NOTE(review): most case labels of this switch
   are missing from this extract; the bodies are annotated with the
   operation they evidently belong to.  */
9265 switch (TREE_CODE (t))
/* INTEGER_CST: non-negative iff its sign is >= 0.  */
9271 return tree_int_cst_sgn (t) >= 0;
/* REAL_CST: non-negative iff the sign bit is clear.  */
9274 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* Addition: for floats, nonneg + nonneg is nonneg.  */
9277 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9278 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9279 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9281 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9282 both unsigned and at least 2 bits shorter than the result. */
9283 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9284 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9285 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9287 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9288 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9289 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9290 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9292 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9293 TYPE_PRECISION (inner2)) + 1;
9294 return prec < TYPE_PRECISION (TREE_TYPE (t));
/* Multiplication.  */
9300 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9302 /* x * x for floating point x is always non-negative. */
9303 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9305 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9306 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9309 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9310 both unsigned and their total bits is shorter than the result. */
9311 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9312 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9313 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9315 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9316 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9317 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9318 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9319 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9320 < TYPE_PRECISION (TREE_TYPE (t));
/* Division: nonneg / nonneg is nonneg.  */
9324 case TRUNC_DIV_EXPR:
9326 case FLOOR_DIV_EXPR:
9327 case ROUND_DIV_EXPR:
9328 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9329 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Modulus: the result takes the sign of the first operand.  */
9331 case TRUNC_MOD_EXPR:
9333 case FLOOR_MOD_EXPR:
9334 case ROUND_MOD_EXPR:
9335 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
/* MIN: nonneg only if both operands are.  */
9338 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9339 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* MAX: nonneg if either operand is.  */
9342 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9343 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9346 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9347 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Conversions: decide from inner/outer type combinations.  */
9351 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9352 tree outer_type = TREE_TYPE (t);
9354 if (TREE_CODE (outer_type) == REAL_TYPE)
9356 if (TREE_CODE (inner_type) == REAL_TYPE)
9357 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9358 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9360 if (TYPE_UNSIGNED (inner_type))
9362 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9365 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9367 if (TREE_CODE (inner_type) == REAL_TYPE)
9368 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
9369 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9370 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9371 && TYPE_UNSIGNED (inner_type);
/* COND_EXPR: nonneg if both selectable arms are.  */
9377 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9378 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
/* COMPOUND_EXPR: only the second operand's value matters.  */
9380 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9382 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9383 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9385 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9386 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* MODIFY_EXPR: the assigned value is the expression's value.  */
9388 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* BIND_EXPR: the value is that of the last expression in the body.  */
9390 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9392 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9393 case NON_LVALUE_EXPR:
9394 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9396 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
/* TARGET_EXPR: analyze the initializer that fills the slot.  */
9400 tree temp = TARGET_EXPR_SLOT (t);
9401 t = TARGET_EXPR_INITIAL (t);
9403 /* If the initializer is non-void, then it's a normal expression
9404 that will be assigned to the slot. */
9405 if (!VOID_TYPE_P (t))
9406 return tree_expr_nonnegative_p (t);
9408 /* Otherwise, the initializer sets the slot in some way. One common
9409 way is an assignment statement at the end of the initializer. */
9412 if (TREE_CODE (t) == BIND_EXPR)
9413 t = expr_last (BIND_EXPR_BODY (t));
9414 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9415 || TREE_CODE (t) == TRY_CATCH_EXPR)
9416 t = expr_last (TREE_OPERAND (t, 0));
9417 else if (TREE_CODE (t) == STATEMENT_LIST)
9422 if (TREE_CODE (t) == MODIFY_EXPR
9423 && TREE_OPERAND (t, 0) == temp)
9424 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* CALL_EXPR: recognize math/bit builtins with known result sign.  */
9431 tree fndecl = get_callee_fndecl (t);
9432 tree arglist = TREE_OPERAND (t, 1);
9434 && DECL_BUILT_IN (fndecl)
9435 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9436 switch (DECL_FUNCTION_CODE (fndecl))
9438 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9439 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9440 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9441 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
/* Builtins whose result is always non-negative.  */
9443 CASE_BUILTIN_F (BUILT_IN_ACOS)
9444 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9445 CASE_BUILTIN_F (BUILT_IN_CABS)
9446 CASE_BUILTIN_F (BUILT_IN_COSH)
9447 CASE_BUILTIN_F (BUILT_IN_ERFC)
9448 CASE_BUILTIN_F (BUILT_IN_EXP)
9449 CASE_BUILTIN_F (BUILT_IN_EXP10)
9450 CASE_BUILTIN_F (BUILT_IN_EXP2)
9451 CASE_BUILTIN_F (BUILT_IN_FABS)
9452 CASE_BUILTIN_F (BUILT_IN_FDIM)
9453 CASE_BUILTIN_F (BUILT_IN_FREXP)
9454 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9455 CASE_BUILTIN_F (BUILT_IN_POW10)
9456 CASE_BUILTIN_I (BUILT_IN_FFS)
9457 CASE_BUILTIN_I (BUILT_IN_PARITY)
9458 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9462 CASE_BUILTIN_F (BUILT_IN_SQRT)
9463 /* sqrt(-0.0) is -0.0. */
9464 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9466 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
/* Builtins that preserve the sign of their first argument.  */
9468 CASE_BUILTIN_F (BUILT_IN_ASINH)
9469 CASE_BUILTIN_F (BUILT_IN_ATAN)
9470 CASE_BUILTIN_F (BUILT_IN_ATANH)
9471 CASE_BUILTIN_F (BUILT_IN_CBRT)
9472 CASE_BUILTIN_F (BUILT_IN_CEIL)
9473 CASE_BUILTIN_F (BUILT_IN_ERF)
9474 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9475 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9476 CASE_BUILTIN_F (BUILT_IN_FMOD)
9477 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9478 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9479 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9480 CASE_BUILTIN_F (BUILT_IN_LRINT)
9481 CASE_BUILTIN_F (BUILT_IN_LROUND)
9482 CASE_BUILTIN_F (BUILT_IN_MODF)
9483 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9484 CASE_BUILTIN_F (BUILT_IN_POW)
9485 CASE_BUILTIN_F (BUILT_IN_RINT)
9486 CASE_BUILTIN_F (BUILT_IN_ROUND)
9487 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9488 CASE_BUILTIN_F (BUILT_IN_SINH)
9489 CASE_BUILTIN_F (BUILT_IN_TANH)
9490 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9491 /* True if the 1st argument is nonnegative. */
9492 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9494 CASE_BUILTIN_F (BUILT_IN_FMAX)
9495 /* True if the 1st OR 2nd arguments are nonnegative. */
9496 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9497 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9499 CASE_BUILTIN_F (BUILT_IN_FMIN)
9500 /* True if the 1st AND 2nd arguments are nonnegative. */
9501 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9502 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9504 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9505 /* True if the 2nd argument is nonnegative. */
9506 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9510 #undef CASE_BUILTIN_F
9511 #undef CASE_BUILTIN_I
9515 /* ... fall through ... */
9518 if (truth_value_p (TREE_CODE (t)))
9519 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9523 /* We don't know sign of `t', so be conservative and return false. */
9527 /* Return true when T is an address and is known to be nonzero.
9528 For floating point we further ensure that T is not denormal.
9529 Similar logic is present in nonzero_address in rtlanal.h */
9532 tree_expr_nonzero_p (tree t)
9534 tree type = TREE_TYPE (t);
9536 /* Doing something useful for floating point would need more work. */
9537 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
/* Structural dispatch; conservative — "false" only means "unknown".
   NOTE(review): most case labels of this switch are missing from this
   extract; the bodies are annotated with the evident operation.  */
9540 switch (TREE_CODE (t))
/* Negation (absent signed wraparound) preserves nonzero-ness.  */
9543 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9544 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9547 /* We used to test for !integer_zerop here. This does not work correctly
9548 if TREE_CONSTANT_OVERFLOW (t). */
9549 return (TREE_INT_CST_LOW (t) != 0
9550 || TREE_INT_CST_HIGH (t) != 0);
/* Addition: sound only without signed wraparound.  */
9553 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9555 /* With the presence of negative values it is hard
9556 to say something. */
9557 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9558 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9560 /* One of operands must be positive and the other non-negative. */
9561 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9562 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Multiplication: nonzero * nonzero is nonzero without wraparound.  */
9567 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9569 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9570 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Conversions: safe only when no bits can be truncated away.  */
9576 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9577 tree outer_type = TREE_TYPE (t);
9579 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9580 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
/* ADDR_EXPR of a declaration.  */
9585 /* Weak declarations may link to NULL. */
9586 if (DECL_P (TREE_OPERAND (t, 0)))
9587 return !DECL_WEAK (TREE_OPERAND (t, 0));
9588 /* Constants and all other cases are never weak. */
/* COND_EXPR: nonzero if both arms are.  */
9592 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9593 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
/* MIN: nonzero if both operands are.  */
9596 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9597 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* MAX: several sufficient conditions.  */
9600 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9602 /* When both operands are nonzero, then MAX must be too. */
9603 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9606 /* MAX where operand 0 is positive is positive. */
9607 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9609 /* MAX where operand 1 is positive is positive. */
9610 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9611 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
/* COMPOUND/MODIFY: only the value operand matters.  */
9618 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9621 case NON_LVALUE_EXPR:
9622 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* BIT_IOR_EXPR: nonzero if either operand is.  */
9625 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9626 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9634 /* See if we are applying CODE, a relational to the highest or lowest
9635 possible integer of TYPE. If so, then the result is a compile
/* NOTE(review): the tail of this comment and the return-type line are
   missing from this extract.  CODE_P, OP0_P and OP1_P are in/out
   parameters: the comparison may be rewritten in place, or a constant
   result tree may be returned.  */
9639 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9644 enum tree_code code = *code_p;
9645 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
/* Only handle single-word integer/pointer constants without overflow.  */
9647 if (TREE_CODE (op1) == INTEGER_CST
9648 && ! TREE_CONSTANT_OVERFLOW (op1)
9649 && width <= HOST_BITS_PER_WIDE_INT
9650 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9651 || POINTER_TYPE_P (TREE_TYPE (op1))))
9653 unsigned HOST_WIDE_INT signed_max;
9654 unsigned HOST_WIDE_INT max, min;
/* Compute the extreme representable values for WIDTH bits.  */
9656 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9658 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9660 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9666 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
/* OP1 == MAX: e.g. X > MAX is always false, X <= MAX always true.
   NOTE(review): the per-CODE switch bodies here are heavily elided.  */
9669 if (TREE_INT_CST_HIGH (op1) == 0
9670 && TREE_INT_CST_LOW (op1) == max)
9674 return omit_one_operand (type, integer_zero_node, op0);
9680 return omit_one_operand (type, integer_one_node, op0);
9686 /* The GE_EXPR and LT_EXPR cases above are not normally
9687 reached because of previous transformations. */
/* OP1 == MAX-1: shift the bound by one and strengthen the code.  */
9692 else if (TREE_INT_CST_HIGH (op1) == 0
9693 && TREE_INT_CST_LOW (op1) == max - 1)
9698 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9702 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
/* OP1 == MIN: e.g. X < MIN is always false, X >= MIN always true.  */
9707 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9708 && TREE_INT_CST_LOW (op1) == min)
9712 return omit_one_operand (type, integer_zero_node, op0);
9719 return omit_one_operand (type, integer_one_node, op0);
/* OP1 == MIN+1: shift the bound down by one.  */
9728 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9729 && TREE_INT_CST_LOW (op1) == min + 1)
9734 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9738 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
/* Unsigned OP1 == signed_max: X <= signed_max is really a sign test.  */
9744 else if (TREE_INT_CST_HIGH (op1) == 0
9745 && TREE_INT_CST_LOW (op1) == signed_max
9746 && TYPE_UNSIGNED (TREE_TYPE (op1))
9747 /* signed_type does not work on pointer types. */
9748 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9750 /* The following case also applies to X < signed_max+1
9751 and X >= signed_max+1 because previous transformations. */
9752 if (code == LE_EXPR || code == GT_EXPR)
9754 tree st0, st1, exp, retval;
9755 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9756 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
/* Rewrite as a signed comparison against zero.  */
9758 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9760 fold_convert (st0, op0),
9761 fold_convert (st1, integer_zero_node));
9764 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9766 TREE_OPERAND (exp, 0),
9767 TREE_OPERAND (exp, 1));
9769 /* If we are in gimple form, then returning EXP would create
9770 non-gimple expressions. Clearing it is safe and insures
9771 we do not allow a non-gimple expression to escape. */
9775 return (retval ? retval : exp);
9784 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9785 attempt to fold the expression to a constant without modifying TYPE,
9788 If the expression could be simplified to a constant, then return
9789 the constant. If the expression would not be simplified to a
9790 constant, then return NULL_TREE.
9792 Note this is primarily designed to be called after gimplification
9793 of the tree structures and when at least one operand is a constant.
9794 As a result of those simplifying assumptions this routine is far
9795 simpler than the generic fold routine. */
9798 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9806 /* If this is a commutative operation, and ARG0 is a constant, move it
9807 to ARG1 to reduce the number of tests below. */
9808 if (commutative_tree_code (code)
9809 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9816 /* If either operand is a complex type, extract its real component. */
9817 if (TREE_CODE (op0) == COMPLEX_CST)
9818 subop0 = TREE_REALPART (op0);
9822 if (TREE_CODE (op1) == COMPLEX_CST)
9823 subop1 = TREE_REALPART (op1);
9827 /* Note if either argument is not a real or integer constant.
9828 With a few exceptions, simplification is limited to cases
9829 where both arguments are constants. */
9830 if ((TREE_CODE (subop0) != INTEGER_CST
9831 && TREE_CODE (subop0) != REAL_CST)
9832 || (TREE_CODE (subop1) != INTEGER_CST
9833 && TREE_CODE (subop1) != REAL_CST))
9839 /* (plus (address) (const_int)) is a constant. */
9840 if (TREE_CODE (op0) == PLUS_EXPR
9841 && TREE_CODE (op1) == INTEGER_CST
9842 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9843 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9844 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9846 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9848 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9849 const_binop (PLUS_EXPR, op1,
9850 TREE_OPERAND (op0, 1), 0));
9858 /* Both arguments are constants. Simplify. */
9859 tem = const_binop (code, op0, op1, 0);
9860 if (tem != NULL_TREE)
9862 /* The return value should always have the same type as
9863 the original expression. */
9864 if (TREE_TYPE (tem) != type)
9865 tem = fold_convert (type, tem);
9872 /* Fold &x - &x. This can happen from &x.foo - &x.
9873 This is unsafe for certain floats even in non-IEEE formats.
9874 In IEEE, it is unsafe because it does wrong for NaNs.
9875 Also note that operand_equal_p is always false if an
9876 operand is volatile. */
9877 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9878 return fold_convert (type, integer_zero_node);
9884 /* Special case multiplication or bitwise AND where one argument
9886 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9887 return omit_one_operand (type, op1, op0);
9889 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9890 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9891 && real_zerop (op1))
9892 return omit_one_operand (type, op1, op0);
9897 /* Special case when we know the result will be all ones. */
9898 if (integer_all_onesp (op1))
9899 return omit_one_operand (type, op1, op0);
9903 case TRUNC_DIV_EXPR:
9904 case ROUND_DIV_EXPR:
9905 case FLOOR_DIV_EXPR:
9907 case EXACT_DIV_EXPR:
9908 case TRUNC_MOD_EXPR:
9909 case ROUND_MOD_EXPR:
9910 case FLOOR_MOD_EXPR:
9913 /* Division by zero is undefined. */
9914 if (integer_zerop (op1))
9917 if (TREE_CODE (op1) == REAL_CST
9918 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9919 && real_zerop (op1))
9925 if (INTEGRAL_TYPE_P (type)
9926 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9927 return omit_one_operand (type, op1, op0);
9932 if (INTEGRAL_TYPE_P (type)
9933 && TYPE_MAX_VALUE (type)
9934 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9935 return omit_one_operand (type, op1, op0);
9940 /* Optimize -1 >> x for arithmetic right shifts. */
9941 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
9942 return omit_one_operand (type, op0, op1);
9943 /* ... fall through ... */
9946 if (integer_zerop (op0))
9947 return omit_one_operand (type, op0, op1);
9949 /* Since negative shift count is not well-defined, don't
9950 try to compute it in the compiler. */
9951 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
9958 /* -1 rotated either direction by any amount is still -1. */
9959 if (integer_all_onesp (op0))
9960 return omit_one_operand (type, op0, op1);
9962 /* 0 rotated either direction by any amount is still zero. */
9963 if (integer_zerop (op0))
9964 return omit_one_operand (type, op0, op1);
9970 return build_complex (type, op0, op1);
9979 /* If one arg is a real or integer constant, put it last. */
9980 if ((TREE_CODE (op0) == INTEGER_CST
9981 && TREE_CODE (op1) != INTEGER_CST)
9982 || (TREE_CODE (op0) == REAL_CST
9983 && TREE_CODE (op0) != REAL_CST))
9990 code = swap_tree_comparison (code);
9993 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9994 This transformation affects the cases which are handled in later
9995 optimizations involving comparisons with non-negative constants. */
9996 if (TREE_CODE (op1) == INTEGER_CST
9997 && TREE_CODE (op0) != INTEGER_CST
9998 && tree_int_cst_sgn (op1) > 0)
10004 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10009 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10017 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10021 /* Fall through. */
10024 case UNORDERED_EXPR:
10034 return fold_relational_const (code, type, op0, op1);
10037 /* This could probably be handled. */
10040 case TRUTH_AND_EXPR:
10041 /* If second arg is constant zero, result is zero, but first arg
10042 must be evaluated. */
10043 if (integer_zerop (op1))
10044 return omit_one_operand (type, op1, op0);
10045 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10046 case will be handled here. */
10047 if (integer_zerop (op0))
10048 return omit_one_operand (type, op0, op1);
10049 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10050 return constant_boolean_node (true, type);
10053 case TRUTH_OR_EXPR:
10054 /* If second arg is constant true, result is true, but we must
10055 evaluate first arg. */
10056 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10057 return omit_one_operand (type, op1, op0);
10058 /* Likewise for first arg, but note this only occurs here for
10060 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10061 return omit_one_operand (type, op0, op1);
10062 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10063 return constant_boolean_node (false, type);
10066 case TRUTH_XOR_EXPR:
10067 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10069 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10070 return constant_boolean_node (x, type);
10079 /* Given the components of a unary expression CODE, TYPE and OP0,
10080 attempt to fold the expression to a constant without modifying
10083 If the expression could be simplified to a constant, then return
10084 the constant. If the expression would not be simplified to a
10085 constant, then return NULL_TREE.
10087 Note this is primarily designed to be called after gimplification
10088 of the tree structures and when op0 is a constant. As a result
10089 of those simplifying assumptions this routine is far simpler than
10090 the generic fold routine. */
/* NOTE(review): extraction artifact.  Every line below carries a stale
   upstream line number, and the gaps in that numbering show that the
   return type, the OP0 parameter line, braces, the `switch (code)' line
   and several case labels were dropped.  Text is kept byte-identical;
   restore the missing lines from upstream before compiling.  */
10093 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
/* For conversions, require that the operand (or the real part of a
   complex operand) already be an INTEGER_CST or REAL_CST.  */
10096 /* Make sure we have a suitable constant argument. */
10097 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10101 if (TREE_CODE (op0) == COMPLEX_CST)
10102 subop = TREE_REALPART (op0);
10106 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
/* Conversion codes delegate to fold_convert_const; each remaining case
   handles one unary operator on a constant operand, returning NULL_TREE
   (in dropped lines) when the operand is not a suitable constant.  */
10115 case FIX_TRUNC_EXPR:
10116 case FIX_FLOOR_EXPR:
10117 case FIX_CEIL_EXPR:
10118 return fold_convert_const (code, type, op0);
10121 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10122 return fold_negate_const (op0, type);
10127 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10128 return fold_abs_const (op0, type);
10133 if (TREE_CODE (op0) == INTEGER_CST)
10134 return fold_not_const (op0, type);
10138 case REALPART_EXPR:
10139 if (TREE_CODE (op0) == COMPLEX_CST)
10140 return TREE_REALPART (op0);
10144 case IMAGPART_EXPR:
10145 if (TREE_CODE (op0) == COMPLEX_CST)
10146 return TREE_IMAGPART (op0);
/* Conjugate of a complex constant: negate the imaginary part.  */
10151 if (TREE_CODE (op0) == COMPLEX_CST
10152 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10153 return build_complex (type, TREE_REALPART (op0),
10154 negate_expr (TREE_IMAGPART (op0)));
10162 /* If EXP represents referencing an element in a constant string
10163 (either via pointer arithmetic or array indexing), return the
10164 tree representing the value accessed, otherwise return NULL. */
/* NOTE(review): extraction artifact -- stale line-number prefixes and
   dropped lines (return type, `string'/`index' declarations, braces,
   the INDIRECT_REF/ARRAY_REF else-branch structure, and the final
   return).  Text kept byte-identical; recover from upstream.  */
10167 fold_read_from_constant_string (tree exp)
10169 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10171 tree exp1 = TREE_OPERAND (exp, 0);
/* For *p, ask string_constant to decompose EXP1 into a STRING_CST plus
   a byte offset; for array indexing, build the index ourselves.  */
10175 if (TREE_CODE (exp) == INDIRECT_REF)
10176 string = string_constant (exp1, &index);
10179 tree low_bound = array_ref_low_bound (exp);
10180 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10182 /* Optimize the special-case of a zero lower bound.
10184 We convert the low_bound to sizetype to avoid some problems
10185 with constant folding. (E.g. suppose the lower bound is 1,
10186 and its mode is QI. Without the conversion, (ARRAY
10187 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10188 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
10189 if (! integer_zerop (low_bound))
10190 index = size_diffop (index, fold_convert (sizetype, low_bound))
/* Only fold when the access is a single-byte integer element that lies
   within the literal's bounds; then read the byte directly out of the
   STRING_CST's payload.  */
10196 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10197 && TREE_CODE (string) == STRING_CST
10198 && TREE_CODE (index) == INTEGER_CST
10199 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10200 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10202 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10203 return fold_convert (TREE_TYPE (exp),
10204 build_int_cst (NULL_TREE,
10205 (TREE_STRING_POINTER (string)
10206 [TREE_INT_CST_LOW (index)])));
10211 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10212 an integer constant or real constant.
10214 TYPE is the type of the result. */
10217 fold_negate_const (tree arg0, tree type)
10219 tree t = NULL_TREE;
10221 if (TREE_CODE (arg0) == INTEGER_CST)
10223 unsigned HOST_WIDE_INT low;
10224 HOST_WIDE_INT high;
10225 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10226 TREE_INT_CST_HIGH (arg0),
10228 t = build_int_cst_wide (type, low, high);
10229 t = force_fit_type (t, 1,
10230 (overflow | TREE_OVERFLOW (arg0))
10231 && !TYPE_UNSIGNED (type),
10232 TREE_CONSTANT_OVERFLOW (arg0));
10234 else if (TREE_CODE (arg0) == REAL_CST)
10235 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10236 #ifdef ENABLE_CHECKING
10244 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10245 an integer constant or real constant.
10247 TYPE is the type of the result. */
10250 fold_abs_const (tree arg0, tree type)
10252 tree t = NULL_TREE;
10254 if (TREE_CODE (arg0) == INTEGER_CST)
10256 /* If the value is unsigned, then the absolute value is
10257 the same as the ordinary value. */
10258 if (TYPE_UNSIGNED (type))
10260 /* Similarly, if the value is non-negative. */
10261 else if (INT_CST_LT (integer_minus_one_node, arg0))
10263 /* If the value is negative, then the absolute value is
10267 unsigned HOST_WIDE_INT low;
10268 HOST_WIDE_INT high;
10269 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10270 TREE_INT_CST_HIGH (arg0),
10272 t = build_int_cst_wide (type, low, high);
10273 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
10274 TREE_CONSTANT_OVERFLOW (arg0));
10278 else if (TREE_CODE (arg0) == REAL_CST)
10280 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10281 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10285 #ifdef ENABLE_CHECKING
10293 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10294 constant. TYPE is the type of the result. */
10297 fold_not_const (tree arg0, tree type)
10299 tree t = NULL_TREE;
10301 if (TREE_CODE (arg0) == INTEGER_CST)
10303 t = build_int_cst_wide (type,
10304 ~ TREE_INT_CST_LOW (arg0),
10305 ~ TREE_INT_CST_HIGH (arg0));
10306 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
10307 TREE_CONSTANT_OVERFLOW (arg0));
10309 #ifdef ENABLE_CHECKING
10317 /* Given CODE, a relational operator, the target type, TYPE and two
10318 constant operands OP0 and OP1, return the result of the
10319 relational operation. If the result is not a compile time
10320 constant, then return NULL_TREE. */
/* NOTE(review): extraction artifact -- stale line-number prefixes; the
   return type, braces, and roughly twenty lines of the NaN-handling
   switch (lines 10337-10368 upstream) were dropped.  Text kept
   byte-identical; recover from upstream before compiling.  */
10323 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10325 int result, invert;
10327 /* From here on, the only cases we handle are when the result is
10328 known to be a constant. */
10330 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10332 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
10333 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
10335 /* Handle the cases where either operand is a NaN. */
/* The dropped switch decided, per comparison code, what a comparison
   involving a NaN folds to (e.g. UNORDERED -> true); with
   -ftrapping-math some cases must not be folded at all.  */
10336 if (real_isnan (c0) || real_isnan (c1))
10346 case UNORDERED_EXPR:
10360 if (flag_trapping_math)
10369 return constant_boolean_node (result, type);
/* No NaNs involved: real_compare gives the exact answer.  */
10372 return constant_boolean_node (real_compare (code, c0, c1), type);
10375 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10377 To compute GT, swap the arguments and do LT.
10378 To compute GE, do LT and invert the result.
10379 To compute LE, swap the arguments, do LT and invert the result.
10380 To compute NE, do EQ and invert the result.
10382 Therefore, the code below must handle only EQ and LT. */
/* Dropped lines here presumably exchanged OP0 and OP1 alongside
   swapping the comparison code -- confirm against upstream.  */
10384 if (code == LE_EXPR || code == GT_EXPR)
10389 code = swap_tree_comparison (code);
10392 /* Note that it is safe to invert for real values here because we
10393 have already handled the one case that it matters. */
/* Dropped lines presumably set `invert' and applied it to the final
   result -- confirm against upstream.  */
10396 if (code == NE_EXPR || code == GE_EXPR)
10399 code = invert_tree_comparison (code, false);
10402 /* Compute a result for LT or EQ if args permit;
10403 Otherwise return T. */
10404 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10406 if (code == EQ_EXPR)
10407 result = tree_int_cst_equal (op0, op1);
10408 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10409 result = INT_CST_LT_UNSIGNED (op0, op1);
10411 result = INT_CST_LT (op0, op1);
10418 return constant_boolean_node (result, type);
10421 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10422 avoid confusing the gimplify process. */
/* NOTE(review): extraction artifact -- stale line-number prefixes and
   dropped lines (return type, braces, the `tree base = t;' declaration,
   the else-branch opening, and the final `return t;').  Text kept
   byte-identical.  */
10425 build_fold_addr_expr_with_type (tree t, tree ptrtype)
/* &*p folds to p (with a NOP_EXPR if the pointer type differs).  */
10427 if (TREE_CODE (t) == INDIRECT_REF)
10429 t = TREE_OPERAND (t, 0);
10430 if (TREE_TYPE (t) != ptrtype)
10431 t = build1 (NOP_EXPR, ptrtype, t);
/* Otherwise walk down to the base object of the reference chain and
   mark it addressable before taking the address.  */
10437 while (handled_component_p (base)
10438 || TREE_CODE (base) == REALPART_EXPR
10439 || TREE_CODE (base) == IMAGPART_EXPR)
10440 base = TREE_OPERAND (base, 0);
/* NOTE(review): the dropped line just above this one (upstream 10441)
   presumably guarded this with `if (DECL_P (base))' -- confirm.  */
10442 TREE_ADDRESSABLE (base) = 1;
10444 t = build1 (ADDR_EXPR, ptrtype, t);
10451 build_fold_addr_expr (tree t)
10453 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10456 /* Builds an expression for an indirection through T, simplifying some
10460 build_fold_indirect_ref (tree t)
10462 tree type = TREE_TYPE (TREE_TYPE (t));
10467 if (TREE_CODE (sub) == ADDR_EXPR)
10469 tree op = TREE_OPERAND (sub, 0);
10470 tree optype = TREE_TYPE (op);
10472 if (lang_hooks.types_compatible_p (type, optype))
10474 /* *(foo *)&fooarray => fooarray[0] */
10475 else if (TREE_CODE (optype) == ARRAY_TYPE
10476 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10477 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
10480 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10481 subtype = TREE_TYPE (sub);
10482 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10483 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10485 sub = build_fold_indirect_ref (sub);
10486 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
10489 return build1 (INDIRECT_REF, type, t);
10492 /* Strip non-trapping, non-side-effecting tree nodes from an expression
10493 whose result is ignored. The type of the returned tree need not be
10494 the same as the original expression. */
/* NOTE(review): extraction artifact -- stale line-number prefixes and
   dropped lines (return type, braces, the enclosing loop, the case
   labels of both switches, `break'/`return t;' statements).  Text kept
   byte-identical; recover from upstream before compiling.  */
10497 fold_ignored_result (tree t)
/* A side-effect-free expression can be discarded entirely.  */
10499 if (!TREE_SIDE_EFFECTS (t))
10500 return integer_zero_node;
/* Otherwise repeatedly peel off wrapper nodes, keeping only operands
   that still carry side effects.  */
10503 switch (TREE_CODE_CLASS (TREE_CODE (t)))
/* Unary case (dropped label): descend into the sole operand.  */
10506 t = TREE_OPERAND (t, 0);
/* Binary/comparison case (dropped labels): keep whichever operand has
   the side effects; dropped lines return T when both do.  */
10511 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10512 t = TREE_OPERAND (t, 0);
10513 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
10514 t = TREE_OPERAND (t, 1);
10520 switch (TREE_CODE (t))
10522 case COMPOUND_EXPR:
10523 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10525 t = TREE_OPERAND (t, 0);
/* COND_EXPR (dropped label): only the condition matters when neither
   arm has side effects.  */
10529 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
10530 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
10532 t = TREE_OPERAND (t, 0);
10545 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
10546 This can only be applied to objects of a sizetype. */
/* NOTE(review): extraction artifact -- stale line-number prefixes and
   dropped lines (return type, braces, the divisor validation /
   divisor==1 early return around upstream lines 10552-10557, the early
   `return value;' for the already-a-multiple case, and the final
   `return value;').  Text kept byte-identical.  */
10549 round_up (tree value, int divisor)
10551 tree div = NULL_TREE;
10558 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10559 have to do anything. Only do this when we are not given a const,
10560 because in that case, this check is more expensive than just
10562 if (TREE_CODE (value) != INTEGER_CST)
10564 div = build_int_cst (TREE_TYPE (value), divisor);
10566 if (multiple_of_p (TREE_TYPE (value), value, div))
10570 /* If divisor is a power of two, simplify this to bit manipulation. */
/* (divisor & -divisor) isolates the lowest set bit, so equality holds
   exactly when DIVISOR is a power of two; then rounding up is
   (VALUE + DIVISOR-1) & -DIVISOR.  */
10571 if (divisor == (divisor & -divisor))
10575 t = build_int_cst (TREE_TYPE (value), divisor - 1);
10576 value = size_binop (PLUS_EXPR, value, t);
10577 t = build_int_cst (TREE_TYPE (value), -divisor);
10578 value = size_binop (BIT_AND_EXPR, value, t);
/* General case: ceil-divide then multiply back.  */
10583 div = build_int_cst (TREE_TYPE (value), divisor);
10584 value = size_binop (CEIL_DIV_EXPR, value, div);
10585 value = size_binop (MULT_EXPR, value, div);
10591 /* Likewise, but round down. */
10594 round_down (tree value, int divisor)
10596 tree div = NULL_TREE;
10603 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
10604 have to do anything. Only do this when we are not given a const,
10605 because in that case, this check is more expensive than just
10607 if (TREE_CODE (value) != INTEGER_CST)
10609 div = build_int_cst (TREE_TYPE (value), divisor);
10611 if (multiple_of_p (TREE_TYPE (value), value, div))
10615 /* If divisor is a power of two, simplify this to bit manipulation. */
10616 if (divisor == (divisor & -divisor))
10620 t = build_int_cst (TREE_TYPE (value), -divisor);
10621 value = size_binop (BIT_AND_EXPR, value, t);
10626 div = build_int_cst (TREE_TYPE (value), divisor);
10627 value = size_binop (FLOOR_DIV_EXPR, value, div);
10628 value = size_binop (MULT_EXPR, value, div);