/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static tree build_zero_vector (tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code, bool);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_relational_hi_lo (enum tree_code *, const tree,
                                   tree *, tree *);
static bool tree_expr_nonzero_p (tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
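
/* A worked illustration of OVERFLOW_SUM_SIGN (added commentary, using
   hypothetical 8-bit values).  For 100 + 100: a = 0x64, b = 0x64,
   sum = 0xC8 (-56 signed).  ~(a ^ b) = 0xFF since the signs agree,
   a ^ sum = 0xAC has the sign bit set, so the AND is negative and
   overflow is reported.  For 100 + (-56) the operand signs differ,
   ~(a ^ b) has a clear sign bit, and no overflow is reported; operands
   of opposite sign can never overflow on addition.  */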
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
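
/* Concrete reading of the macros above (added commentary), assuming
   HOST_BITS_PER_WIDE_INT is 64 so that BASE == 2^32.  For
   x = 0x0123456789ABCDEF:
     LOWPART (x)  == 0x89ABCDEF
     HIGHPART (x) == 0x01234567
   and x == LOWPART (x) + HIGHPART (x) * BASE.  Keeping each digit to a
   half word guarantees that a digit-by-digit product plus its carry
   still fits in one HOST_WIDE_INT.  */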
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
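
/* Round-trip sketch for encode/decode (added commentary, assuming a
   64-bit HOST_WIDE_INT):

     HOST_WIDE_INT w[4];
     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;

     encode (w, 0x89ABCDEF01234567, 0x1122334455667788);
     // w[0] = 0x01234567, w[1] = 0x89ABCDEF,
     // w[2] = 0x55667788, w[3] = 0x11223344
     decode (w, &lo, &hi);  // restores the original low and high words

   The routines below do all doubleword arithmetic on this 4-digit
   base-2^32 representation.  */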
/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs,
	or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
	CONST_OVERFLOWED is nonzero,
	or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */
tree
force_fit_type (tree t, int overflowable,
		bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
	high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  high = -1;
	  low |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (overflowed_const)
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }

  return t;
}
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
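
/* The carry detection above exploits unsigned wrap-around (added
   commentary): l = l1 + l2 wraps exactly when the true sum does not
   fit the word, and in that case l < l1.  With hypothetical 8-bit
   words, 0xF0 + 0x20 yields l = 0x10 < 0xF0, so a carry of 1 is added
   into the high word.  */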
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
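
/* Added commentary on the overflow test above: the 4x4 digit multiply
   produces eight digits, but only the low four form the doubleword
   result.  After the sign corrections for negative multiplicands, a
   representable signed product requires the high four digits to be a
   pure sign extension of the result -- all zeros for a non-negative
   *hv, all ones for a negative *hv -- which is exactly what the final
   return expression tests.  */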
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1
		<< (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
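
/* Both rotate routines build on the shift helpers via the usual
   identity rotl (x, n) == (x << n) | (x >> (prec - n)) (added
   commentary).  E.g. rotating the 8-bit value 0xB1 left by 4 within
   prec == 8 gives 0x10 | 0x0B == 0x1B.  */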
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra scaling element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1;  */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
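
/* Worked example of the rounding adjustments above (added commentary,
   hypothetical operands).  Dividing -7 by 2 gives a trial quotient of
   -3 with remainder -1, adjusted per CODE:
     TRUNC_DIV_EXPR  -> -3  (toward zero)
     FLOOR_DIV_EXPR  -> -4  (toward negative infinity)
     CEIL_DIV_EXPR   -> -3  (toward positive infinity)
     ROUND_DIV_EXPR  -> -4  (2*|rem| >= |den| and the ratio is negative)
   The remainder is then recomputed as num - quo * den, so
   FLOOR_MOD_EXPR yields 1 where TRUNC_MOD_EXPR yields -1.  */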
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ASINH:
    case BUILT_IN_ASINHF:
    case BUILT_IN_ASINHL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_ATANH:
    case BUILT_IN_ATANHF:
    case BUILT_IN_ATANHL:
    case BUILT_IN_CBRT:
    case BUILT_IN_CBRTF:
    case BUILT_IN_CBRTL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_SINH:
    case BUILT_IN_SINHF:
    case BUILT_IN_SINHL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
    case BUILT_IN_TANH:
    case BUILT_IN_TANHF:
    case BUILT_IN_TANHL:
      return true;

    default:
      break;
    }
  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
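
/* Added commentary: the final comparison singles out the one value
   whose negation overflows.  For a hypothetical 32-bit signed int,
   prec - 1 is 31 and (unsigned HOST_WIDE_INT) 1 << 31 is the bit
   pattern of INT_MIN; -INT_MIN is not representable, while every other
   constant negates safely, so only that value yields false.  */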
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
				  tem, TREE_OPERAND (t, 0)));
	      return fold_convert (type, tem);
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
				  tem, TREE_OPERAND (t, 1)));
	      return fold_convert (type, tem);
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_convert (type,
			     fold (build2 (MINUS_EXPR, TREE_TYPE (t),
					   TREE_OPERAND (t, 1),
					   TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
					       TREE_OPERAND (t, 0),
					       negate_expr (tem))));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
					       negate_expr (tem),
					       TREE_OPERAND (t, 1))));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
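
/* Illustrative decomposition (added commentary, hypothetical operands).
   With CODE == PLUS_EXPR and IN == x - 4, the MINUS_EXPR is accepted
   because addition and subtraction associate for integers; op1 == 4 is
   a subtracted literal, so the routine returns var == x with
   *minus_litp == 4 and *litp == *conp == 0.  fold can then recombine
   literals gathered from several such subtrees with associate_trees.  */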
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
			   fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
			   fold_convert (type, TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert (type, t1);
	}

      return build2 (code, type, fold_convert (type, t1),
		     fold_convert (type, t2));
    }

  return fold (build2 (code, type, fold_convert (type, t1),
		       fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type (t, 1,
			((!uns || is_sizetype) && overflow)
			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			TREE_CONSTANT_OVERFLOW (arg1)
			| TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
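
/* Usage sketch (added commentary, hypothetical constants).  Folding
   2 + 3 in a 32-bit int type reaches this function with PLUS_EXPR and
   two INTEGER_CSTs: add_double returns low == 5 with no overflow, and
   force_fit_type truncates or sign-extends the raw doubleword to the
   type's precision, deciding whether TREE_OVERFLOW must be set.  When
   NOTRUNC is nonzero the raw doubleword is kept and only the overflow
   flags are propagated.  */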
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
      return t;
    }
  return 0;
}
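
/* Added commentary: the RDIV_EXPR case above is the textbook complex
   division
     (r1 + i1*i) / (r2 + i2*i)
       == ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2)
   with the shared denominator computed once as magsquared.  For
   integral complex types the component divisions use TRUNC_DIV_EXPR,
   otherwise RDIV_EXPR.  */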
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and that type must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
	return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
	       && integer_zerop (arg1))
	return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
	return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build2 (code, type, arg0, arg1));
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
	      && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
		       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
		       fold_convert (ctype, size_binop (MINUS_EXPR,
							arg1, arg0)));
}
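
/* Worked case (added commentary, hypothetical values).  sizetype is
   unsigned, so for arg0 == 4 and arg1 == 12 a direct MINUS_EXPR would
   wrap to a huge unsigned constant.  Instead the code sees arg1 > arg0,
   forms 12 - 4 == 8 in the unsigned type (safe), converts 8 to
   ssizetype, and returns 0 - 8 == -8 there, where neither the
   conversion nor the negation can overflow.  */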
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* If we would build a constant wider than GCC supports,
	     leave the conversion unfolded.  */
	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
	    return NULL_TREE;

	  /* Given an integer constant, make new constant with new type,
	     appropriately sign-extended or truncated.  */
	  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
				  TREE_INT_CST_HIGH (arg1));

	  t = force_fit_type (t,
			      /* Don't set the overflow when
				 converting a pointer.  */
			      !POINTER_TYPE_P (TREE_TYPE (arg1)),
			      (TREE_INT_CST_HIGH (arg1) < 0
			       && (TYPE_UNSIGNED (type)
				   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			      | TREE_OVERFLOW (arg1),
			      TREE_CONSTANT_OVERFLOW (arg1));
	  return t;
	}
      else if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* The following code implements the floating point to integer
	     conversion rules required by the Java Language Specification,
	     that IEEE NaNs are mapped to zero and values that overflow
	     the target precision saturate, i.e. values greater than
	     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
	     are mapped to INT_MIN.  These semantics are allowed by the
	     C and C++ standards that simply state that the behavior of
	     FP-to-integer conversion is unspecified upon overflow.  */

	  HOST_WIDE_INT high, low;
	  REAL_VALUE_TYPE r;
	  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

	  switch (code)
	    {
	    case FIX_TRUNC_EXPR:
	      real_trunc (&r, VOIDmode, &x);
	      break;

	    case FIX_CEIL_EXPR:
	      real_ceil (&r, VOIDmode, &x);
	      break;

	    case FIX_FLOOR_EXPR:
	      real_floor (&r, VOIDmode, &x);
	      break;

	    case FIX_ROUND_EXPR:
	      real_round (&r, VOIDmode, &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* If R is NaN, return zero and show we have an overflow.  */
	  if (REAL_VALUE_ISNAN (r))
	    {
	      overflow = 1;
	      high = 0;
	      low = 0;
	    }

	  /* See if R is less than the lower bound or greater than the
	     upper bound.  */

	  if (! overflow)
	    {
	      tree lt = TYPE_MIN_VALUE (type);
	      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
	      if (REAL_VALUES_LESS (r, l))
		{
		  overflow = 1;
		  high = TREE_INT_CST_HIGH (lt);
		  low = TREE_INT_CST_LOW (lt);
		}
	    }

	  if (! overflow)
	    {
	      tree ut = TYPE_MAX_VALUE (type);
	      if (ut)
		{
		  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
		  if (REAL_VALUES_LESS (u, r))
		    {
		      overflow = 1;
		      high = TREE_INT_CST_HIGH (ut);
		      low = TREE_INT_CST_LOW (ut);
		    }
		}
	    }

	  if (! overflow)
	    REAL_VALUE_TO_INT (&low, &high, r);

	  t = build_int_cst_wide (type, low, high);

	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
			      TREE_CONSTANT_OVERFLOW (arg1));
	  return t;
	}
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    {
	      /* We make a copy of ARG1 so that we don't modify an
		 existing constant tree.  */
	      t = copy_node (arg1);
	      TREE_TYPE (t) = type;
	      return t;
	    }

	  t = build_real (type,
			  real_value_truncate (TYPE_MODE (type),
					       TREE_REAL_CST (arg1)));

	  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
    }
  return NULL_TREE;
}
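
/* Saturation example for the REAL_CST branch above (added commentary,
   hypothetical values).  Converting the double constant 1e10 to a
   32-bit signed int finds 1e10 greater than TYPE_MAX_VALUE, so the
   result is pinned to 2147483647 and force_fit_type is told an
   overflow occurred; a NaN converts to 0, likewise flagged.  C leaves
   out-of-range FP-to-integer conversion unspecified, so the Java-style
   saturating behavior is a valid choice.  */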
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold (build1 (NOP_EXPR, type, arg));

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
	  return fold_convert (type, tem);
	}
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold (build1 (NOP_EXPR, type, arg));

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold (build1 (FLOAT_EXPR, type, arg));

	case REAL_TYPE:
	  return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
			       type, arg));

	case COMPLEX_TYPE:
	  tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE: case CHAR_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));

	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
	      }

	    arg = save_expr (arg);
	    rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
	    ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold (build1 (NOP_EXPR, type, arg));

    case VOID_TYPE:
      return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));

    default:
      gcc_unreachable ();
    }
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return x;
  }
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

static enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    case EQ_EXPR:
    case NE_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    default:
      gcc_unreachable ();
    }
}
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
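
/* Added commentary on the encoding: there is one bit per possible
   comparison outcome -- LT == 1, EQ == 2, GT == 4, UNORDERED == 8 --
   and an operator is the set of outcomes for which it is true:
     COMPCODE_LE   == LT|EQ       == 3
     COMPCODE_LTGT == LT|GT       == 5
     COMPCODE_ORD  == LT|EQ|GT    == 7
     COMPCODE_NE   == LT|GT|UNORD == 13
   Conjunction and disjunction of two comparisons on the same operands
   are then plain bitwise AND and OR of their codes.  */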
/* Return a tree for the comparison which is the combination of
   doing the AND or OR (depending on CODE) of the two operations LCODE
   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
   the possibility of trapping if the mode has NaNs, and return NULL_TREE
   if this makes the transformation invalid.  */

static tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold (build2 (compcode_to_comparison (compcode),
			 truth_type, ll_arg, lr_arg));
}
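
/* Small example (added commentary, hypothetical operands).  For
   (x < y) && (x == y), the codes combine as COMPCODE_LT & COMPCODE_EQ
   == 1 & 2 == COMPCODE_FALSE, so the expression folds to constant
   false; (x < y) || (x == y) gives 1 | 2 == COMPCODE_LE and folds to
   x <= y, provided the trap analysis above allows the rewrite.  */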
/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == tcc_comparison
	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
/* Return nonzero if two operands (typically of the same tree node)
   are necessarily equal.  If either argument has side-effects this
   function returns zero.  FLAGS modifies behavior as follows:

   If OEP_ONLY_CONST is set, only return nonzero for constants.
   This function tests whether the operands are indistinguishable;
   it does not test whether they are equal using C's == operation.
   The distinction is important for IEEE floating point, because
   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
   (2) two NaNs may be indistinguishable, but NaN!=NaN.

   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
   even though it may hold multiple values during a function.
   This is because a GCC tree node guarantees that nothing else is
   executed between the evaluation of its "operands" (which may often
   be evaluated in arbitrary order).  Hence if the operands themselves
   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
   unset means assuming isochronic (or instantaneous) tree equivalence.
   Unless comparing arbitrary expression trees, such as from different
   statements, this flag can usually be left unset.

   If OEP_PURE_SAME is set, then pure functions with identical arguments
   are considered the same.  It is used when the caller has other ways
   to ensure that global memory is unchanged in between.  */
int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& tree_int_cst_equal (arg0, arg1));

      case REAL_CST:
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case VECTOR_CST:
	{
	  tree v1, v2;

	  if (TREE_CONSTANT_OVERFLOW (arg0)
	      || TREE_CONSTANT_OVERFLOW (arg1))
	    return 0;

	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;
2427 /* Define macros to test an operand from arg0 and arg1 for equality and a
2428 variant that allows null and views null as being different from any
2429 non-null value. In the latter case, if either is null, then both
2430 must be; otherwise, do the normal comparison. */
2431 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2432 TREE_OPERAND (arg1, N), flags)
2434 #define OP_SAME_WITH_NULL(N) \
2435 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2436 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2438 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2441 /* Two conversions are equal only if signedness and modes match. */
2442 switch (TREE_CODE (arg0))
2447 case FIX_TRUNC_EXPR:
2448 case FIX_FLOOR_EXPR:
2449 case FIX_ROUND_EXPR:
2450 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2451 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2461 case tcc_comparison:
2463 if (OP_SAME (0) && OP_SAME (1))
2466 /* For commutative ops, allow the other order. */
2467 return (commutative_tree_code (TREE_CODE (arg0))
2468 && operand_equal_p (TREE_OPERAND (arg0, 0),
2469 TREE_OPERAND (arg1, 1), flags)
2470 && operand_equal_p (TREE_OPERAND (arg0, 1),
2471 TREE_OPERAND (arg1, 0), flags));
2474 /* If either of the pointer (or reference) expressions we are
2475 dereferencing contains a side effect, these cannot be equal. */
2476 if (TREE_SIDE_EFFECTS (arg0)
2477 || TREE_SIDE_EFFECTS (arg1))
2480 switch (TREE_CODE (arg0))
2483 case ALIGN_INDIRECT_REF:
2484 case MISALIGNED_INDIRECT_REF:
2490 case ARRAY_RANGE_REF:
2491 /* Operands 2 and 3 may be null. */
2494 && OP_SAME_WITH_NULL (2)
2495 && OP_SAME_WITH_NULL (3));
2498 /* Handle operand 2 the same as for ARRAY_REF. */
2499 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2502 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2508 case tcc_expression:
2509 switch (TREE_CODE (arg0))
2512 case TRUTH_NOT_EXPR:
2515 case TRUTH_ANDIF_EXPR:
2516 case TRUTH_ORIF_EXPR:
2517 return OP_SAME (0) && OP_SAME (1);
2519 case TRUTH_AND_EXPR:
2521 case TRUTH_XOR_EXPR:
2522 if (OP_SAME (0) && OP_SAME (1))
2525 /* Otherwise take into account that this is a commutative operation. */
2526 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2527 TREE_OPERAND (arg1, 1), flags)
2528 && operand_equal_p (TREE_OPERAND (arg0, 1),
2529 TREE_OPERAND (arg1, 0), flags));
2532 /* If the CALL_EXPRs call different functions, then they
2533 clearly cannot be equal. */
2538 unsigned int cef = call_expr_flags (arg0);
2539 if (flags & OEP_PURE_SAME)
2540 cef &= ECF_CONST | ECF_PURE;
2547 /* Now see if all the arguments are the same. operand_equal_p
2548 does not handle TREE_LIST, so we walk the operands here
2549 feeding them to operand_equal_p. */
2550 arg0 = TREE_OPERAND (arg0, 1);
2551 arg1 = TREE_OPERAND (arg1, 1);
2552 while (arg0 && arg1)
2554 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2558 arg0 = TREE_CHAIN (arg0);
2559 arg1 = TREE_CHAIN (arg1);
2562 /* If we get here and both argument lists are exhausted
2563 then the CALL_EXPRs are equal. */
2564 return ! (arg0 || arg1);
2570 case tcc_declaration:
2571 /* Consider __builtin_sqrt equal to sqrt. */
2572 return (TREE_CODE (arg0) == FUNCTION_DECL
2573 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2574 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2575 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2582 #undef OP_SAME_WITH_NULL
2585 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2586 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2588 When in doubt, return 0. */
2591 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2593 int unsignedp1, unsignedpo;
2594 tree primarg0, primarg1, primother;
2595 unsigned int correct_width;
2597 if (operand_equal_p (arg0, arg1, 0))
2600 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2601 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2604 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2605 and see if the inner values are the same. This removes any
2606 signedness comparison, which doesn't matter here. */
2607 primarg0 = arg0, primarg1 = arg1;
2608 STRIP_NOPS (primarg0);
2609 STRIP_NOPS (primarg1);
2610 if (operand_equal_p (primarg0, primarg1, 0))
2613 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2614 actual comparison operand, ARG0.
2616 First throw away any conversions to wider types
2617 already present in the operands. */
2619 primarg1 = get_narrower (arg1, &unsignedp1);
2620 primother = get_narrower (other, &unsignedpo);
2622 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2623 if (unsignedp1 == unsignedpo
2624 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2625 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2627 tree type = TREE_TYPE (arg0);
2629 /* Make sure shorter operand is extended the right way
2630 to match the longer operand. */
2631 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2632 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2634 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2641 /* See if ARG is an expression that is either a comparison or is performing
2642 arithmetic on comparisons. The comparisons must only be comparing
2643 two different values, which will be stored in *CVAL1 and *CVAL2; if
2644 they are nonzero it means that some operands have already been found.
2645 No variables may be used anywhere else in the expression except in the
2646 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2647 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2649 If this is true, return 1. Otherwise, return zero. */
2652 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2654 enum tree_code code = TREE_CODE (arg);
2655 enum tree_code_class class = TREE_CODE_CLASS (code);
2657 /* We can handle some of the tcc_expression cases here. */
2658 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2660 else if (class == tcc_expression
2661 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2662 || code == COMPOUND_EXPR))
2665 else if (class == tcc_expression && code == SAVE_EXPR
2666 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2668 /* If we've already found a CVAL1 or CVAL2, this expression is
2669 too complex to handle. */
2670 if (*cval1 || *cval2)
2680 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2683 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2684 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2685 cval1, cval2, save_p));
2690 case tcc_expression:
2691 if (code == COND_EXPR)
2692 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2693 cval1, cval2, save_p)
2694 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2695 cval1, cval2, save_p)
2696 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2697 cval1, cval2, save_p));
2700 case tcc_comparison:
2701 /* First see if we can handle the first operand, then the second. For
2702 the second operand, we know *CVAL1 can't be zero. It must be that
2703 one side of the comparison is each of the values; test for the
2704 case where this isn't true by failing if the two operands are the same. */
2707 if (operand_equal_p (TREE_OPERAND (arg, 0),
2708 TREE_OPERAND (arg, 1), 0))
2712 *cval1 = TREE_OPERAND (arg, 0);
2713 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2715 else if (*cval2 == 0)
2716 *cval2 = TREE_OPERAND (arg, 0);
2717 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2722 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2724 else if (*cval2 == 0)
2725 *cval2 = TREE_OPERAND (arg, 1);
2726 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2738 /* ARG is a tree that is known to contain just arithmetic operations and
2739 comparisons. Evaluate the operations in the tree substituting NEW0 for
2740 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
2744 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2746 tree type = TREE_TYPE (arg);
2747 enum tree_code code = TREE_CODE (arg);
2748 enum tree_code_class class = TREE_CODE_CLASS (code);
2750 /* We can handle some of the tcc_expression cases here. */
2751 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2753 else if (class == tcc_expression
2754 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2760 return fold (build1 (code, type,
2761 eval_subst (TREE_OPERAND (arg, 0),
2762 old0, new0, old1, new1)));
2765 return fold (build2 (code, type,
2766 eval_subst (TREE_OPERAND (arg, 0),
2767 old0, new0, old1, new1),
2768 eval_subst (TREE_OPERAND (arg, 1),
2769 old0, new0, old1, new1)));
2771 case tcc_expression:
2775 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2778 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2781 return fold (build3 (code, type,
2782 eval_subst (TREE_OPERAND (arg, 0),
2783 old0, new0, old1, new1),
2784 eval_subst (TREE_OPERAND (arg, 1),
2785 old0, new0, old1, new1),
2786 eval_subst (TREE_OPERAND (arg, 2),
2787 old0, new0, old1, new1)));
2791 /* Fall through - ??? */
2793 case tcc_comparison:
2795 tree arg0 = TREE_OPERAND (arg, 0);
2796 tree arg1 = TREE_OPERAND (arg, 1);
2798 /* We need to check both for exact equality and tree equality. The
2799 former will be true if the operand has a side-effect. In that
2800 case, we know the operand occurred exactly once. */
2802 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2804 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2807 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2809 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2812 return fold (build2 (code, type, arg0, arg1));
2820 /* Return a tree for the case when the result of an expression is RESULT
2821 converted to TYPE and OMITTED was previously an operand of the expression
2822 but is now not needed (e.g., we folded OMITTED * 0).
2824 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2825 the conversion of RESULT to TYPE. */
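/* For instance (a sketch, not GCC code): folding "f () * 0" where f
   has side effects cannot simply yield 0; the tree built here
   corresponds to the C expression

     (f (), 0)

   so the call is still evaluated.  Without side effects in OMITTED,
   the result is just RESULT converted to TYPE.  */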
2828 omit_one_operand (tree type, tree result, tree omitted)
2830 tree t = fold_convert (type, result);
2832 if (TREE_SIDE_EFFECTS (omitted))
2833 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2835 return non_lvalue (t);
2838 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2841 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2843 tree t = fold_convert (type, result);
2845 if (TREE_SIDE_EFFECTS (omitted))
2846 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2848 return pedantic_non_lvalue (t);
2851 /* Return a tree for the case when the result of an expression is RESULT
2852 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2853 of the expression but are now not needed.
2855 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2856 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2857 evaluated before OMITTED2. Otherwise, if neither has side effects,
2858 just do the conversion of RESULT to TYPE. */
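/* Likewise a sketch (not GCC code): when both omitted operands have
   side effects, the tree built here evaluates them left to right
   before yielding the result, like the C expression

     (omitted1, (omitted2, result))  */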
2861 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2863 tree t = fold_convert (type, result);
2865 if (TREE_SIDE_EFFECTS (omitted2))
2866 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2867 if (TREE_SIDE_EFFECTS (omitted1))
2868 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2870 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2874 /* Return a simplified tree node for the truth-negation of ARG. This
2875 never alters ARG itself. We assume that ARG is an operation that
2876 returns a truth value (0 or 1).
2878 FIXME: one would think we would fold the result, but it causes
2879 problems with the dominator optimizer. */
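/* The rewrites below amount to De Morgan's laws plus direct inversion
   of comparison codes (a sketch, not GCC code):

     !(a && b)  ==  (!a || !b)
     !(a || b)  ==  (!a && !b)
     !(a < b)   ==  (a >= b)

   The last line is only valid when NaNs need not be honored: if either
   operand is NaN, both a < b and a >= b are false, which is why the
   comparison case below falls back to wrapping a TRUTH_NOT_EXPR around
   trapping floating-point comparisons.  */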
2881 invert_truthvalue (tree arg)
2883 tree type = TREE_TYPE (arg);
2884 enum tree_code code = TREE_CODE (arg);
2886 if (code == ERROR_MARK)
2889 /* If this is a comparison, we can simply invert it, except for
2890 floating-point non-equality comparisons, in which case we just
2891 enclose a TRUTH_NOT_EXPR around what we have. */
2893 if (TREE_CODE_CLASS (code) == tcc_comparison)
2895 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2896 if (FLOAT_TYPE_P (op_type)
2897 && flag_trapping_math
2898 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2899 && code != NE_EXPR && code != EQ_EXPR)
2900 return build1 (TRUTH_NOT_EXPR, type, arg);
2903 code = invert_tree_comparison (code,
2904 HONOR_NANS (TYPE_MODE (op_type)));
2905 if (code == ERROR_MARK)
2906 return build1 (TRUTH_NOT_EXPR, type, arg);
2908 return build2 (code, type,
2909 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2916 return fold_convert (type,
2917 build_int_cst (NULL_TREE, integer_zerop (arg)));
2919 case TRUTH_AND_EXPR:
2920 return build2 (TRUTH_OR_EXPR, type,
2921 invert_truthvalue (TREE_OPERAND (arg, 0)),
2922 invert_truthvalue (TREE_OPERAND (arg, 1)));
2925 return build2 (TRUTH_AND_EXPR, type,
2926 invert_truthvalue (TREE_OPERAND (arg, 0)),
2927 invert_truthvalue (TREE_OPERAND (arg, 1)));
2929 case TRUTH_XOR_EXPR:
2930 /* Here we can invert either operand. We invert the first operand
2931 unless the second operand is a TRUTH_NOT_EXPR in which case our
2932 result is the XOR of the first operand with the inside of the
2933 negation of the second operand. */
2935 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2936 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2937 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2939 return build2 (TRUTH_XOR_EXPR, type,
2940 invert_truthvalue (TREE_OPERAND (arg, 0)),
2941 TREE_OPERAND (arg, 1));
2943 case TRUTH_ANDIF_EXPR:
2944 return build2 (TRUTH_ORIF_EXPR, type,
2945 invert_truthvalue (TREE_OPERAND (arg, 0)),
2946 invert_truthvalue (TREE_OPERAND (arg, 1)));
2948 case TRUTH_ORIF_EXPR:
2949 return build2 (TRUTH_ANDIF_EXPR, type,
2950 invert_truthvalue (TREE_OPERAND (arg, 0)),
2951 invert_truthvalue (TREE_OPERAND (arg, 1)));
2953 case TRUTH_NOT_EXPR:
2954 return TREE_OPERAND (arg, 0);
2957 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2958 invert_truthvalue (TREE_OPERAND (arg, 1)),
2959 invert_truthvalue (TREE_OPERAND (arg, 2)));
2962 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2963 invert_truthvalue (TREE_OPERAND (arg, 1)));
2965 case NON_LVALUE_EXPR:
2966 return invert_truthvalue (TREE_OPERAND (arg, 0));
2969 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2974 return build1 (TREE_CODE (arg), type,
2975 invert_truthvalue (TREE_OPERAND (arg, 0)));
2978 if (!integer_onep (TREE_OPERAND (arg, 1)))
2980 return build2 (EQ_EXPR, type, arg,
2981 fold_convert (type, integer_zero_node));
2984 return build1 (TRUTH_NOT_EXPR, type, arg);
2986 case CLEANUP_POINT_EXPR:
2987 return build1 (CLEANUP_POINT_EXPR, type,
2988 invert_truthvalue (TREE_OPERAND (arg, 0)));
2993 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
2994 return build1 (TRUTH_NOT_EXPR, type, arg);
2997 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2998 operands are another bit-wise operation with a common input. If so,
2999 distribute the bit operations to save an operation and possibly two if
3000 constants are involved. For example, convert
3001 (A | B) & (A | C) into A | (B & C)
3002 Further simplification will occur if B and C are constants.
3004 If this optimization cannot be done, 0 will be returned. */
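/* A standalone check of the identities used here (a sketch, not GCC
   code); both hold for every bit pattern:

     int check (unsigned a, unsigned b, unsigned c)
     {
       return ((a | b) & (a | c)) == (a | (b & c))
              && ((a & b) | (a & c)) == (a & (b | c));
     }
   */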
3007 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3012 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3013 || TREE_CODE (arg0) == code
3014 || (TREE_CODE (arg0) != BIT_AND_EXPR
3015 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3018 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3020 common = TREE_OPERAND (arg0, 0);
3021 left = TREE_OPERAND (arg0, 1);
3022 right = TREE_OPERAND (arg1, 1);
3024 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3026 common = TREE_OPERAND (arg0, 0);
3027 left = TREE_OPERAND (arg0, 1);
3028 right = TREE_OPERAND (arg1, 0);
3030 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3032 common = TREE_OPERAND (arg0, 1);
3033 left = TREE_OPERAND (arg0, 0);
3034 right = TREE_OPERAND (arg1, 1);
3036 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3038 common = TREE_OPERAND (arg0, 1);
3039 left = TREE_OPERAND (arg0, 0);
3040 right = TREE_OPERAND (arg1, 0);
3045 return fold (build2 (TREE_CODE (arg0), type, common,
3046 fold (build2 (code, type, left, right))));
3049 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3050 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3053 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3056 tree result = build3 (BIT_FIELD_REF, type, inner,
3057 size_int (bitsize), bitsize_int (bitpos));
3059 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3064 /* Optimize a bit-field compare.
3066 There are two cases: the first is a compare against a constant and the
3067 second is a comparison of two items where the fields are at the same
3068 bit position relative to the start of a chunk (byte, halfword, word)
3069 large enough to contain it. In these cases we can avoid the shift
3070 implicit in bitfield extractions.
3072 For constants, we emit a compare of the shifted constant with the
3073 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3074 compared. For two fields at the same position, we do the ANDs with the
3075 similar mask and compare the result of the ANDs.
3077 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3078 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3079 are the left and right operands of the comparison, respectively.
3081 If the optimization described above can be done, we return the resulting
3082 tree. Otherwise we return zero. */
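/* A concrete sketch (hypothetical layout, not GCC code): on a typical
   little-endian target with

     struct s { unsigned a : 3; unsigned b : 5; } x;

   the constant case rewrites "x.b == 7", which would otherwise extract
   b with a shift, into a mask-and-compare on the containing byte w:

     (w & 0xF8) == (7 << 3)

   For two fields at the same position, both containing words are
   masked and the masked values compared directly.  */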
3085 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3088 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3089 tree type = TREE_TYPE (lhs);
3090 tree signed_type, unsigned_type;
3091 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3092 enum machine_mode lmode, rmode, nmode;
3093 int lunsignedp, runsignedp;
3094 int lvolatilep = 0, rvolatilep = 0;
3095 tree linner, rinner = NULL_TREE;
3099 /* Get all the information about the extractions being done. If the bit size
3100 is the same as the size of the underlying object, we aren't doing an
3101 extraction at all and so can do nothing. We also don't want to
3102 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3103 then will no longer be able to replace it. */
3104 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3105 &lunsignedp, &lvolatilep);
3106 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3107 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3112 /* If this is not a constant, we can only do something if bit positions,
3113 sizes, and signedness are the same. */
3114 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3115 &runsignedp, &rvolatilep);
3117 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3118 || lunsignedp != runsignedp || offset != 0
3119 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3123 /* See if we can find a mode to refer to this field. We should be able to,
3124 but fail if we can't. */
3125 nmode = get_best_mode (lbitsize, lbitpos,
3126 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3127 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3128 TYPE_ALIGN (TREE_TYPE (rinner))),
3129 word_mode, lvolatilep || rvolatilep);
3130 if (nmode == VOIDmode)
3133 /* Set signed and unsigned types of the precision of this mode for the comparison. */
3135 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3136 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3138 /* Compute the bit position and size for the new reference and our offset
3139 within it. If the new reference is the same size as the original, we
3140 won't optimize anything, so return zero. */
3141 nbitsize = GET_MODE_BITSIZE (nmode);
3142 nbitpos = lbitpos & ~ (nbitsize - 1);
3144 if (nbitsize == lbitsize)
3147 if (BYTES_BIG_ENDIAN)
3148 lbitpos = nbitsize - lbitsize - lbitpos;
3150 /* Make the mask to be used against the extracted field. */
3151 mask = build_int_cst (unsigned_type, -1);
3152 mask = force_fit_type (mask, 0, false, false);
3153 mask = fold_convert (unsigned_type, mask);
3154 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3155 mask = const_binop (RSHIFT_EXPR, mask,
3156 size_int (nbitsize - lbitsize - lbitpos), 0);
3159 /* If not comparing with constant, just rework the comparison and return. */
3161 return build2 (code, compare_type,
3162 build2 (BIT_AND_EXPR, unsigned_type,
3163 make_bit_field_ref (linner, unsigned_type,
3164 nbitsize, nbitpos, 1),
3166 build2 (BIT_AND_EXPR, unsigned_type,
3167 make_bit_field_ref (rinner, unsigned_type,
3168 nbitsize, nbitpos, 1),
3171 /* Otherwise, we are handling the constant case. See if the constant is too
3172 big for the field. Warn and return a tree for 0 (false) if so. We do
3173 this not only for its own sake, but to avoid having to test for this
3174 error case below. If we didn't, we might generate wrong code.
3176 For unsigned fields, the constant shifted right by the field length should
3177 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
3182 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3183 fold_convert (unsigned_type, rhs),
3184 size_int (lbitsize), 0)))
3186 warning ("comparison is always %d due to width of bit-field",
3188 return constant_boolean_node (code == NE_EXPR, compare_type);
3193 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3194 size_int (lbitsize - 1), 0);
3195 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3197 warning ("comparison is always %d due to width of bit-field",
3199 return constant_boolean_node (code == NE_EXPR, compare_type);
3203 /* Single-bit compares should always be against zero. */
3204 if (lbitsize == 1 && ! integer_zerop (rhs))
3206 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3207 rhs = fold_convert (type, integer_zero_node);
3210 /* Make a new bitfield reference, shift the constant over the
3211 appropriate number of bits and mask it with the computed mask
3212 (in case this was a signed field). If we changed it, make a new one. */
3213 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3216 TREE_SIDE_EFFECTS (lhs) = 1;
3217 TREE_THIS_VOLATILE (lhs) = 1;
3220 rhs = fold (const_binop (BIT_AND_EXPR,
3221 const_binop (LSHIFT_EXPR,
3222 fold_convert (unsigned_type, rhs),
3223 size_int (lbitpos), 0),
3226 return build2 (code, compare_type,
3227 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3231 /* Subroutine for fold_truthop: decode a field reference.
3233 If EXP is a comparison reference, we return the innermost reference.
3235 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3236 set to the starting bit number.
3238 If the innermost field can be completely contained in a mode-sized
3239 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3241 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3242 otherwise it is not changed.
3244 *PUNSIGNEDP is set to the signedness of the field.
3246 *PMASK is set to the mask used. This is either contained in a
3247 BIT_AND_EXPR or derived from the width of the field.
3249 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3251 Return 0 if this is not a component reference or is one that we can't
3252 do anything with. */
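/* In machine arithmetic the mask built below is (a sketch, assuming a
   32-bit unsigned type and 0 < bitsize <= 32):

     unsigned mask = ~0u;
     mask = (mask << (32 - bitsize)) >> (32 - bitsize);

   that is, BITSIZE low-order ones, which is then ANDed with any mask
   taken from an enclosing BIT_AND_EXPR.  */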
3255 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3256 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3257 int *punsignedp, int *pvolatilep,
3258 tree *pmask, tree *pand_mask)
3260 tree outer_type = 0;
3262 tree mask, inner, offset;
3264 unsigned int precision;
3266 /* All the optimizations using this function assume integer fields.
3267 There are problems with FP fields since the type_for_size call
3268 below can fail for, e.g., XFmode. */
3269 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3272 /* We are interested in the bare arrangement of bits, so strip everything
3273 that doesn't affect the machine mode. However, record the type of the
3274 outermost expression if it may matter below. */
3275 if (TREE_CODE (exp) == NOP_EXPR
3276 || TREE_CODE (exp) == CONVERT_EXPR
3277 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3278 outer_type = TREE_TYPE (exp);
3281 if (TREE_CODE (exp) == BIT_AND_EXPR)
3283 and_mask = TREE_OPERAND (exp, 1);
3284 exp = TREE_OPERAND (exp, 0);
3285 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3286 if (TREE_CODE (and_mask) != INTEGER_CST)
3290 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3291 punsignedp, pvolatilep);
3292 if ((inner == exp && and_mask == 0)
3293 || *pbitsize < 0 || offset != 0
3294 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3297 /* If the number of bits in the reference is the same as the bitsize of
3298 the outer type, then the outer type gives the signedness. Otherwise
3299 (in case of a small bitfield) the signedness is unchanged. */
3300 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3301 *punsignedp = TYPE_UNSIGNED (outer_type);
3303 /* Compute the mask to access the bitfield. */
3304 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3305 precision = TYPE_PRECISION (unsigned_type);
3307 mask = build_int_cst (unsigned_type, -1);
3308 mask = force_fit_type (mask, 0, false, false);
3310 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3311 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3313 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3315 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3316 fold_convert (unsigned_type, and_mask), mask));
3319 *pand_mask = and_mask;
3323 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order bits. */
3327 all_ones_mask_p (tree mask, int size)
3329 tree type = TREE_TYPE (mask);
3330 unsigned int precision = TYPE_PRECISION (type);
3333 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3334 tmask = force_fit_type (tmask, 0, false, false);
3337 tree_int_cst_equal (mask,
3338 const_binop (RSHIFT_EXPR,
3339 const_binop (LSHIFT_EXPR, tmask,
3340 size_int (precision - size),
3342 size_int (precision - size), 0));
3345 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3346 represents the sign bit of EXP's type. If EXP represents a sign
3347 or zero extension, also test VAL against the unextended type.
3348 The return value is the (sub)expression whose sign bit is VAL,
3349 or NULL_TREE otherwise. */
3352 sign_bit_p (tree exp, tree val)
3354 unsigned HOST_WIDE_INT mask_lo, lo;
3355 HOST_WIDE_INT mask_hi, hi;
3359 /* Tree EXP must have an integral type. */
3360 t = TREE_TYPE (exp);
3361 if (! INTEGRAL_TYPE_P (t))
3364 /* Tree VAL must be an integer constant. */
3365 if (TREE_CODE (val) != INTEGER_CST
3366 || TREE_CONSTANT_OVERFLOW (val))
3369 width = TYPE_PRECISION (t);
3370 if (width > HOST_BITS_PER_WIDE_INT)
3372 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3375 mask_hi = ((unsigned HOST_WIDE_INT) -1
3376 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3382 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3385 mask_lo = ((unsigned HOST_WIDE_INT) -1
3386 >> (HOST_BITS_PER_WIDE_INT - width));
3389 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3390 treat VAL as if it were unsigned. */
3391 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3392 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3395 /* Handle extension from a narrower type. */
3396 if (TREE_CODE (exp) == NOP_EXPR
3397 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3398 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3403 /* Subroutine for fold_truthop: determine if an operand is simple enough
3404 to be evaluated unconditionally. */
3407 simple_operand_p (tree exp)
3409 /* Strip any conversions that don't change the machine mode. */
3412 return (CONSTANT_CLASS_P (exp)
3413 || TREE_CODE (exp) == SSA_NAME
3415 && ! TREE_ADDRESSABLE (exp)
3416 && ! TREE_THIS_VOLATILE (exp)
3417 && ! DECL_NONLOCAL (exp)
3418 /* Don't regard global variables as simple. They may be
3419 allocated in ways unknown to the compiler (shared memory,
3420 #pragma weak, etc). */
3421 && ! TREE_PUBLIC (exp)
3422 && ! DECL_EXTERNAL (exp)
3423 /* Loading a static variable is unduly expensive, but global
3424 registers aren't expensive. */
3425 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3428 /* The following functions are subroutines to fold_range_test and allow it to
3429 try to change a logical combination of comparisons into a range test.
3432 For example, X == 2 || X == 3 || X == 4 || X == 5 is changed to
3436 (unsigned) (X - 2) <= 3
3438 We describe each set of comparisons as being either inside or outside
3439 a range, using a variable named like IN_P, and then describe the
3440 range with a lower and upper bound. If one of the bounds is omitted,
3441 it represents either the highest or lowest value of the type.
3443 In the comments below, we represent a range by two numbers in brackets
3444 preceded by a "+" to designate being inside that range, or a "-" to
3445 designate being outside that range, so the condition can be inverted by
3446 flipping the prefix. An omitted bound is represented by a "-". For
3447 example, "- [-, 10]" means being outside the range starting at the lowest
3448 possible value and ending at 10, in other words, being greater than 10.
3449 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3452 We set up things so that the missing bounds are handled in a consistent
3453 manner so neither a missing bound nor "true" and "false" need to be
3454 handled using a special case. */
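/* A standalone illustration of the example above (a sketch, not GCC
   code):

     int naive (int x)  { return x == 2 || x == 3 || x == 4 || x == 5; }
     int folded (int x) { return (unsigned) x - 2u <= 3u; }

   These agree for every int x: subtracting the low bound rebases the
   range at zero, values below the low bound wrap around to huge
   unsigned numbers, and a single unsigned comparison then tests both
   bounds at once.  (The sketch converts before subtracting so the
   subtraction is defined even at INT_MIN.)  */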
3456 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3457 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3458 and UPPER1_P are nonzero if the respective argument is an upper bound
3459 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3460 must be specified for a comparison. ARG1 will be converted to ARG0's
3461 type if both are specified. */
3464 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3465 tree arg1, int upper1_p)
3471 /* If neither arg represents infinity, do the normal operation.
3472 Else, if not a comparison, return infinity. Else handle the special
3473 comparison rules. Note that most of the cases below won't occur, but
3474 are handled for consistency. */
3476 if (arg0 != 0 && arg1 != 0)
3478 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3479 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3481 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3484 if (TREE_CODE_CLASS (code) != tcc_comparison)
3487 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3488 for neither. In real maths, we cannot assume open ended ranges are
3489 the same. But, this is computer arithmetic, where numbers are finite.
3490 We can therefore make the transformation of any unbounded range with
3491 the value Z, Z being greater than any representable number. This permits
3492 us to treat unbounded ranges as equal. */
3493 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3494 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3498 result = sgn0 == sgn1;
3501 result = sgn0 != sgn1;
3504 result = sgn0 < sgn1;
3507 result = sgn0 <= sgn1;
3510 result = sgn0 > sgn1;
3513 result = sgn0 >= sgn1;
3519 return constant_boolean_node (result, type);
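/* For instance (a sketch): comparing a finite bound against a missing
   upper bound gives sgn0 == 0 and sgn1 == 1, so LT_EXPR yields true;
   every finite bound is below +infinity, and symmetrically every
   finite bound is above a missing lower bound (sgn == -1).  */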
3522 /* Given EXP, a logical expression, set the range it is testing into
3523 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3524 actually being tested. *PLOW and *PHIGH will be made of the same type
3525 as the returned expression. If EXP is not a comparison, we will most
3526 likely not be returning a useful value and range. */
3529 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3531 enum tree_code code;
3532 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3533 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3535 tree low, high, n_low, n_high;
3537 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3538 and see if we can refine the range. Some of the cases below may not
3539 happen, but it doesn't seem worth worrying about this. We "continue"
3540 the outer loop when we've changed something; otherwise we "break"
3541 the switch, which will "break" the while. */
3544 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3548 code = TREE_CODE (exp);
3549 exp_type = TREE_TYPE (exp);
3551 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3553 if (TREE_CODE_LENGTH (code) > 0)
3554 arg0 = TREE_OPERAND (exp, 0);
3555 if (TREE_CODE_CLASS (code) == tcc_comparison
3556 || TREE_CODE_CLASS (code) == tcc_unary
3557 || TREE_CODE_CLASS (code) == tcc_binary)
3558 arg0_type = TREE_TYPE (arg0);
3559 if (TREE_CODE_CLASS (code) == tcc_binary
3560 || TREE_CODE_CLASS (code) == tcc_comparison
3561 || (TREE_CODE_CLASS (code) == tcc_expression
3562 && TREE_CODE_LENGTH (code) > 1))
3563 arg1 = TREE_OPERAND (exp, 1);
3568 case TRUTH_NOT_EXPR:
3569 in_p = ! in_p, exp = arg0;
3572 case EQ_EXPR: case NE_EXPR:
3573 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3574 /* We can only do something if the range is testing for zero
3575 and if the second operand is an integer constant. Note that
3576 saying something is "in" the range we make is done by
3577 complementing IN_P since it is set in the initial case of
3578 being not equal to zero; "out" is leaving it alone. */
3579 if (low == 0 || high == 0
3580 || ! integer_zerop (low) || ! integer_zerop (high)
3581 || TREE_CODE (arg1) != INTEGER_CST)
3586 case NE_EXPR: /* - [c, c] */
3589 case EQ_EXPR: /* + [c, c] */
3590 in_p = ! in_p, low = high = arg1;
3592 case GT_EXPR: /* - [-, c] */
3593 low = 0, high = arg1;
3595 case GE_EXPR: /* + [c, -] */
3596 in_p = ! in_p, low = arg1, high = 0;
3598 case LT_EXPR: /* - [c, -] */
3599 low = arg1, high = 0;
3601 case LE_EXPR: /* + [-, c] */
3602 in_p = ! in_p, low = 0, high = arg1;
3608 /* If this is an unsigned comparison, we also know that EXP is
3609 greater than or equal to zero. We base the range tests we make
3610 on that fact, so we record it here so we can parse existing
3611 range tests. We test arg0_type since often the return type
3612 of, e.g. EQ_EXPR, is boolean. */
3613 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3615 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3617 fold_convert (arg0_type, integer_zero_node),
3621 in_p = n_in_p, low = n_low, high = n_high;
3623 /* If the high bound is missing, but we have a nonzero low
3624 bound, reverse the range so it goes from zero to the low bound minus 1. */
3626 if (high == 0 && low && ! integer_zerop (low))
3629 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3630 integer_one_node, 0);
3631 low = fold_convert (arg0_type, integer_zero_node);
3639 /* (-x) IN [a,b] -> x in [-b, -a] */
3640 n_low = range_binop (MINUS_EXPR, exp_type,
3641 fold_convert (exp_type, integer_zero_node),
3643 n_high = range_binop (MINUS_EXPR, exp_type,
3644 fold_convert (exp_type, integer_zero_node),
3646 low = n_low, high = n_high;
3652 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3653 fold_convert (exp_type, integer_one_node));
3656 case PLUS_EXPR: case MINUS_EXPR:
3657 if (TREE_CODE (arg1) != INTEGER_CST)
3660 /* If EXP is signed, any overflow in the computation is undefined,
3661 so we don't worry about it so long as our computations on
3662 the bounds don't overflow. For unsigned, overflow is defined
3663 and this is exactly the right thing. */
3664 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3665 arg0_type, low, 0, arg1, 0);
3666 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3667 arg0_type, high, 1, arg1, 0);
3668 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3669 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3672 /* Check for an unsigned range which has wrapped around the maximum
3673 value thus making n_high < n_low, and normalize it. */
3674 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3676 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3677 integer_one_node, 0);
3678 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3679 integer_one_node, 0);
3681 /* If the range is of the form +/- [ x+1, x ], we won't
3682 be able to normalize it. But then, it represents the
3683 whole range or the empty set, so make it +/- [-, -]. */
3685 if (tree_int_cst_equal (n_low, low)
3686 && tree_int_cst_equal (n_high, high))
3692 low = n_low, high = n_high;
3697 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3698 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3701 if (! INTEGRAL_TYPE_P (arg0_type)
3702 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3703 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3706 n_low = low, n_high = high;
3709 n_low = fold_convert (arg0_type, n_low);
3712 n_high = fold_convert (arg0_type, n_high);
3715 /* If we're converting arg0 from an unsigned type, to exp,
3716 a signed type, we will be doing the comparison as unsigned.
3717 The tests above have already verified that LOW and HIGH are both positive.
3720 So we have to ensure that we will handle large unsigned
3721 values the same way that the current signed bounds treat negative values. */
3724 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3727 tree equiv_type = lang_hooks.types.type_for_mode
3728 (TYPE_MODE (arg0_type), 1);
3730 /* A range without an upper bound is, naturally, unbounded.
3731 Since convert would have cropped a very large value, use
3732 the max value for the destination type. */
3734 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3735 : TYPE_MAX_VALUE (arg0_type);
3737 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3738 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3739 fold_convert (arg0_type,
3741 fold_convert (arg0_type,
3742 integer_one_node)));
3744 /* If the low bound is specified, "and" the range with the
3745 range for which the original unsigned value will be positive. */
3749 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3750 1, n_low, n_high, 1,
3751 fold_convert (arg0_type,
3756 in_p = (n_in_p == in_p);
3760 /* Otherwise, "or" the range with the range of the input
3761 that will be interpreted as negative. */
3762 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3763 0, n_low, n_high, 1,
3764 fold_convert (arg0_type,
3769 in_p = (in_p != n_in_p);
3774 low = n_low, high = n_high;
3784 /* If EXP is a constant, we can evaluate whether this is true or false. */
3785 if (TREE_CODE (exp) == INTEGER_CST)
3787 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3789 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3795 *pin_p = in_p, *plow = low, *phigh = high;
3799 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3800 type, TYPE, return an expression to test if EXP is in (or out of, depending
3801 on IN_P) the range. Return 0 if the test couldn't be created. */
3804 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3806 tree etype = TREE_TYPE (exp);
3811 value = build_range_check (type, exp, 1, low, high);
3813 return invert_truthvalue (value);
3818 if (low == 0 && high == 0)
3819 return fold_convert (type, integer_one_node);
3822 return fold (build2 (LE_EXPR, type, exp, high));
3825 return fold (build2 (GE_EXPR, type, exp, low));
3827 if (operand_equal_p (low, high, 0))
3828 return fold (build2 (EQ_EXPR, type, exp, low));
3830 if (integer_zerop (low))
3832 if (! TYPE_UNSIGNED (etype))
3834 etype = lang_hooks.types.unsigned_type (etype);
3835 high = fold_convert (etype, high);
3836 exp = fold_convert (etype, exp);
3838 return build_range_check (type, exp, 1, 0, high);
3841 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3842 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3844 unsigned HOST_WIDE_INT lo;
3848 prec = TYPE_PRECISION (etype);
3849 if (prec <= HOST_BITS_PER_WIDE_INT)
3852 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3856 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3857 lo = (unsigned HOST_WIDE_INT) -1;
3860 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3862 if (TYPE_UNSIGNED (etype))
3864 etype = lang_hooks.types.signed_type (etype);
3865 exp = fold_convert (etype, exp);
3867 return fold (build2 (GT_EXPR, type, exp,
3868 fold_convert (etype, integer_zero_node)));
3872 value = const_binop (MINUS_EXPR, high, low, 0);
3873 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3875 tree utype, minv, maxv;
3877 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3878 for the type in question, as we rely on this here. */
3879 switch (TREE_CODE (etype))
3884 utype = lang_hooks.types.unsigned_type (etype);
3885 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3886 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3887 integer_one_node, 1);
3888 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3889 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3893 high = fold_convert (etype, high);
3894 low = fold_convert (etype, low);
3895 exp = fold_convert (etype, exp);
3896 value = const_binop (MINUS_EXPR, high, low, 0);
3904 if (value != 0 && ! TREE_OVERFLOW (value))
3905 return build_range_check (type,
3906 fold (build2 (MINUS_EXPR, etype, exp, low)),
3907 1, fold_convert (etype, integer_zero_node),
3913 /* Given two ranges, see if we can merge them into one. Return 1 if we
3914 can, 0 if we can't. Set the output range into the specified parameters. */
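/* Worked examples (a sketch) in the bracket notation introduced above:

     + [2, 10] with + [5, 20]   =>  + [5, 10]   (overlapping inclusions)
     + [2, 10] with + [12, 20]  =>  - [-, -]    (disjoint: always false)
     - [0, 4]  with - [5, 9]    =>  - [0, 9]    (adjacent exclusions)  */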
3917 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3918 tree high0, int in1_p, tree low1, tree high1)
3926 int lowequal = ((low0 == 0 && low1 == 0)
3927 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3928 low0, 0, low1, 0)));
3929 int highequal = ((high0 == 0 && high1 == 0)
3930 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3931 high0, 1, high1, 1)));
3933 /* Make range 0 be the range that starts first, or ends last if they
3934 start at the same value. Swap them if it isn't. */
3935 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3938 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3939 high1, 1, high0, 1))))
3941 temp = in0_p, in0_p = in1_p, in1_p = temp;
3942 tem = low0, low0 = low1, low1 = tem;
3943 tem = high0, high0 = high1, high1 = tem;
3946 /* Now flag two cases, whether the ranges are disjoint or whether the
3947 second range is totally subsumed in the first. Note that the tests
3948 below are simplified by the ones above. */
3949 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3950 high0, 1, low1, 0));
3951 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3952 high1, 1, high0, 1));
3954 /* We now have four cases, depending on whether we are including or
3955 excluding the two ranges. */
3958 /* If they don't overlap, the result is false. If the second range
3959 is a subset it is the result. Otherwise, the range is from the start
3960 of the second to the end of the first. */
3962 in_p = 0, low = high = 0;
3964 in_p = 1, low = low1, high = high1;
3966 in_p = 1, low = low1, high = high0;
3969 else if (in0_p && ! in1_p)
3971 /* If they don't overlap, the result is the first range. If they are
3972 equal, the result is false. If the second range is a subset of the
3973 first, and the ranges begin at the same place, we go from just after
3974 the end of the first range to the end of the second. If the second
3975 range is not a subset of the first, or if it is a subset and both
3976 ranges end at the same place, the range starts at the start of the
3977 first range and ends just before the second range.
3978 Otherwise, we can't describe this as a single range. */
3980 in_p = 1, low = low0, high = high0;
3981 else if (lowequal && highequal)
3982 in_p = 0, low = high = 0;
3983 else if (subset && lowequal)
3985 in_p = 1, high = high0;
3986 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3987 integer_one_node, 0);
3989 else if (! subset || highequal)
3991 in_p = 1, low = low0;
3992 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3993 integer_one_node, 0);
3999 else if (! in0_p && in1_p)
4001 /* If they don't overlap, the result is the second range. If the second
4002 is a subset of the first, the result is false. Otherwise,
4003 the range starts just after the first range and ends at the
4004 end of the second. */
4006 in_p = 1, low = low1, high = high1;
4007 else if (subset || highequal)
4008 in_p = 0, low = high = 0;
4011 in_p = 1, high = high1;
4012 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4013 integer_one_node, 0);
4019 /* The case where we are excluding both ranges. Here the complex case
4020 is if they don't overlap. In that case, the only time we have a
4021 range is if they are adjacent. If the second is a subset of the
4022 first, the result is the first. Otherwise, the range to exclude
4023 starts at the beginning of the first range and ends at the end of the second. */
4027 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4028 range_binop (PLUS_EXPR, NULL_TREE,
4030 integer_one_node, 1),
4032 in_p = 0, low = low0, high = high1;
4035 /* Canonicalize - [min, x] into - [-, x]. */
4036 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4037 switch (TREE_CODE (TREE_TYPE (low0)))
4040 if (TYPE_PRECISION (TREE_TYPE (low0))
4041 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4046 if (tree_int_cst_equal (low0,
4047 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4051 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4052 && integer_zerop (low0))
4059 /* Canonicalize - [x, max] into - [x, -]. */
4060 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4061 switch (TREE_CODE (TREE_TYPE (high1)))
4064 if (TYPE_PRECISION (TREE_TYPE (high1))
4065 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4070 if (tree_int_cst_equal (high1,
4071 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4075 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4076 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4078 integer_one_node, 1)))
4085 /* The ranges might also be adjacent between the maximum and
4086 minimum values of the given type. For
4087 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4088 return + [x + 1, y - 1]. */
4089 if (low0 == 0 && high1 == 0)
4091 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4092 integer_one_node, 1);
4093 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4094 integer_one_node, 0);
4095 if (low == 0 || high == 0)
4105 in_p = 0, low = low0, high = high0;
4107 in_p = 0, low = low0, high = high1;
4110 *pin_p = in_p, *plow = low, *phigh = high;
4115 /* Subroutine of fold, looking inside expressions of the form
4116 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4117 of the COND_EXPR. This function is being used also to optimize
4118 A op B ? C : A, by reversing the comparison first.
4120 Return a folded expression whose code is not a COND_EXPR
4121 anymore, or NULL_TREE if no folding opportunity is found. */
4124 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4126 enum tree_code comp_code = TREE_CODE (arg0);
4127 tree arg00 = TREE_OPERAND (arg0, 0);
4128 tree arg01 = TREE_OPERAND (arg0, 1);
4129 tree arg1_type = TREE_TYPE (arg1);
4135 /* If we have A op 0 ? A : -A, consider applying the following transformations:
4138 A == 0? A : -A same as -A
4139 A != 0? A : -A same as A
4140 A >= 0? A : -A same as abs (A)
4141 A > 0? A : -A same as abs (A)
4142 A <= 0? A : -A same as -abs (A)
4143 A < 0? A : -A same as -abs (A)
4145 None of these transformations work for modes with signed
4146 zeros. If A is +/-0, the first two transformations will
4147 change the sign of the result (from +0 to -0, or vice
4148 versa). The last four will fix the sign of the result,
4149 even though the original expressions could be positive or
4150 negative, depending on the sign of A.
4152 Note that all these transformations are correct if A is
4153 NaN, since the two alternatives (A and -A) are also NaNs. */
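/* Concretely (a sketch): for A = -0.0, "A == 0 ? A : -A" evaluates to
   -0.0, while the rewritten -A gives +0.0, so the first two rewrites
   are performed only when signed zeros need not be honored.  */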
4154 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4155 ? real_zerop (arg01)
4156 : integer_zerop (arg01))
4157 && TREE_CODE (arg2) == NEGATE_EXPR
4158 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4163 tem = fold_convert (arg1_type, arg1);
4164 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4167 return pedantic_non_lvalue (fold_convert (type, arg1));
4170 if (flag_trapping_math)
4175 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4176 arg1 = fold_convert (lang_hooks.types.signed_type
4177 (TREE_TYPE (arg1)), arg1);
4178 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4179 return pedantic_non_lvalue (fold_convert (type, tem));
4182 if (flag_trapping_math)
4186 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4187 arg1 = fold_convert (lang_hooks.types.signed_type
4188 (TREE_TYPE (arg1)), arg1);
4189 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4190 return negate_expr (fold_convert (type, tem));
4192 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4196 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4197 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4198 both transformations are correct when A is NaN: A != 0
4199 is then true, and A == 0 is false. */
4201 if (integer_zerop (arg01) && integer_zerop (arg2))
4203 if (comp_code == NE_EXPR)
4204 return pedantic_non_lvalue (fold_convert (type, arg1));
4205 else if (comp_code == EQ_EXPR)
4206 return fold_convert (type, integer_zero_node);
4209 /* Try some transformations of A op B ? A : B.
4211 A == B? A : B same as B
4212 A != B? A : B same as A
4213 A >= B? A : B same as max (A, B)
4214 A > B? A : B same as max (B, A)
4215 A <= B? A : B same as min (A, B)
4216 A < B? A : B same as min (B, A)
4218 As above, these transformations don't work in the presence
4219 of signed zeros. For example, if A and B are zeros of
4220 opposite sign, the first two transformations will change
4221 the sign of the result. In the last four, the original
4222 expressions give different results for (A=+0, B=-0) and
4223 (A=-0, B=+0), but the transformed expressions do not.
4225 The first two transformations are correct if either A or B
4226 is a NaN. In the first transformation, the condition will
4227 be false, and B will indeed be chosen. In the case of the
4228 second transformation, the condition A != B will be true,
4229 and A will be chosen.
4231 The conversions to max() and min() are not correct if B is
4232 a number and A is not. The conditions in the original
4233 expressions will be false, so all four give B. The min()
4234 and max() versions would give a NaN instead. */
4235 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4237 tree comp_op0 = arg00;
4238 tree comp_op1 = arg01;
4239 tree comp_type = TREE_TYPE (comp_op0);
4241 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4242 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4252 return pedantic_non_lvalue (fold_convert (type, arg2));
4254 return pedantic_non_lvalue (fold_convert (type, arg1));
4259 /* In C++ a ?: expression can be an lvalue, so put the
4260 operand which will be used if they are equal first
4261 so that we can convert this back to the
4262 corresponding COND_EXPR. */
4263 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4265 comp_op0 = fold_convert (comp_type, comp_op0);
4266 comp_op1 = fold_convert (comp_type, comp_op1);
4267 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4268 ? fold (build2 (MIN_EXPR, comp_type, comp_op0, comp_op1))
4269 : fold (build2 (MIN_EXPR, comp_type, comp_op1, comp_op0));
4270 return pedantic_non_lvalue (fold_convert (type, tem));
4277 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4279 comp_op0 = fold_convert (comp_type, comp_op0);
4280 comp_op1 = fold_convert (comp_type, comp_op1);
4281 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4282 ? fold (build2 (MAX_EXPR, comp_type, comp_op0, comp_op1))
4283 : fold (build2 (MAX_EXPR, comp_type, comp_op1, comp_op0));
4284 return pedantic_non_lvalue (fold_convert (type, tem));
4288 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4289 return pedantic_non_lvalue (fold_convert (type, arg2));
4292 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4293 return pedantic_non_lvalue (fold_convert (type, arg1));
4296 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4301 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4302 we might still be able to simplify this. For example,
4303 if C1 is one less or one more than C2, this might have started
4304 out as a MIN or MAX and been transformed by this function.
4305 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4307 if (INTEGRAL_TYPE_P (type)
4308 && TREE_CODE (arg01) == INTEGER_CST
4309 && TREE_CODE (arg2) == INTEGER_CST)
4313 /* We can replace A with C1 in this case. */
4314 arg1 = fold_convert (type, arg01);
4315 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4318 /* If C1 is C2 + 1, this is min(A, C2). */
4319 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4321 && operand_equal_p (arg01,
4322 const_binop (PLUS_EXPR, arg2,
4323 integer_one_node, 0),
4325 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4326 type, arg1, arg2)));
4330 /* If C1 is C2 - 1, this is min(A, C2). */
4331 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4333 && operand_equal_p (arg01,
4334 const_binop (MINUS_EXPR, arg2,
4335 integer_one_node, 0),
4337 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4338 type, arg1, arg2)));
4342 /* If C1 is C2 - 1, this is max(A, C2). */
4343 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4345 && operand_equal_p (arg01,
4346 const_binop (MINUS_EXPR, arg2,
4347 integer_one_node, 0),
4349 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4350 type, arg1, arg2)));
4354 /* If C1 is C2 + 1, this is max(A, C2). */
4355 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4357 && operand_equal_p (arg01,
4358 const_binop (PLUS_EXPR, arg2,
4359 integer_one_node, 0),
4361 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4362 type, arg1, arg2)));
4375 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4376 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4379 /* EXP is some logical combination of boolean tests. See if we can
4380 merge it into some range test. Return the new tree if so. */
4383 fold_range_test (tree exp)
4385 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4386 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4387 int in0_p, in1_p, in_p;
4388 tree low0, low1, low, high0, high1, high;
4389 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4390 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4393 /* If this is an OR operation, invert both sides; we will invert
4394 again at the end. */
4396 in0_p = ! in0_p, in1_p = ! in1_p;
4398 /* If both expressions are the same, if we can merge the ranges, and we
4399 can build the range test, return it or it inverted. If one of the
4400 ranges is always true or always false, consider it to be the same
4401 expression as the other. */
4402 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4403 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4404 in1_p, low1, high1)
4405 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4406 lhs != 0 ? lhs
4407 : rhs != 0 ? rhs : integer_zero_node,
4408 in_p, low, high))))
4409 return or_op ? invert_truthvalue (tem) : tem;
4411 /* On machines where the branch cost is expensive, if this is a
4412 short-circuited branch and the underlying object on both sides
4413 is the same, make a non-short-circuit operation. */
4414 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4415 && lhs != 0 && rhs != 0
4416 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4417 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4418 && operand_equal_p (lhs, rhs, 0))
4420 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4421 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4422 which cases we can't do this. */
4423 if (simple_operand_p (lhs))
4424 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4425 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4426 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4427 TREE_OPERAND (exp, 1));
4429 else if (lang_hooks.decls.global_bindings_p () == 0
4430 && ! CONTAINS_PLACEHOLDER_P (lhs))
4432 tree common = save_expr (lhs);
4434 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4435 or_op ? ! in0_p : in0_p,
4436 low0, high0))
4437 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4438 or_op ? ! in1_p : in1_p,
4439 low1, high1))))
4440 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4441 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4442 TREE_TYPE (exp), lhs, rhs);
4449 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4450 bit value. Arrange things so the extra bits will be set to zero if and
4451 only if C is sign-extended to its full width. If MASK is nonzero,
4452 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4455 unextend (tree c, int p, int unsignedp, tree mask)
4457 tree type = TREE_TYPE (c);
4458 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4459 tree temp;
4461 if (p == modesize || unsignedp)
4462 return c;
4464 /* We work by getting just the sign bit into the low-order bit, then
4465 into the high-order bit, then sign-extend. We then XOR that value
4466 with C. */
4467 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4468 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4470 /* We must use a signed type in order to get an arithmetic right shift.
4471 However, we must also avoid introducing accidental overflows, so that
4472 a subsequent call to integer_zerop will work. Hence we must
4473 do the type conversion here. At this point, the constant is either
4474 zero or one, and the conversion to a signed type can never overflow.
4475 We could get an overflow if this conversion is done anywhere else. */
4476 if (TYPE_UNSIGNED (type))
4477 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4479 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4480 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4481 if (mask != 0)
4482 temp = const_binop (BIT_AND_EXPR, temp,
4483 fold_convert (TREE_TYPE (c), mask), 0);
4484 /* If necessary, convert the type back to match the type of C. */
4485 if (TYPE_UNSIGNED (type))
4486 temp = fold_convert (type, temp);
4488 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
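/* Editor's illustration (not part of the original file): the same bit
   trick applied to a plain 32-bit integer, ignoring the optional MASK
   step.  A minimal sketch assuming 0 < p < 32, a 32-bit int, and an
   arithmetic ">>" on negative signed ints (common in practice, but
   implementation-defined in C).  The result has all bits above
   position P-1 zero if and only if C was already sign-extended from
   P bits.  */
static unsigned int
unextend_example (unsigned int c, int p)
{
  unsigned int sign = (c >> (p - 1)) & 1;	/* the field's sign bit */
  /* Replicate the sign bit across bits P..31, mirroring the LSHIFT /
     RSHIFT pair above.  */
  unsigned int ext = (unsigned int) ((int) (sign << 31) >> (31 - p));
  return c ^ ext;	/* flip the extension bits iff the sign bit is set */
}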
4491 /* Find ways of folding logical expressions of LHS and RHS:
4492 Try to merge two comparisons to the same innermost item.
4493 Look for range tests like "ch >= '0' && ch <= '9'".
4494 Look for combinations of simple terms on machines with expensive branches
4495 and evaluate the RHS unconditionally.
4497 For example, if we have p->a == 2 && p->b == 4 and we can make an
4498 object large enough to span both A and B, we can do this with a comparison
4499 against the object ANDed with a mask.
4501 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4502 operations to do this with one comparison.
4504 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4505 function and the one above.
4507 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4508 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4510 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4511 two operands.
4513 We return the simplified tree or 0 if no optimization is possible. */
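/* Editor's worked example (not from the original source): given

	struct s { unsigned char a, b; } *p;
	... p->a == 2 && p->b == 4 ...

   both fields live in the same word, so the two comparisons can merge
   into a single wider load, mask and compare; on a little-endian
   target this is roughly

	(*(unsigned short *) p) == 0x0402

   (byte order decides where each constant byte lands, and a mask is
   applied first when the fields do not fill the accessed word).  The
   function below computes exactly these masks and shifted constants.  */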
4516 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4518 /* If this is the "or" of two comparisons, we can do something if
4519 the comparisons are NE_EXPR. If this is the "and", we can do something
4520 if the comparisons are EQ_EXPR. I.e.,
4521 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4523 WANTED_CODE is this operation code. For single bit fields, we can
4524 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4525 comparison for one-bit fields. */
4527 enum tree_code wanted_code;
4528 enum tree_code lcode, rcode;
4529 tree ll_arg, lr_arg, rl_arg, rr_arg;
4530 tree ll_inner, lr_inner, rl_inner, rr_inner;
4531 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4532 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4533 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4534 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4535 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4536 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4537 enum machine_mode lnmode, rnmode;
4538 tree ll_mask, lr_mask, rl_mask, rr_mask;
4539 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4540 tree l_const, r_const;
4541 tree lntype, rntype, result;
4542 int first_bit, end_bit;
4543 int volatilep;
4545 /* Start by getting the comparison codes. Fail if anything is volatile.
4546 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4547 it were surrounded with a NE_EXPR. */
4549 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4552 lcode = TREE_CODE (lhs);
4553 rcode = TREE_CODE (rhs);
4555 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4557 lhs = build2 (NE_EXPR, truth_type, lhs,
4558 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4562 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4564 rhs = build2 (NE_EXPR, truth_type, rhs,
4565 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4569 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4570 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4573 ll_arg = TREE_OPERAND (lhs, 0);
4574 lr_arg = TREE_OPERAND (lhs, 1);
4575 rl_arg = TREE_OPERAND (rhs, 0);
4576 rr_arg = TREE_OPERAND (rhs, 1);
4578 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4579 if (simple_operand_p (ll_arg)
4580 && simple_operand_p (lr_arg))
4583 if (operand_equal_p (ll_arg, rl_arg, 0)
4584 && operand_equal_p (lr_arg, rr_arg, 0))
4586 result = combine_comparisons (code, lcode, rcode,
4587 truth_type, ll_arg, lr_arg);
4591 else if (operand_equal_p (ll_arg, rr_arg, 0)
4592 && operand_equal_p (lr_arg, rl_arg, 0))
4594 result = combine_comparisons (code, lcode,
4595 swap_tree_comparison (rcode),
4596 truth_type, ll_arg, lr_arg);
4602 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4603 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4605 /* If the RHS can be evaluated unconditionally and its operands are
4606 simple, it wins to evaluate the RHS unconditionally on machines
4607 with expensive branches. In this case, this isn't a comparison
4608 that can be merged. Avoid doing this if the RHS is a floating-point
4609 comparison since those can trap. */
4611 if (BRANCH_COST >= 2
4612 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4613 && simple_operand_p (rl_arg)
4614 && simple_operand_p (rr_arg))
4616 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4617 if (code == TRUTH_OR_EXPR
4618 && lcode == NE_EXPR && integer_zerop (lr_arg)
4619 && rcode == NE_EXPR && integer_zerop (rr_arg)
4620 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4621 return build2 (NE_EXPR, truth_type,
4622 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4623 ll_arg, rl_arg),
4624 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4626 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4627 if (code == TRUTH_AND_EXPR
4628 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4629 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4630 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4631 return build2 (EQ_EXPR, truth_type,
4632 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4633 ll_arg, rl_arg),
4634 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4636 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4637 return build2 (code, truth_type, lhs, rhs);
4640 /* See if the comparisons can be merged. Then get all the parameters for
4641 each side. */
4643 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4644 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4645 return 0;
4647 volatilep = 0;
4648 ll_inner = decode_field_reference (ll_arg,
4649 &ll_bitsize, &ll_bitpos, &ll_mode,
4650 &ll_unsignedp, &volatilep, &ll_mask,
4651 &ll_and_mask);
4652 lr_inner = decode_field_reference (lr_arg,
4653 &lr_bitsize, &lr_bitpos, &lr_mode,
4654 &lr_unsignedp, &volatilep, &lr_mask,
4655 &lr_and_mask);
4656 rl_inner = decode_field_reference (rl_arg,
4657 &rl_bitsize, &rl_bitpos, &rl_mode,
4658 &rl_unsignedp, &volatilep, &rl_mask,
4659 &rl_and_mask);
4660 rr_inner = decode_field_reference (rr_arg,
4661 &rr_bitsize, &rr_bitpos, &rr_mode,
4662 &rr_unsignedp, &volatilep, &rr_mask,
4663 &rr_and_mask);
4665 /* The inner operation on the lhs of each comparison must be the same
4666 if we are to be able to do anything. Then see if we have constants.
4667 If not, the same must be true for the rhs. */
4669 if (volatilep || ll_inner == 0 || rl_inner == 0
4670 || ! operand_equal_p (ll_inner, rl_inner, 0))
4673 if (TREE_CODE (lr_arg) == INTEGER_CST
4674 && TREE_CODE (rr_arg) == INTEGER_CST)
4675 l_const = lr_arg, r_const = rr_arg;
4676 else if (lr_inner == 0 || rr_inner == 0
4677 || ! operand_equal_p (lr_inner, rr_inner, 0))
4680 l_const = r_const = 0;
4682 /* If either comparison code is not correct for our logical operation,
4683 fail. However, we can convert a one-bit comparison against zero into
4684 the opposite comparison against that bit being set in the field. */
4686 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4687 if (lcode != wanted_code)
4689 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4691 /* Make the left operand unsigned, since we are only interested
4692 in the value of one bit. Otherwise we are doing the wrong
4693 thing below. */
4701 /* This is analogous to the code for l_const above. */
4702 if (rcode != wanted_code)
4704 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4713 /* After this point all optimizations will generate bit-field
4714 references, which we might not want. */
4715 if (! lang_hooks.can_use_bit_fields_p ())
4718 /* See if we can find a mode that contains both fields being compared on
4719 the left. If we can't, fail. Otherwise, update all constants and masks
4720 to be relative to a field of that size. */
4721 first_bit = MIN (ll_bitpos, rl_bitpos);
4722 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4723 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4724 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4726 if (lnmode == VOIDmode)
4729 lnbitsize = GET_MODE_BITSIZE (lnmode);
4730 lnbitpos = first_bit & ~ (lnbitsize - 1);
4731 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4732 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4734 if (BYTES_BIG_ENDIAN)
4736 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4737 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4740 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4741 size_int (xll_bitpos), 0);
4742 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4743 size_int (xrl_bitpos), 0);
4747 l_const = fold_convert (lntype, l_const);
4748 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4749 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4750 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4751 fold (build1 (BIT_NOT_EXPR,
4755 warning ("comparison is always %d", wanted_code == NE_EXPR);
4757 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4762 r_const = fold_convert (lntype, r_const);
4763 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4764 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4765 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4766 fold (build1 (BIT_NOT_EXPR,
4770 warning ("comparison is always %d", wanted_code == NE_EXPR);
4772 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4776 /* If the right sides are not constant, do the same for them. Also,
4777 disallow this optimization if a size or signedness mismatch occurs
4778 between the left and right sides. */
4781 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4782 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4783 /* Make sure the two fields on the right
4784 correspond to the left without being swapped. */
4785 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4788 first_bit = MIN (lr_bitpos, rr_bitpos);
4789 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4790 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4791 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4793 if (rnmode == VOIDmode)
4796 rnbitsize = GET_MODE_BITSIZE (rnmode);
4797 rnbitpos = first_bit & ~ (rnbitsize - 1);
4798 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4799 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4801 if (BYTES_BIG_ENDIAN)
4803 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4804 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4807 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4808 size_int (xlr_bitpos), 0);
4809 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4810 size_int (xrr_bitpos), 0);
4812 /* Make a mask that corresponds to both fields being compared.
4813 Do this for both items being compared. If the operands are the
4814 same size and the bits being compared are in the same position
4815 then we can do this by masking both and comparing the masked
4816 results. */
4817 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4818 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4819 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4821 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4822 ll_unsignedp || rl_unsignedp);
4823 if (! all_ones_mask_p (ll_mask, lnbitsize))
4824 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4826 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4827 lr_unsignedp || rr_unsignedp);
4828 if (! all_ones_mask_p (lr_mask, rnbitsize))
4829 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4831 return build2 (wanted_code, truth_type, lhs, rhs);
4834 /* There is still another way we can do something: If both pairs of
4835 fields being compared are adjacent, we may be able to make a wider
4836 field containing them both.
4838 Note that we still must mask the lhs/rhs expressions. Furthermore,
4839 the mask must be shifted to account for the shift done by
4840 make_bit_field_ref. */
4841 if ((ll_bitsize + ll_bitpos == rl_bitpos
4842 && lr_bitsize + lr_bitpos == rr_bitpos)
4843 || (ll_bitpos == rl_bitpos + rl_bitsize
4844 && lr_bitpos == rr_bitpos + rr_bitsize))
4848 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4849 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4850 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4851 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4853 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4854 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4855 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4856 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4858 /* Convert to the smaller type before masking out unwanted bits. */
4860 if (lntype != rntype)
4862 if (lnbitsize > rnbitsize)
4864 lhs = fold_convert (rntype, lhs);
4865 ll_mask = fold_convert (rntype, ll_mask);
4868 else if (lnbitsize < rnbitsize)
4870 rhs = fold_convert (lntype, rhs);
4871 lr_mask = fold_convert (lntype, lr_mask);
4876 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4877 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4879 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4880 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4882 return build2 (wanted_code, truth_type, lhs, rhs);
4888 /* Handle the case of comparisons with constants. If there is something in
4889 common between the masks, those bits of the constants must be the same.
4890 If not, the condition is always false. Test for this to avoid generating
4891 incorrect code below. */
4892 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4893 if (! integer_zerop (result)
4894 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4895 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4897 if (wanted_code == NE_EXPR)
4899 warning ("%<or%> of unmatched not-equal tests is always 1");
4900 return constant_boolean_node (true, truth_type);
4904 warning ("%<and%> of mutually exclusive equal-tests is always 0");
4905 return constant_boolean_node (false, truth_type);
4909 /* Construct the expression we will return. First get the component
4910 reference we will make. Unless the mask is all ones the width of
4911 that field, perform the mask operation. Then compare with the
4912 merged constant. */
4913 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4914 ll_unsignedp || rl_unsignedp);
4916 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4917 if (! all_ones_mask_p (ll_mask, lnbitsize))
4918 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4920 return build2 (wanted_code, truth_type, result,
4921 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
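/* Editor's worked example (not from the original source): for adjacent
   bit-fields,

	struct s { unsigned f : 4; unsigned g : 4; } x;
	... x.f == 2 && x.g == 4 ...

   the two 4-bit comparisons merge into one 8-bit comparison; with f in
   the low nibble (bit-field layout is target-defined) this is

	(merged 8-bit field) == 0x42

   i.e. the merged bit-field reference compared against the BIT_IOR of
   the two shifted constants, as built by the return just above.  */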
4924 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4928 optimize_minmax_comparison (tree t)
4930 tree type = TREE_TYPE (t);
4931 tree arg0 = TREE_OPERAND (t, 0);
4932 enum tree_code op_code;
4933 tree comp_const = TREE_OPERAND (t, 1);
4934 tree minmax_const;
4935 int consts_equal, consts_lt;
4936 tree inner;
4938 STRIP_SIGN_NOPS (arg0);
4940 op_code = TREE_CODE (arg0);
4941 minmax_const = TREE_OPERAND (arg0, 1);
4942 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4943 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4944 inner = TREE_OPERAND (arg0, 0);
4946 /* If something does not permit us to optimize, return the original tree. */
4947 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4948 || TREE_CODE (comp_const) != INTEGER_CST
4949 || TREE_CONSTANT_OVERFLOW (comp_const)
4950 || TREE_CODE (minmax_const) != INTEGER_CST
4951 || TREE_CONSTANT_OVERFLOW (minmax_const))
4954 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4955 and GT_EXPR, doing the rest with recursive calls using logical
4956 simplifications. */
4957 switch (TREE_CODE (t))
4959 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4960 return
4961 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4963 case GE_EXPR:
4964 return
4965 fold (build2 (TRUTH_ORIF_EXPR, type,
4966 optimize_minmax_comparison
4967 (build2 (EQ_EXPR, type, arg0, comp_const)),
4968 optimize_minmax_comparison
4969 (build2 (GT_EXPR, type, arg0, comp_const))));
4971 case EQ_EXPR:
4972 if (op_code == MAX_EXPR && consts_equal)
4973 /* MAX (X, 0) == 0 -> X <= 0 */
4974 return fold (build2 (LE_EXPR, type, inner, comp_const));
4976 else if (op_code == MAX_EXPR && consts_lt)
4977 /* MAX (X, 0) == 5 -> X == 5 */
4978 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4980 else if (op_code == MAX_EXPR)
4981 /* MAX (X, 0) == -1 -> false */
4982 return omit_one_operand (type, integer_zero_node, inner);
4984 else if (consts_equal)
4985 /* MIN (X, 0) == 0 -> X >= 0 */
4986 return fold (build2 (GE_EXPR, type, inner, comp_const));
4988 else if (consts_lt)
4989 /* MIN (X, 0) == 5 -> false */
4990 return omit_one_operand (type, integer_zero_node, inner);
4992 else
4993 /* MIN (X, 0) == -1 -> X == -1 */
4994 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4996 case GT_EXPR:
4997 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4998 /* MAX (X, 0) > 0 -> X > 0
4999 MAX (X, 0) > 5 -> X > 5 */
5000 return fold (build2 (GT_EXPR, type, inner, comp_const));
5002 else if (op_code == MAX_EXPR)
5003 /* MAX (X, 0) > -1 -> true */
5004 return omit_one_operand (type, integer_one_node, inner);
5006 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5007 /* MIN (X, 0) > 0 -> false
5008 MIN (X, 0) > 5 -> false */
5009 return omit_one_operand (type, integer_zero_node, inner);
5011 else
5012 /* MIN (X, 0) > -1 -> X > -1 */
5013 return fold (build2 (GT_EXPR, type, inner, comp_const));
5020 /* T is an integer expression that is being multiplied, divided, or taken a
5021 modulus (CODE says which and what kind of divide or modulus) by a
5022 constant C. See if we can eliminate that operation by folding it with
5023 other operations already in T. WIDE_TYPE, if non-null, is a type that
5024 should be used for the computation if wider than our type.
5026 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5027 (X * 2) + (Y * 4). We must, however, be assured that either the original
5028 expression would not overflow or that overflow is undefined for the type
5029 in the language in question.
5031 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5032 the machine has a multiply-accumulate insn or that this is part of an
5033 addressing calculation.
5035 If we return a non-null expression, it is an equivalent form of the
5036 original computation, but need not be in the original type. */
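/* Editor's note (illustrative, not from the original source): the
   overflow proviso above is what makes the rewrite delicate.  In 8-bit
   unsigned arithmetic with X = 40, (X * 8) / 4 computes
   (320 mod 256) / 4 == 16, while X * 2 == 80; the fold is justified
   only when the original expression provably does not overflow, or
   when overflow is undefined for the type and may be assumed away.  */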
5039 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5041 /* To avoid exponential search depth, refuse to allow recursion past
5042 three levels. Beyond that (1) it's highly unlikely that we'll find
5043 something interesting and (2) we've probably processed it before
5044 when we built the inner expression. */
5053 ret = extract_muldiv_1 (t, c, code, wide_type);
5060 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5062 tree type = TREE_TYPE (t);
5063 enum tree_code tcode = TREE_CODE (t);
5064 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5065 > GET_MODE_SIZE (TYPE_MODE (type)))
5066 ? wide_type : type);
5067 tree t1, t2;
5068 int same_p = tcode == code;
5069 tree op0 = NULL_TREE, op1 = NULL_TREE;
5071 /* Don't deal with constants of zero here; they confuse the code below. */
5072 if (integer_zerop (c))
5075 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5076 op0 = TREE_OPERAND (t, 0);
5078 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5079 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5081 /* Note that we need not handle conditional operations here since fold
5082 already handles those cases. So just do arithmetic here. */
5086 /* For a constant, we can always simplify if we are a multiply
5087 or (for divide and modulus) if it is a multiple of our constant. */
5088 if (code == MULT_EXPR
5089 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5090 return const_binop (code, fold_convert (ctype, t),
5091 fold_convert (ctype, c), 0);
5094 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5095 /* If op0 is an expression ... */
5096 if ((COMPARISON_CLASS_P (op0)
5097 || UNARY_CLASS_P (op0)
5098 || BINARY_CLASS_P (op0)
5099 || EXPRESSION_CLASS_P (op0))
5100 /* ... and is unsigned, and its type is smaller than ctype,
5101 then we cannot pass through as widening. */
5102 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5103 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5104 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5105 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5106 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5107 /* ... or this is a truncation (t is narrower than op0),
5108 then we cannot pass through this narrowing. */
5109 || (GET_MODE_SIZE (TYPE_MODE (type))
5110 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5111 /* ... or signedness changes for division or modulus,
5112 then we cannot pass through this conversion. */
5113 || (code != MULT_EXPR
5114 && (TYPE_UNSIGNED (ctype)
5115 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5118 /* Pass the constant down and see if we can make a simplification. If
5119 we can, replace this expression with the inner simplification for
5120 possible later conversion to our or some other type. */
5121 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5122 && TREE_CODE (t2) == INTEGER_CST
5123 && ! TREE_CONSTANT_OVERFLOW (t2)
5124 && (0 != (t1 = extract_muldiv (op0, t2, code,
5126 ? ctype : NULL_TREE))))
5131 /* If widening the type changes it from signed to unsigned, then we
5132 must avoid building ABS_EXPR itself as unsigned. */
5133 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5135 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5136 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5138 t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
5139 return fold_convert (ctype, t1);
5145 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5146 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5149 case MIN_EXPR: case MAX_EXPR:
5150 /* If widening the type changes the signedness, then we can't perform
5151 this optimization as that changes the result. */
5152 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5155 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5156 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5157 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5159 if (tree_int_cst_sgn (c) < 0)
5160 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5162 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5163 fold_convert (ctype, t2)));
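/* Editor's note (illustrative): the sign flip just above is required
   because multiplying or dividing by a negative constant reverses the
   ordering of the operands; e.g. MIN (a, b) * -2 == MAX (a * -2, b * -2). */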
5167 case LSHIFT_EXPR: case RSHIFT_EXPR:
5168 /* If the second operand is constant, this is a multiplication
5169 or floor division, by a power of two, so we can treat it that
5170 way unless the multiplier or divisor overflows. Signed
5171 left-shift overflow is implementation-defined rather than
5172 undefined in C90, so do not convert signed left shift into
5173 multiplication. */
5174 if (TREE_CODE (op1) == INTEGER_CST
5175 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5176 /* const_binop may not detect overflow correctly,
5177 so check for it explicitly here. */
5178 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5179 && TREE_INT_CST_HIGH (op1) == 0
5180 && 0 != (t1 = fold_convert (ctype,
5181 const_binop (LSHIFT_EXPR,
5182 size_one_node,
5183 op1, 0)))
5184 && ! TREE_OVERFLOW (t1))
5185 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5186 ? MULT_EXPR : FLOOR_DIV_EXPR,
5187 ctype, fold_convert (ctype, op0), t1),
5188 c, code, wide_type);
5191 case PLUS_EXPR: case MINUS_EXPR:
5192 /* See if we can eliminate the operation on both sides. If we can, we
5193 can return a new PLUS or MINUS. If we can't, the only remaining
5194 cases where we can do anything are if the second operand is a
5195 constant. */
5196 t1 = extract_muldiv (op0, c, code, wide_type);
5197 t2 = extract_muldiv (op1, c, code, wide_type);
5198 if (t1 != 0 && t2 != 0
5199 && (code == MULT_EXPR
5200 /* If not multiplication, we can only do this if both operands
5201 are divisible by c. */
5202 || (multiple_of_p (ctype, op0, c)
5203 && multiple_of_p (ctype, op1, c))))
5204 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5205 fold_convert (ctype, t2)));
5207 /* If this was a subtraction, negate OP1 and set it to be an addition.
5208 This simplifies the logic below. */
5209 if (tcode == MINUS_EXPR)
5210 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5212 if (TREE_CODE (op1) != INTEGER_CST)
5215 /* If either OP1 or C are negative, this optimization is not safe for
5216 some of the division and remainder types while for others we need
5217 to change the code. */
5218 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5220 if (code == CEIL_DIV_EXPR)
5221 code = FLOOR_DIV_EXPR;
5222 else if (code == FLOOR_DIV_EXPR)
5223 code = CEIL_DIV_EXPR;
5224 else if (code != MULT_EXPR
5225 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5229 /* If it's a multiply or a division/modulus operation of a multiple
5230 of our constant, do the operation and verify it doesn't overflow. */
5231 if (code == MULT_EXPR
5232 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5234 op1 = const_binop (code, fold_convert (ctype, op1),
5235 fold_convert (ctype, c), 0);
5236 /* We allow the constant to overflow with wrapping semantics. */
5237 if (op1 == 0
5238 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5244 /* If we have an unsigned type that is not a sizetype, we cannot widen
5245 the operation since it will change the result if the original
5246 computation overflowed. */
5247 if (TYPE_UNSIGNED (ctype)
5248 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5252 /* If we were able to eliminate our operation from the first side,
5253 apply our operation to the second side and reform the PLUS. */
5254 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5255 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5257 /* The last case is if we are a multiply. In that case, we can
5258 apply the distributive law to commute the multiply and addition
5259 if the multiplication of the constants doesn't overflow. */
5260 if (code == MULT_EXPR)
5261 return fold (build2 (tcode, ctype,
5262 fold (build2 (code, ctype,
5263 fold_convert (ctype, op0),
5264 fold_convert (ctype, c))),
5270 /* We have a special case here if we are doing something like
5271 (C * 8) % 4 since we know that's zero. */
5272 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5273 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5274 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5275 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5276 return omit_one_operand (type, integer_zero_node, op0);
5278 /* ... fall through ... */
5280 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5281 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5282 /* If we can extract our operation from the LHS, do so and return a
5283 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5284 do something only if the second operand is a constant. */
5285 if (same_p
5286 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5287 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5288 fold_convert (ctype, op1)));
5289 else if (tcode == MULT_EXPR && code == MULT_EXPR
5290 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5291 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5292 fold_convert (ctype, t1)));
5293 else if (TREE_CODE (op1) != INTEGER_CST)
5296 /* If these are the same operation types, we can associate them
5297 assuming no overflow. */
5298 if (tcode == code
5299 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5300 fold_convert (ctype, c), 0))
5301 && ! TREE_OVERFLOW (t1))
5302 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5304 /* If these operations "cancel" each other, we have the main
5305 optimizations of this pass, which occur when either constant is a
5306 multiple of the other, in which case we replace this with an
5307 operation of either CODE or TCODE.
5309 If we have an unsigned type that is not a sizetype, we cannot do
5310 this since it will change the result if the original computation
5311 overflowed. */
5312 if ((! TYPE_UNSIGNED (ctype)
5313 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5315 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5316 || (tcode == MULT_EXPR
5317 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5318 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5320 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5321 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5322 fold_convert (ctype,
5323 const_binop (TRUNC_DIV_EXPR,
5324 op1, c, 0))));
5325 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5326 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5327 fold_convert (ctype,
5328 const_binop (TRUNC_DIV_EXPR,
5329 c, op1, 0))));
5340 /* Return a node which has the indicated constant VALUE (either 0 or
5341 1), and is of the indicated TYPE. */
5344 constant_boolean_node (int value, tree type)
5346 if (type == integer_type_node)
5347 return value ? integer_one_node : integer_zero_node;
5348 else if (type == boolean_type_node)
5349 return value ? boolean_true_node : boolean_false_node;
5350 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5351 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5352 : integer_zero_node);
5353 else
5354 return build_int_cst (type, value);
5357 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5358 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5359 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5360 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5361 COND is the first argument to CODE; otherwise (as in the example
5362 given here), it is the second argument. TYPE is the type of the
5363 original expression. Return NULL_TREE if no simplification is
5367 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5368 tree cond, tree arg, int cond_first_p)
5370 tree test, true_value, false_value;
5371 tree lhs = NULL_TREE;
5372 tree rhs = NULL_TREE;
5374 /* This transformation is only worthwhile if we don't have to wrap
5375 arg in a SAVE_EXPR, and the operation can be simplified on at least
5376 one of the branches once it is pushed inside the COND_EXPR. */
5377 if (!TREE_CONSTANT (arg))
5378 return NULL_TREE;
5380 if (TREE_CODE (cond) == COND_EXPR)
5382 test = TREE_OPERAND (cond, 0);
5383 true_value = TREE_OPERAND (cond, 1);
5384 false_value = TREE_OPERAND (cond, 2);
5385 /* If this operand throws an exception, then it does not make
5386 sense to try to perform a logical or arithmetic operation
5387 involving it. */
5388 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5389 lhs = true_value;
5390 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5391 rhs = false_value;
5395 tree testtype = TREE_TYPE (cond);
5397 true_value = constant_boolean_node (true, testtype);
5398 false_value = constant_boolean_node (false, testtype);
5402 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5403 : build2 (code, type, arg, true_value));
5405 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5406 : build2 (code, type, arg, false_value));
5408 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5409 return fold_convert (type, test);
5413 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5415 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5416 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5417 ADDEND is the same as X.
5419 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5420 and finite. The problematic cases are when X is zero, and its mode
5421 has signed zeros. In the case of rounding towards -infinity,
5422 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5423 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5426 fold_real_zero_addition_p (tree type, tree addend, int negate)
5428 if (!real_zerop (addend))
5429 return false;
5431 /* Don't allow the fold with -fsignaling-nans. */
5432 if (HONOR_SNANS (TYPE_MODE (type)))
5433 return false;
5435 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5436 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5437 return true;
5439 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5440 if (TREE_CODE (addend) == REAL_CST
5441 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5442 negate = !negate;
5444 /* The mode has signed zeros, and we have to honor their sign.
5445 In this situation, there is only one case we can return true for.
5446 X - 0 is the same as X unless rounding towards -infinity is
5448 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
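/* Editor's illustration (not from the original file): why x + 0.0 must
   not be folded to x when signed zeros are honored.  A minimal sketch
   assuming IEEE semantics and the default round-to-nearest mode.  */
static double
signed_zero_example (void)
{
  double x = -0.0;
  double sum = x + 0.0;		/* +0.0: (-0) + (+0) is +0, so folding
				   x + 0.0 to x would flip the zero's sign */
  double diff = x - 0.0;	/* still -0.0, so x - 0.0 -> x is safe here,
				   but not when rounding towards -infinity */
  return sum + diff;
}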
5451 /* Subroutine of fold() that checks comparisons of built-in math
5452 functions against real constants.
5454 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5455 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5456 is the type of the result and ARG0 and ARG1 are the operands of the
5457 comparison. ARG1 must be a TREE_REAL_CST.
5459 The function returns the constant folded tree if a simplification
5460 can be made, and NULL_TREE otherwise. */
5463 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5464 tree type, tree arg0, tree arg1)
5465 {
5466 REAL_VALUE_TYPE c;
5468 if (BUILTIN_SQRT_P (fcode))
5470 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5471 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5473 c = TREE_REAL_CST (arg1);
5474 if (REAL_VALUE_NEGATIVE (c))
5476 /* sqrt(x) < y is always false, if y is negative. */
5477 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5478 return omit_one_operand (type, integer_zero_node, arg);
5480 /* sqrt(x) > y is always true, if y is negative and we
5481 don't care about NaNs, i.e. negative values of x. */
5482 if (code == NE_EXPR || !HONOR_NANS (mode))
5483 return omit_one_operand (type, integer_one_node, arg);
5485 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5486 return fold (build2 (GE_EXPR, type, arg,
5487 build_real (TREE_TYPE (arg), dconst0)));
5489 else if (code == GT_EXPR || code == GE_EXPR)
5490 {
5491 REAL_VALUE_TYPE c2;
5493 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5494 real_convert (&c2, mode, &c2);
5496 if (REAL_VALUE_ISINF (c2))
5498 /* sqrt(x) > y is x == +Inf, when y is very large. */
5499 if (HONOR_INFINITIES (mode))
5500 return fold (build2 (EQ_EXPR, type, arg,
5501 build_real (TREE_TYPE (arg), c2)));
5503 /* sqrt(x) > y is always false, when y is very large
5504 and we don't care about infinities. */
5505 return omit_one_operand (type, integer_zero_node, arg);
5508 /* sqrt(x) > c is the same as x > c*c. */
5509 return fold (build2 (code, type, arg,
5510 build_real (TREE_TYPE (arg), c2)));
5512 else if (code == LT_EXPR || code == LE_EXPR)
5513 {
5514 REAL_VALUE_TYPE c2;
5516 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5517 real_convert (&c2, mode, &c2);
5519 if (REAL_VALUE_ISINF (c2))
5521 /* sqrt(x) < y is always true, when y is a very large
5522 value and we don't care about NaNs or Infinities. */
5523 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5524 return omit_one_operand (type, integer_one_node, arg);
5526 /* sqrt(x) < y is x != +Inf when y is very large and we
5527 don't care about NaNs. */
5528 if (! HONOR_NANS (mode))
5529 return fold (build2 (NE_EXPR, type, arg,
5530 build_real (TREE_TYPE (arg), c2)));
5532 /* sqrt(x) < y is x >= 0 when y is very large and we
5533 don't care about Infinities. */
5534 if (! HONOR_INFINITIES (mode))
5535 return fold (build2 (GE_EXPR, type, arg,
5536 build_real (TREE_TYPE (arg), dconst0)));
5538 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5539 if (lang_hooks.decls.global_bindings_p () != 0
5540 || CONTAINS_PLACEHOLDER_P (arg))
5543 arg = save_expr (arg);
5544 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5545 fold (build2 (GE_EXPR, type, arg,
5546 build_real (TREE_TYPE (arg),
5547 dconst0))),
5548 fold (build2 (NE_EXPR, type, arg,
5549 build_real (TREE_TYPE (arg),
5550 c2)))));
5553 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5554 if (! HONOR_NANS (mode))
5555 return fold (build2 (code, type, arg,
5556 build_real (TREE_TYPE (arg), c2)));
5558 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5559 if (lang_hooks.decls.global_bindings_p () == 0
5560 && ! CONTAINS_PLACEHOLDER_P (arg))
5562 arg = save_expr (arg);
5563 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5564 fold (build2 (GE_EXPR, type, arg,
5565 build_real (TREE_TYPE (arg),
5566 dconst0))),
5567 fold (build2 (code, type, arg,
5568 build_real (TREE_TYPE (arg),
5569 c2)))));
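/* Editor's summary (illustrative, not from the original source): taken
   together, the sqrt cases above give, e.g.,

	sqrt (x) > 2.0	->  x > 4.0	(ignoring NaNs)
	sqrt (x) < -1.0 ->  0		(always false)
	sqrt (x) > -1.0 ->  1		(ignoring NaNs)

   and when NaNs are honored, the x >= 0.0 guard must be kept, hence
   the TRUTH_ANDIF_EXPR forms built above.  */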
5577 /* Subroutine of fold() that optimizes comparisons against Infinities,
5578 either +Inf or -Inf.
5580 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5581 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5582 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5584 The function returns the constant folded tree if a simplification
5585 can be made, and NULL_TREE otherwise. */
5588 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5590 enum machine_mode mode;
5591 REAL_VALUE_TYPE max;
5592 tree temp;
5593 bool neg;
5595 mode = TYPE_MODE (TREE_TYPE (arg0));
5597 /* For negative infinity swap the sense of the comparison. */
5598 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5600 code = swap_tree_comparison (code);
5605 /* x > +Inf is always false, if we ignore sNaNs. */
5606 if (HONOR_SNANS (mode))
5608 return omit_one_operand (type, integer_zero_node, arg0);
5611 /* x <= +Inf is always true, if we don't care about NaNs. */
5612 if (! HONOR_NANS (mode))
5613 return omit_one_operand (type, integer_one_node, arg0);
5615 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5616 if (lang_hooks.decls.global_bindings_p () == 0
5617 && ! CONTAINS_PLACEHOLDER_P (arg0))
5619 arg0 = save_expr (arg0);
5620 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5626 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5627 real_maxval (&max, neg, mode);
5628 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5629 arg0, build_real (TREE_TYPE (arg0), max)));
5632 /* x < +Inf is always equal to x <= DBL_MAX. */
5633 real_maxval (&max, neg, mode);
5634 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5635 arg0, build_real (TREE_TYPE (arg0), max)));
5638 /* x != +Inf is always equal to !(x > DBL_MAX). */
5639 real_maxval (&max, neg, mode);
5640 if (! HONOR_NANS (mode))
5641 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5642 arg0, build_real (TREE_TYPE (arg0), max)));
5644 /* The transformation below creates non-gimple code and thus is
5645 not appropriate if we are in gimple form. */
5649 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5650 arg0, build_real (TREE_TYPE (arg0), max)));
5651 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5660 /* Subroutine of fold() that optimizes comparisons of a division by
5661 a nonzero integer constant against an integer constant, i.e.
5662 X / C1 op C2.
5664 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5665 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5666 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5668 The function returns the constant folded tree if a simplification
5669 can be made, and NULL_TREE otherwise. */
5672 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5674 tree prod, tmp, hi, lo;
5675 tree arg00 = TREE_OPERAND (arg0, 0);
5676 tree arg01 = TREE_OPERAND (arg0, 1);
5677 unsigned HOST_WIDE_INT lpart;
5678 HOST_WIDE_INT hpart;
5679 int overflow;
5681 /* We have to do this the hard way to detect unsigned overflow.
5682 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5683 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5684 TREE_INT_CST_HIGH (arg01),
5685 TREE_INT_CST_LOW (arg1),
5686 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5687 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5688 prod = force_fit_type (prod, -1, overflow, false);
5690 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5692 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5695 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5696 overflow = add_double (TREE_INT_CST_LOW (prod),
5697 TREE_INT_CST_HIGH (prod),
5698 TREE_INT_CST_LOW (tmp),
5699 TREE_INT_CST_HIGH (tmp),
5701 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5702 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5703 TREE_CONSTANT_OVERFLOW (prod));
5705 else if (tree_int_cst_sgn (arg01) >= 0)
5707 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5708 switch (tree_int_cst_sgn (arg1))
5711 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5716 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5721 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5731 /* A negative divisor reverses the relational operators. */
5732 code = swap_tree_comparison (code);
5734 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5735 switch (tree_int_cst_sgn (arg1))
5738 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5743 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5748 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5760 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5761 return omit_one_operand (type, integer_zero_node, arg00);
5762 if (TREE_OVERFLOW (hi))
5763 return fold (build2 (GE_EXPR, type, arg00, lo));
5764 if (TREE_OVERFLOW (lo))
5765 return fold (build2 (LE_EXPR, type, arg00, hi));
5766 return build_range_check (type, arg00, 1, lo, hi);
5769 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5770 return omit_one_operand (type, integer_one_node, arg00);
5771 if (TREE_OVERFLOW (hi))
5772 return fold (build2 (LT_EXPR, type, arg00, lo));
5773 if (TREE_OVERFLOW (lo))
5774 return fold (build2 (GT_EXPR, type, arg00, hi));
5775 return build_range_check (type, arg00, 0, lo, hi);
5778 if (TREE_OVERFLOW (lo))
5779 return omit_one_operand (type, integer_zero_node, arg00);
5780 return fold (build2 (LT_EXPR, type, arg00, lo));
5783 if (TREE_OVERFLOW (hi))
5784 return omit_one_operand (type, integer_one_node, arg00);
5785 return fold (build2 (LE_EXPR, type, arg00, hi));
5788 if (TREE_OVERFLOW (hi))
5789 return omit_one_operand (type, integer_zero_node, arg00);
5790 return fold (build2 (GT_EXPR, type, arg00, hi));
5793 if (TREE_OVERFLOW (lo))
5794 return omit_one_operand (type, integer_one_node, arg00);
5795 return fold (build2 (GE_EXPR, type, arg00, lo));
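/* Editor's worked example (not from the original source): for unsigned
   x, "x / 4 == 3" holds exactly for x in [12, 15]: prod = 4 * 3 = 12,
   tmp = 4 - 1 = 3, hi = 15, so the EQ_EXPR case becomes
   build_range_check (type, x, 1, 12, 15).  Likewise "x / 4 > 3"
   becomes x > 15.  */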
5805 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5806 equality/inequality test, then return a simplified form of
5807 the test using shifts and logical operations. Otherwise return
5808 NULL. TYPE is the desired result type. */
5811 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5814 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5815 operand 0. */
5816 if (code == TRUTH_NOT_EXPR)
5818 code = TREE_CODE (arg0);
5819 if (code != NE_EXPR && code != EQ_EXPR)
5822 /* Extract the arguments of the EQ/NE. */
5823 arg1 = TREE_OPERAND (arg0, 1);
5824 arg0 = TREE_OPERAND (arg0, 0);
5826 /* This requires us to invert the code. */
5827 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5830 /* If this is testing a single bit, we can optimize the test. */
5831 if ((code == NE_EXPR || code == EQ_EXPR)
5832 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5833 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5835 tree inner = TREE_OPERAND (arg0, 0);
5836 tree type = TREE_TYPE (arg0);
5837 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5838 enum machine_mode operand_mode = TYPE_MODE (type);
5839 int ops_unsigned;
5840 tree signed_type, unsigned_type, intermediate_type;
5841 tree arg00;
5843 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5844 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5845 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5846 if (arg00 != NULL_TREE
5847 /* This is only a win if casting to a signed type is cheap,
5848 i.e. when arg00's type is not a partial mode. */
5849 && TYPE_PRECISION (TREE_TYPE (arg00))
5850 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5852 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5853 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5854 result_type, fold_convert (stype, arg00),
5855 fold_convert (stype, integer_zero_node)));
5858 /* Otherwise we have (A & C) != 0 where C is a single bit,
5859 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5860 Similarly for (A & C) == 0. */
5862 /* If INNER is a right shift of a constant and it plus BITNUM does
5863 not overflow, adjust BITNUM and INNER. */
5864 if (TREE_CODE (inner) == RSHIFT_EXPR
5865 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5866 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5867 && bitnum < TYPE_PRECISION (type)
5868 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5869 bitnum - TYPE_PRECISION (type)))
5871 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5872 inner = TREE_OPERAND (inner, 0);
5875 /* If we are going to be able to omit the AND below, we must do our
5876 operations as unsigned. If we must use the AND, we have a choice.
5877 Normally unsigned is faster, but for some machines signed is. */
5878 #ifdef LOAD_EXTEND_OP
5879 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5884 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5885 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5886 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5887 inner = fold_convert (intermediate_type, inner);
5890 inner = build2 (RSHIFT_EXPR, intermediate_type,
5891 inner, size_int (bitnum));
5893 if (code == EQ_EXPR)
5894 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5895 inner, integer_one_node));
5897 /* Put the AND last so it can combine with more things. */
5898 inner = build2 (BIT_AND_EXPR, intermediate_type,
5899 inner, integer_one_node);
5901 /* Make sure to return the proper type. */
5902 inner = fold_convert (result_type, inner);
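/* Editor's illustration (not part of the original file): the rewrite
   performed above, expressed on plain C operands.  (a & 8) != 0 tests
   bit 3, so it becomes ((a >> 3) & 1); the EQ_EXPR form additionally
   XORs the shifted value with 1.  */
static unsigned int
single_bit_test_example (unsigned int a)
{
  return (a >> 3) & 1;		/* equivalent to (a & 8) != 0 */
}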
5909 /* Check whether we are allowed to reorder operands arg0 and arg1,
5910 such that the evaluation of arg1 occurs before arg0. */
5913 reorder_operands_p (tree arg0, tree arg1)
5915 if (! flag_evaluation_order)
5917 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5919 return ! TREE_SIDE_EFFECTS (arg0)
5920 && ! TREE_SIDE_EFFECTS (arg1);
5923 /* Test whether it is preferable to swap two operands, ARG0 and
5924 ARG1, for example because ARG0 is an integer constant and ARG1
5925 isn't. If REORDER is true, only recommend swapping if we can
5926 evaluate the operands in reverse order. */
5929 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5931 STRIP_SIGN_NOPS (arg0);
5932 STRIP_SIGN_NOPS (arg1);
5934 if (TREE_CODE (arg1) == INTEGER_CST)
5936 if (TREE_CODE (arg0) == INTEGER_CST)
5939 if (TREE_CODE (arg1) == REAL_CST)
5941 if (TREE_CODE (arg0) == REAL_CST)
5944 if (TREE_CODE (arg1) == COMPLEX_CST)
5946 if (TREE_CODE (arg0) == COMPLEX_CST)
5949 if (TREE_CONSTANT (arg1))
5951 if (TREE_CONSTANT (arg0))
5957 if (reorder && flag_evaluation_order
5958 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5966 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
5967 for commutative and comparison operators. Ensuring a canonical
5968 form allows the optimizers to find additional redundancies without
5969 having to explicitly check for both orderings. */
5970 if (TREE_CODE (arg0) == SSA_NAME
5971 && TREE_CODE (arg1) == SSA_NAME
5972 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5978 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
5979 ARG0 is extended to a wider type. */
5982 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
5984 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
5985 tree arg1_unw;
5986 tree shorter_type, outer_type;
5987 tree min, max;
5988 bool above, below;
5990 if (arg0_unw == arg0)
5991 return NULL_TREE;
5992 shorter_type = TREE_TYPE (arg0_unw);
5994 arg1_unw = get_unwidened (arg1, shorter_type);
5998 /* If possible, express the comparison in the shorter mode. */
5999 if ((code == EQ_EXPR || code == NE_EXPR
6000 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6001 && (TREE_TYPE (arg1_unw) == shorter_type
6002 || (TREE_CODE (arg1_unw) == INTEGER_CST
6003 && TREE_CODE (shorter_type) == INTEGER_TYPE
6004 && int_fits_type_p (arg1_unw, shorter_type))))
6005 return fold (build (code, type, arg0_unw,
6006 fold_convert (shorter_type, arg1_unw)));
6008 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6011 /* If we are comparing with an integer that does not fit into the range
6012 of the shorter type, the result is known. */
6013 outer_type = TREE_TYPE (arg1_unw);
6014 min = lower_bound_in_type (outer_type, shorter_type);
6015 max = upper_bound_in_type (outer_type, shorter_type);
6017 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6019 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6026 return omit_one_operand (type, integer_zero_node, arg0);
6031 return omit_one_operand (type, integer_one_node, arg0);
6037 return omit_one_operand (type, integer_one_node, arg0);
6039 return omit_one_operand (type, integer_zero_node, arg0);
6044 return omit_one_operand (type, integer_zero_node, arg0);
6046 return omit_one_operand (type, integer_one_node, arg0);
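/* Editor's example (not from the original source): with
   "unsigned char c", the test (int) c == 5 can be carried out in the
   narrower type as c == 5 (for EQ_EXPR/NE_EXPR the signedness change
   does not matter), while (int) c < 300 folds to 1 outright, because
   every value of the shorter type lies below 300.  */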
6055 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6056 ARG0 just the signedness is changed. */
6059 fold_sign_changed_comparison (enum tree_code code, tree type,
6060 tree arg0, tree arg1)
6062 tree arg0_inner, tmp;
6063 tree inner_type, outer_type;
6065 if (TREE_CODE (arg0) != NOP_EXPR)
6068 outer_type = TREE_TYPE (arg0);
6069 arg0_inner = TREE_OPERAND (arg0, 0);
6070 inner_type = TREE_TYPE (arg0_inner);
6072 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6075 if (TREE_CODE (arg1) != INTEGER_CST
6076 && !(TREE_CODE (arg1) == NOP_EXPR
6077 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6080 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6085 if (TREE_CODE (arg1) == INTEGER_CST)
6087 tmp = build_int_cst_wide (inner_type,
6088 TREE_INT_CST_LOW (arg1),
6089 TREE_INT_CST_HIGH (arg1));
6090 arg1 = force_fit_type (tmp, 0,
6091 TREE_OVERFLOW (arg1),
6092 TREE_CONSTANT_OVERFLOW (arg1));
6095 arg1 = fold_convert (inner_type, arg1);
6097 return fold (build (code, type, arg0_inner, arg1));
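/* Editor's example (not from the original source): for
   "unsigned int u", the test (int) u == -1 becomes u == 0xffffffff;
   when only the signedness differs and the precision matches, the
   conversion can be dropped for EQ_EXPR/NE_EXPR after re-interpreting
   the constant in the inner type.  */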
6100 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6101 the step of the array. TYPE is the type of the expression. ADDR is the address.
6102 MULT is the multiplicative expression. If the function succeeds, the new
6103 address expression is returned. Otherwise NULL_TREE is returned. */
6106 try_move_mult_to_index (tree type, enum tree_code code, tree addr, tree mult)
6108 tree s, delta, step;
6109 tree arg0 = TREE_OPERAND (mult, 0), arg1 = TREE_OPERAND (mult, 1);
6110 tree ref = TREE_OPERAND (addr, 0), pref;
6111 tree ret, pos;
6112 tree itype;
6117 if (TREE_CODE (arg0) == INTEGER_CST)
6122 else if (TREE_CODE (arg1) == INTEGER_CST)
6130 for (;; ref = TREE_OPERAND (ref, 0))
6132 if (TREE_CODE (ref) == ARRAY_REF)
6134 step = array_ref_element_size (ref);
6136 if (TREE_CODE (step) != INTEGER_CST)
6139 itype = TREE_TYPE (step);
6141 /* If the type sizes do not match, we might run into problems
6142 when one of them would overflow. */
6143 if (TYPE_PRECISION (itype) != TYPE_PRECISION (type))
6146 if (!operand_equal_p (step, fold_convert (itype, s), 0))
6149 delta = fold_convert (itype, delta);
6153 if (!handled_component_p (ref))
6157 /* We found a suitable array reference. So copy everything up to it,
6158 and replace the index. */
6160 pref = TREE_OPERAND (addr, 0);
6161 ret = copy_node (pref);
6166 pref = TREE_OPERAND (pref, 0);
6167 TREE_OPERAND (pos, 0) = copy_node (pref);
6168 pos = TREE_OPERAND (pos, 0);
6171 TREE_OPERAND (pos, 1) = fold (build2 (code, itype,
6172 TREE_OPERAND (pos, 1),
6175 return build1 (ADDR_EXPR, type, ret);
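/* Editor's example (not from the original source): for "int a[100]"
   with a 4-byte element size, the address computation &a[i] + j * 4
   (offset in bytes) is rewritten as &a[i + j]; the constant factor
   matches the array step, so the multiplication folds into the
   index.  */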
6179 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6180 means A >= Y && A != MAX, but in this case we know that
6181 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6184 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6186 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6188 if (TREE_CODE (bound) == LT_EXPR)
6189 a = TREE_OPERAND (bound, 0);
6190 else if (TREE_CODE (bound) == GT_EXPR)
6191 a = TREE_OPERAND (bound, 1);
6195 typea = TREE_TYPE (a);
6196 if (!INTEGRAL_TYPE_P (typea)
6197 && !POINTER_TYPE_P (typea))
6200 if (TREE_CODE (ineq) == LT_EXPR)
6202 a1 = TREE_OPERAND (ineq, 1);
6203 y = TREE_OPERAND (ineq, 0);
6205 else if (TREE_CODE (ineq) == GT_EXPR)
6207 a1 = TREE_OPERAND (ineq, 0);
6208 y = TREE_OPERAND (ineq, 1);
6213 if (TREE_TYPE (a1) != typea)
6216 diff = fold (build2 (MINUS_EXPR, typea, a1, a));
6217 if (!integer_onep (diff))
6220 return fold (build2 (GE_EXPR, type, a, y));
6223 /* Perform constant folding and related simplification of EXPR.
6224 The related simplifications include x*1 => x, x*0 => 0, etc.,
6225 and application of the associative law.
6226 NOP_EXPR conversions may be removed freely (as long as we
6227 are careful not to change the type of the overall expression).
6228 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
6229 but we can constant-fold them if they have constant operands. */
6231 #ifdef ENABLE_FOLD_CHECKING
6232 # define fold(x) fold_1 (x)
6233 static tree fold_1 (tree);
6234 static
6235 #endif
6236 tree
6237 fold (tree expr)
6238 {
6239 const tree t = expr;
6240 const tree type = TREE_TYPE (expr);
6241 tree t1 = NULL_TREE;
6242 tree tem;
6243 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6244 enum tree_code code = TREE_CODE (t);
6245 enum tree_code_class kind = TREE_CODE_CLASS (code);
6247 /* WINS will be nonzero when the switch is done
6248 if all operands are constant. */
6249 int wins = 1;
6251 /* Return right away if a constant. */
6252 if (kind == tcc_constant)
6255 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6256 {
6257 tree subop;
6259 /* Special case for conversion ops that can have fixed point args. */
6260 arg0 = TREE_OPERAND (t, 0);
6262 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6264 STRIP_SIGN_NOPS (arg0);
6266 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
6267 subop = TREE_REALPART (arg0);
6268 else
6269 subop = arg0;
6271 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6272 && TREE_CODE (subop) != REAL_CST)
6273 /* Note that TREE_CONSTANT isn't enough:
6274 static var addresses are constant but we can't
6275 do arithmetic on them. */
6278 else if (IS_EXPR_CODE_CLASS (kind))
6280 int len = TREE_CODE_LENGTH (code);
6282 for (i = 0; i < len; i++)
6284 tree op = TREE_OPERAND (t, i);
6288 continue; /* Valid for CALL_EXPR, at least. */
6290 /* Strip any conversions that don't change the mode. This is
6291 safe for every expression, except for a comparison expression
6292 because its signedness is derived from its operands. So, in
6293 the latter case, only strip conversions that don't change the signedness.
6296 Note that this is done as an internal manipulation within the
6297 constant folder, in order to find the simplest representation
6298 of the arguments so that their form can be studied. In any
6299 case, the appropriate type conversions should be put back in
6300 the tree that will get out of the constant folder. */
6301 if (kind == tcc_comparison)
6302 STRIP_SIGN_NOPS (op);
6306 if (TREE_CODE (op) == COMPLEX_CST)
6307 subop = TREE_REALPART (op);
6311 if (TREE_CODE (subop) != INTEGER_CST
6312 && TREE_CODE (subop) != REAL_CST)
6313 /* Note that TREE_CONSTANT isn't enough:
6314 static var addresses are constant but we can't
6315 do arithmetic on them. */
6325 /* If this is a commutative operation, and ARG0 is a constant, move it
6326 to ARG1 to reduce the number of tests below. */
6327 if (commutative_tree_code (code)
6328 && tree_swap_operands_p (arg0, arg1, true))
6329 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6330 TREE_OPERAND (t, 0)));
6332 /* Now WINS is set as described above,
6333 ARG0 is the first operand of EXPR,
6334 and ARG1 is the second operand (if it has more than one operand).
6336 First check for cases where an arithmetic operation is applied to a
6337 compound, conditional, or comparison operation. Push the arithmetic
6338 operation inside the compound or conditional to see if any folding
6339 can then be done. Convert comparison to conditional for this purpose.
6340 This also optimizes non-constant cases that used to be done in expand_expr.
6343 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6344 where one of the operands is a comparison and the other is a comparison, a
6345 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6346 code below would make the expression more complex. Change it to a
6347 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6348 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
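/* For example, for integer comparisons a < b and c < d:
     (a < b) & (c < d)    becomes a TRUTH_AND_EXPR of the two tests,
     (a < b) != (c < d)   becomes a TRUTH_XOR_EXPR, and
     (a < b) == (c < d)   becomes the inversion of a TRUTH_XOR_EXPR.  */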
6350 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6351 || code == EQ_EXPR || code == NE_EXPR)
6352 && ((truth_value_p (TREE_CODE (arg0))
6353 && (truth_value_p (TREE_CODE (arg1))
6354 || (TREE_CODE (arg1) == BIT_AND_EXPR
6355 && integer_onep (TREE_OPERAND (arg1, 1)))))
6356 || (truth_value_p (TREE_CODE (arg1))
6357 && (truth_value_p (TREE_CODE (arg0))
6358 || (TREE_CODE (arg0) == BIT_AND_EXPR
6359 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6361 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6362 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6364 type, fold_convert (boolean_type_node, arg0),
6365 fold_convert (boolean_type_node, arg1)));
6367 if (code == EQ_EXPR)
6368 tem = invert_truthvalue (tem);
6373 if (TREE_CODE_CLASS (code) == tcc_unary)
6375 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6376 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6377 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6378 else if (TREE_CODE (arg0) == COND_EXPR)
6380 tree arg01 = TREE_OPERAND (arg0, 1);
6381 tree arg02 = TREE_OPERAND (arg0, 2);
6382 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6383 arg01 = fold (build1 (code, type, arg01));
6384 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6385 arg02 = fold (build1 (code, type, arg02));
6386 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6389 /* If this was a conversion, and all we did was to move it
6390 inside the COND_EXPR, bring it back out.  But leave it if
6391 it is a conversion from integer to integer and the
6392 result precision is no wider than a word since such a
6393 conversion is cheap and may be optimized away by combine,
6394 while it couldn't if it were outside the COND_EXPR. Then return
6395 so we don't get into an infinite recursion loop taking the
6396 conversion out and then back in. */
6398 if ((code == NOP_EXPR || code == CONVERT_EXPR
6399 || code == NON_LVALUE_EXPR)
6400 && TREE_CODE (tem) == COND_EXPR
6401 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6402 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6403 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6404 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6405 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6406 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6407 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6409 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6410 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
6411 tem = build1 (code, type,
6413 TREE_TYPE (TREE_OPERAND
6414 (TREE_OPERAND (tem, 1), 0)),
6415 TREE_OPERAND (tem, 0),
6416 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6417 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6420 else if (COMPARISON_CLASS_P (arg0))
6422 if (TREE_CODE (type) == BOOLEAN_TYPE)
6424 arg0 = copy_node (arg0);
6425 TREE_TYPE (arg0) = type;
6428 else if (TREE_CODE (type) != INTEGER_TYPE)
6429 return fold (build3 (COND_EXPR, type, arg0,
6430 fold (build1 (code, type,
6432 fold (build1 (code, type,
6433 integer_zero_node))));
6436 else if (TREE_CODE_CLASS (code) == tcc_comparison
6437 && TREE_CODE (arg0) == COMPOUND_EXPR)
6438 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6439 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6440 else if (TREE_CODE_CLASS (code) == tcc_comparison
6441 && TREE_CODE (arg1) == COMPOUND_EXPR)
6442 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6443 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6444 else if (TREE_CODE_CLASS (code) == tcc_binary
6445 || TREE_CODE_CLASS (code) == tcc_comparison)
6447 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6448 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6449 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6451 if (TREE_CODE (arg1) == COMPOUND_EXPR
6452 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6453 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6454 fold (build2 (code, type,
6455 arg0, TREE_OPERAND (arg1, 1))));
6457 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
6459 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6460 /*cond_first_p=*/1);
6461 if (tem != NULL_TREE)
6465 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
6467 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6468 /*cond_first_p=*/0);
6469 if (tem != NULL_TREE)
6477 return fold (DECL_INITIAL (t));
6482 case FIX_TRUNC_EXPR:
6484 case FIX_FLOOR_EXPR:
6485 case FIX_ROUND_EXPR:
6486 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6487 return TREE_OPERAND (t, 0);
6489 /* Handle cases of two conversions in a row. */
6490 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6491 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6493 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6494 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6495 int inside_int = INTEGRAL_TYPE_P (inside_type);
6496 int inside_ptr = POINTER_TYPE_P (inside_type);
6497 int inside_float = FLOAT_TYPE_P (inside_type);
6498 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6499 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6500 int inter_int = INTEGRAL_TYPE_P (inter_type);
6501 int inter_ptr = POINTER_TYPE_P (inter_type);
6502 int inter_float = FLOAT_TYPE_P (inter_type);
6503 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6504 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6505 int final_int = INTEGRAL_TYPE_P (type);
6506 int final_ptr = POINTER_TYPE_P (type);
6507 int final_float = FLOAT_TYPE_P (type);
6508 unsigned int final_prec = TYPE_PRECISION (type);
6509 int final_unsignedp = TYPE_UNSIGNED (type);
6511 /* In addition to the cases of two conversions in a row
6512 handled below, if we are converting something to its own
6513 type via an object of identical or wider precision, neither
6514 conversion is needed. */
6515 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6516 && ((inter_int && final_int) || (inter_float && final_float))
6517 && inter_prec >= final_prec)
6518 return fold (build1 (code, type,
6519 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6521 /* Likewise, if the intermediate and final types are either both
6522 float or both integer, we don't need the middle conversion if
6523 it is wider than the final type and doesn't change the signedness
6524 (for integers). Avoid this if the final type is a pointer
6525 since then we sometimes need the inner conversion. Likewise if
6526 the outer has a precision not equal to the size of its mode. */
6527 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6528 || (inter_float && inside_float))
6529 && inter_prec >= inside_prec
6530 && (inter_float || inter_unsignedp == inside_unsignedp)
6531 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6532 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6534 return fold (build1 (code, type,
6535 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6537 /* If we have a sign-extension of a zero-extended value, we can
6538 replace that by a single zero-extension. */
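/* E.g., assuming 8-bit unsigned char, 16-bit short and 32-bit int,
   (int) (short) (unsigned char) x needs only the zero-extension
   (int) (unsigned char) x, since the intermediate sign-extension
   only ever sees the values 0..255.  */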
6539 if (inside_int && inter_int && final_int
6540 && inside_prec < inter_prec && inter_prec < final_prec
6541 && inside_unsignedp && !inter_unsignedp)
6542 return fold (build1 (code, type,
6543 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6545 /* Two conversions in a row are not needed unless:
6546 - some conversion is floating-point (overstrict for now), or
6547 - the intermediate type is narrower than both initial and final, or
6549 - the intermediate type and innermost type differ in signedness,
6550 and the outermost type is wider than the intermediate, or
6551 - the initial type is a pointer type and the precisions of the
6552 intermediate and final types differ, or
6553 - the final type is a pointer type and the precisions of the
6554 initial and intermediate types differ. */
6555 if (! inside_float && ! inter_float && ! final_float
6556 && (inter_prec > inside_prec || inter_prec > final_prec)
6557 && ! (inside_int && inter_int
6558 && inter_unsignedp != inside_unsignedp
6559 && inter_prec < final_prec)
6560 && ((inter_unsignedp && inter_prec > inside_prec)
6561 == (final_unsignedp && final_prec > inter_prec))
6562 && ! (inside_ptr && inter_prec != final_prec)
6563 && ! (final_ptr && inside_prec != inter_prec)
6564 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6565 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6567 return fold (build1 (code, type,
6568 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6571 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6572 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6573 /* Detect assigning a bitfield. */
6574 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6575 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6577 /* Don't leave an assignment inside a conversion
6578 unless assigning a bitfield. */
6579 tree prev = TREE_OPERAND (t, 0);
6580 tem = copy_node (t);
6581 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6582 /* First do the assignment, then return converted constant. */
6583 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6584 TREE_NO_WARNING (tem) = 1;
6585 TREE_USED (tem) = 1;
6589 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6590 constant (if x has signed type, the sign bit cannot be set
6591 in c). This folds extension into the BIT_AND_EXPR. */
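/* E.g., assuming 32-bit int and 64-bit long,
   (long) (x & 0xff) with int x becomes (long) x & 0xffL,
   so the mask stays visible to later bitwise folding.  */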
6592 if (INTEGRAL_TYPE_P (type)
6593 && TREE_CODE (type) != BOOLEAN_TYPE
6594 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6595 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6597 tree and = TREE_OPERAND (t, 0);
6598 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6601 if (TYPE_UNSIGNED (TREE_TYPE (and))
6602 || (TYPE_PRECISION (type)
6603 <= TYPE_PRECISION (TREE_TYPE (and))))
6605 else if (TYPE_PRECISION (TREE_TYPE (and1))
6606 <= HOST_BITS_PER_WIDE_INT
6607 && host_integerp (and1, 1))
6609 unsigned HOST_WIDE_INT cst;
6611 cst = tree_low_cst (and1, 1);
6612 cst &= (HOST_WIDE_INT) -1
6613 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6614 change = (cst == 0);
6615 #ifdef LOAD_EXTEND_OP
6617 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6620 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6621 and0 = fold_convert (uns, and0);
6622 and1 = fold_convert (uns, and1);
6627 return fold (build2 (BIT_AND_EXPR, type,
6628 fold_convert (type, and0),
6629 fold_convert (type, and1)));
6632 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6633 T2 that point to types of the same size.  */
6634 if (POINTER_TYPE_P (TREE_TYPE (t))
6635 && BINARY_CLASS_P (arg0)
6636 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6637 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6639 tree arg00 = TREE_OPERAND (arg0, 0);
6640 tree t0 = TREE_TYPE (t);
6641 tree t1 = TREE_TYPE (arg00);
6642 tree tt0 = TREE_TYPE (t0);
6643 tree tt1 = TREE_TYPE (t1);
6644 tree s0 = TYPE_SIZE (tt0);
6645 tree s1 = TYPE_SIZE (tt1);
6647 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6648 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6649 TREE_OPERAND (arg0, 1));
6652 tem = fold_convert_const (code, type, arg0);
6653 return tem ? tem : t;
6655 case VIEW_CONVERT_EXPR:
6656 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6657 return build1 (VIEW_CONVERT_EXPR, type,
6658 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6662 if (TREE_CODE (arg0) == CONSTRUCTOR
6663 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6665 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6667 return TREE_VALUE (m);
6672 if (TREE_CONSTANT (t) != wins)
6674 tem = copy_node (t);
6675 TREE_CONSTANT (tem) = wins;
6676 TREE_INVARIANT (tem) = wins;
6682 if (negate_expr_p (arg0))
6683 return fold_convert (type, negate_expr (arg0));
6687 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6688 return fold_abs_const (arg0, type);
6689 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6690 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6691 /* Convert fabs((double)float) into (double)fabsf(float). */
6692 else if (TREE_CODE (arg0) == NOP_EXPR
6693 && TREE_CODE (type) == REAL_TYPE)
6695 tree targ0 = strip_float_extensions (arg0);
6697 return fold_convert (type, fold (build1 (ABS_EXPR,
6701 else if (tree_expr_nonnegative_p (arg0))
6706 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6707 return fold_convert (type, arg0);
6708 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6709 return build2 (COMPLEX_EXPR, type,
6710 TREE_OPERAND (arg0, 0),
6711 negate_expr (TREE_OPERAND (arg0, 1)));
6712 else if (TREE_CODE (arg0) == COMPLEX_CST)
6713 return build_complex (type, TREE_REALPART (arg0),
6714 negate_expr (TREE_IMAGPART (arg0)));
6715 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6716 return fold (build2 (TREE_CODE (arg0), type,
6717 fold (build1 (CONJ_EXPR, type,
6718 TREE_OPERAND (arg0, 0))),
6719 fold (build1 (CONJ_EXPR, type,
6720 TREE_OPERAND (arg0, 1)))));
6721 else if (TREE_CODE (arg0) == CONJ_EXPR)
6722 return TREE_OPERAND (arg0, 0);
6726 if (TREE_CODE (arg0) == INTEGER_CST)
6727 return fold_not_const (arg0, type);
6728 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6729 return TREE_OPERAND (arg0, 0);
6733 /* A + (-B) -> A - B */
6734 if (TREE_CODE (arg1) == NEGATE_EXPR)
6735 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6736 /* (-A) + B -> B - A */
6737 if (TREE_CODE (arg0) == NEGATE_EXPR
6738 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6739 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6740 if (! FLOAT_TYPE_P (type))
6742 if (integer_zerop (arg1))
6743 return non_lvalue (fold_convert (type, arg0));
6745 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6746 with a constant, and the two constants have no bits in common,
6747 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications.  */
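/* E.g., (x & 0xf0) + (y & 0x0f): the two summands have no bits in
   common, so no addition carries occur and the sum equals
   (x & 0xf0) | (y & 0x0f).  */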
6749 if (TREE_CODE (arg0) == BIT_AND_EXPR
6750 && TREE_CODE (arg1) == BIT_AND_EXPR
6751 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6752 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6753 && integer_zerop (const_binop (BIT_AND_EXPR,
6754 TREE_OPERAND (arg0, 1),
6755 TREE_OPERAND (arg1, 1), 0)))
6757 code = BIT_IOR_EXPR;
6761 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6762 (plus (plus (mult) (mult)) (foo)) so that we can
6763 take advantage of the factoring cases below. */
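/* E.g., (a*b + c) + d*e is rewritten as (a*b + d*e) + c, bringing
   the two multiplications together so they can be factored.  */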
6764 if (((TREE_CODE (arg0) == PLUS_EXPR
6765 || TREE_CODE (arg0) == MINUS_EXPR)
6766 && TREE_CODE (arg1) == MULT_EXPR)
6767 || ((TREE_CODE (arg1) == PLUS_EXPR
6768 || TREE_CODE (arg1) == MINUS_EXPR)
6769 && TREE_CODE (arg0) == MULT_EXPR))
6771 tree parg0, parg1, parg, marg;
6772 enum tree_code pcode;
6774 if (TREE_CODE (arg1) == MULT_EXPR)
6775 parg = arg0, marg = arg1;
6777 parg = arg1, marg = arg0;
6778 pcode = TREE_CODE (parg);
6779 parg0 = TREE_OPERAND (parg, 0);
6780 parg1 = TREE_OPERAND (parg, 1);
6784 if (TREE_CODE (parg0) == MULT_EXPR
6785 && TREE_CODE (parg1) != MULT_EXPR)
6786 return fold (build2 (pcode, type,
6787 fold (build2 (PLUS_EXPR, type,
6788 fold_convert (type, parg0),
6789 fold_convert (type, marg))),
6790 fold_convert (type, parg1)));
6791 if (TREE_CODE (parg0) != MULT_EXPR
6792 && TREE_CODE (parg1) == MULT_EXPR)
6793 return fold (build2 (PLUS_EXPR, type,
6794 fold_convert (type, parg0),
6795 fold (build2 (pcode, type,
6796 fold_convert (type, marg),
6801 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6803 tree arg00, arg01, arg10, arg11;
6804 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6806 /* (A * C) + (B * C) -> (A+B) * C.
6807 We are most concerned about the case where C is a constant,
6808 but other combinations show up during loop reduction. Since
6809 it is not difficult, try all four possibilities. */
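/* E.g., i*4 + j*4 becomes (i + j)*4, whichever side of each
   multiplication the common operand C appears on.  */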
6811 arg00 = TREE_OPERAND (arg0, 0);
6812 arg01 = TREE_OPERAND (arg0, 1);
6813 arg10 = TREE_OPERAND (arg1, 0);
6814 arg11 = TREE_OPERAND (arg1, 1);
6817 if (operand_equal_p (arg01, arg11, 0))
6818 same = arg01, alt0 = arg00, alt1 = arg10;
6819 else if (operand_equal_p (arg00, arg10, 0))
6820 same = arg00, alt0 = arg01, alt1 = arg11;
6821 else if (operand_equal_p (arg00, arg11, 0))
6822 same = arg00, alt0 = arg01, alt1 = arg10;
6823 else if (operand_equal_p (arg01, arg10, 0))
6824 same = arg01, alt0 = arg00, alt1 = arg11;
6826 /* No identical multiplicands; see if we can find a common
6827 power-of-two factor in non-power-of-two multiplies. This
6828 can help in multi-dimensional array access. */
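/* E.g., i*12 + j*4 has no common operand, but 4 divides 12, so the
   sum is rewritten as (i*3 + j)*4; the code below first moves the
   constant of smaller absolute value into INT11.  */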
6829 else if (TREE_CODE (arg01) == INTEGER_CST
6830 && TREE_CODE (arg11) == INTEGER_CST
6831 && TREE_INT_CST_HIGH (arg01) == 0
6832 && TREE_INT_CST_HIGH (arg11) == 0)
6834 HOST_WIDE_INT int01, int11, tmp;
6835 int01 = TREE_INT_CST_LOW (arg01);
6836 int11 = TREE_INT_CST_LOW (arg11);
6838 /* Move min of absolute values to int11. */
6839 if ((int01 >= 0 ? int01 : -int01)
6840 < (int11 >= 0 ? int11 : -int11))
6842 tmp = int01, int01 = int11, int11 = tmp;
6843 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6844 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6847 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6849 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6850 build_int_cst (NULL_TREE,
6858 return fold (build2 (MULT_EXPR, type,
6859 fold (build2 (PLUS_EXPR, type,
6860 fold_convert (type, alt0),
6861 fold_convert (type, alt1))),
6865 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
6866 of the array.  The loop optimizer sometimes produces this type of expression.  */
6868 if (TREE_CODE (arg0) == ADDR_EXPR
6869 && TREE_CODE (arg1) == MULT_EXPR)
6871 tem = try_move_mult_to_index (type, PLUS_EXPR, arg0, arg1);
6875 else if (TREE_CODE (arg1) == ADDR_EXPR
6876 && TREE_CODE (arg0) == MULT_EXPR)
6878 tem = try_move_mult_to_index (type, PLUS_EXPR, arg1, arg0);
6885 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6886 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6887 return non_lvalue (fold_convert (type, arg0));
6889 /* Likewise if the operands are reversed. */
6890 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6891 return non_lvalue (fold_convert (type, arg1));
6893 /* Convert X + -C into X - C. */
6894 if (TREE_CODE (arg1) == REAL_CST
6895 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6897 tem = fold_negate_const (arg1, type);
6898 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6899 return fold (build2 (MINUS_EXPR, type,
6900 fold_convert (type, arg0),
6901 fold_convert (type, tem)));
6904 /* Convert x+x into x*2.0. */
6905 if (operand_equal_p (arg0, arg1, 0)
6906 && SCALAR_FLOAT_TYPE_P (type))
6907 return fold (build2 (MULT_EXPR, type, arg0,
6908 build_real (type, dconst2)));
6910 /* Convert x*c+x into x*(c+1). */
6911 if (flag_unsafe_math_optimizations
6912 && TREE_CODE (arg0) == MULT_EXPR
6913 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6914 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6915 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6919 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6920 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6921 return fold (build2 (MULT_EXPR, type, arg1,
6922 build_real (type, c)));
6925 /* Convert x+x*c into x*(c+1). */
6926 if (flag_unsafe_math_optimizations
6927 && TREE_CODE (arg1) == MULT_EXPR
6928 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6929 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6930 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6934 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6935 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6936 return fold (build2 (MULT_EXPR, type, arg0,
6937 build_real (type, c)));
6940 /* Convert x*c1+x*c2 into x*(c1+c2). */
6941 if (flag_unsafe_math_optimizations
6942 && TREE_CODE (arg0) == MULT_EXPR
6943 && TREE_CODE (arg1) == MULT_EXPR
6944 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6945 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6946 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6947 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6948 && operand_equal_p (TREE_OPERAND (arg0, 0),
6949 TREE_OPERAND (arg1, 0), 0))
6951 REAL_VALUE_TYPE c1, c2;
6953 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6954 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6955 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6956 return fold (build2 (MULT_EXPR, type,
6957 TREE_OPERAND (arg0, 0),
6958 build_real (type, c1)));
6960 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
6961 if (flag_unsafe_math_optimizations
6962 && TREE_CODE (arg1) == PLUS_EXPR
6963 && TREE_CODE (arg0) != MULT_EXPR)
6965 tree tree10 = TREE_OPERAND (arg1, 0);
6966 tree tree11 = TREE_OPERAND (arg1, 1);
6967 if (TREE_CODE (tree11) == MULT_EXPR
6968 && TREE_CODE (tree10) == MULT_EXPR)
6971 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6972 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6975 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
6976 if (flag_unsafe_math_optimizations
6977 && TREE_CODE (arg0) == PLUS_EXPR
6978 && TREE_CODE (arg1) != MULT_EXPR)
6980 tree tree00 = TREE_OPERAND (arg0, 0);
6981 tree tree01 = TREE_OPERAND (arg0, 1);
6982 if (TREE_CODE (tree01) == MULT_EXPR
6983 && TREE_CODE (tree00) == MULT_EXPR)
6986 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6987 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6993 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6994 is a rotate of A by C1 bits. */
6995 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6996 is a rotate of A by B bits. */
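/* E.g., with 32-bit unsigned int x,
     (x << 3) + (x >> 29)        is a left-rotation of x by 3, and
     (x << n) + (x >> (32 - n))  is a left-rotation of x by n.  */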
6998 enum tree_code code0, code1;
6999 code0 = TREE_CODE (arg0);
7000 code1 = TREE_CODE (arg1);
7001 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7002 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7003 && operand_equal_p (TREE_OPERAND (arg0, 0),
7004 TREE_OPERAND (arg1, 0), 0)
7005 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7007 tree tree01, tree11;
7008 enum tree_code code01, code11;
7010 tree01 = TREE_OPERAND (arg0, 1);
7011 tree11 = TREE_OPERAND (arg1, 1);
7012 STRIP_NOPS (tree01);
7013 STRIP_NOPS (tree11);
7014 code01 = TREE_CODE (tree01);
7015 code11 = TREE_CODE (tree11);
7016 if (code01 == INTEGER_CST
7017 && code11 == INTEGER_CST
7018 && TREE_INT_CST_HIGH (tree01) == 0
7019 && TREE_INT_CST_HIGH (tree11) == 0
7020 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7021 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7022 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7023 code0 == LSHIFT_EXPR ? tree01 : tree11);
7024 else if (code11 == MINUS_EXPR)
7026 tree tree110, tree111;
7027 tree110 = TREE_OPERAND (tree11, 0);
7028 tree111 = TREE_OPERAND (tree11, 1);
7029 STRIP_NOPS (tree110);
7030 STRIP_NOPS (tree111);
7031 if (TREE_CODE (tree110) == INTEGER_CST
7032 && 0 == compare_tree_int (tree110,
7034 (TREE_TYPE (TREE_OPERAND
7036 && operand_equal_p (tree01, tree111, 0))
7037 return build2 ((code0 == LSHIFT_EXPR
7040 type, TREE_OPERAND (arg0, 0), tree01);
7042 else if (code01 == MINUS_EXPR)
7044 tree tree010, tree011;
7045 tree010 = TREE_OPERAND (tree01, 0);
7046 tree011 = TREE_OPERAND (tree01, 1);
7047 STRIP_NOPS (tree010);
7048 STRIP_NOPS (tree011);
7049 if (TREE_CODE (tree010) == INTEGER_CST
7050 && 0 == compare_tree_int (tree010,
7052 (TREE_TYPE (TREE_OPERAND
7054 && operand_equal_p (tree11, tree011, 0))
7055 return build2 ((code0 != LSHIFT_EXPR
7058 type, TREE_OPERAND (arg0, 0), tree11);
7064 /* In most languages, we can't associate operations on floats through
7065 parentheses. Rather than remember where the parentheses were, we
7066 don't associate floats at all, unless the user has specified
7067 -funsafe-math-optimizations. */
7070 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7072 tree var0, con0, lit0, minus_lit0;
7073 tree var1, con1, lit1, minus_lit1;
7075 /* Split both trees into variables, constants, and literals. Then
7076 associate each group together, the constants with literals,
7077 then the result with variables. This increases the chances of
7078 literals being recombined later and of generating relocatable
7079 expressions for the sum of a constant and literal. */
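/* E.g., (x + 1) + (y + 2) splits into the variables x, y and the
   literals 1, 2, and is reassociated as (x + y) + 3.  */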
7080 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7081 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7082 code == MINUS_EXPR);
7084 /* Only do something if we found more than two objects. Otherwise,
7085 nothing has changed and we risk infinite recursion. */
7086 if (2 < ((var0 != 0) + (var1 != 0)
7087 + (con0 != 0) + (con1 != 0)
7088 + (lit0 != 0) + (lit1 != 0)
7089 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7091 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7092 if (code == MINUS_EXPR)
7095 var0 = associate_trees (var0, var1, code, type);
7096 con0 = associate_trees (con0, con1, code, type);
7097 lit0 = associate_trees (lit0, lit1, code, type);
7098 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7100 /* Preserve the MINUS_EXPR if the negative part of the literal is
7101 greater than the positive part. Otherwise, the multiplicative
7102 folding code (i.e. extract_muldiv) may be fooled when
7103 unsigned constants are subtracted, as in the following
7104 example: ((X*2 + 4) - 8U)/2.  */
7105 if (minus_lit0 && lit0)
7107 if (TREE_CODE (lit0) == INTEGER_CST
7108 && TREE_CODE (minus_lit0) == INTEGER_CST
7109 && tree_int_cst_lt (lit0, minus_lit0))
7111 minus_lit0 = associate_trees (minus_lit0, lit0,
7117 lit0 = associate_trees (lit0, minus_lit0,
7125 return fold_convert (type,
7126 associate_trees (var0, minus_lit0,
7130 con0 = associate_trees (con0, minus_lit0,
7132 return fold_convert (type,
7133 associate_trees (var0, con0,
7138 con0 = associate_trees (con0, lit0, code, type);
7139 return fold_convert (type, associate_trees (var0, con0,
7146 t1 = const_binop (code, arg0, arg1, 0);
7147 if (t1 != NULL_TREE)
7149 /* The return value should always have
7150 the same type as the original expression. */
7151 if (TREE_TYPE (t1) != type)
7152 t1 = fold_convert (type, t1);
7159 /* A - (-B) -> A + B */
7160 if (TREE_CODE (arg1) == NEGATE_EXPR)
7161 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
7162 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7163 if (TREE_CODE (arg0) == NEGATE_EXPR
7164 && (FLOAT_TYPE_P (type)
7165 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7166 && negate_expr_p (arg1)
7167 && reorder_operands_p (arg0, arg1))
7168 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
7169 TREE_OPERAND (arg0, 0)));
7171 if (! FLOAT_TYPE_P (type))
7173 if (! wins && integer_zerop (arg0))
7174 return negate_expr (fold_convert (type, arg1));
7175 if (integer_zerop (arg1))
7176 return non_lvalue (fold_convert (type, arg0));
7178 /* Fold A - (A & B) into ~B & A. */
7179 if (!TREE_SIDE_EFFECTS (arg0)
7180 && TREE_CODE (arg1) == BIT_AND_EXPR)
7182 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7183 return fold (build2 (BIT_AND_EXPR, type,
7184 fold (build1 (BIT_NOT_EXPR, type,
7185 TREE_OPERAND (arg1, 0))),
7187 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7188 return fold (build2 (BIT_AND_EXPR, type,
7189 fold (build1 (BIT_NOT_EXPR, type,
7190 TREE_OPERAND (arg1, 1))),
7194 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7195 any power of 2 minus 1. */
7196 if (TREE_CODE (arg0) == BIT_AND_EXPR
7197 && TREE_CODE (arg1) == BIT_AND_EXPR
7198 && operand_equal_p (TREE_OPERAND (arg0, 0),
7199 TREE_OPERAND (arg1, 0), 0))
7201 tree mask0 = TREE_OPERAND (arg0, 1);
7202 tree mask1 = TREE_OPERAND (arg1, 1);
7203 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
7205 if (operand_equal_p (tem, mask1, 0))
7207 tem = fold (build2 (BIT_XOR_EXPR, type,
7208 TREE_OPERAND (arg0, 0), mask1));
7209 return fold (build2 (MINUS_EXPR, type, tem, mask1));
7214 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7215 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7216 return non_lvalue (fold_convert (type, arg0));
7218 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7219 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7220 (-ARG1 + ARG0) reduces to -ARG1. */
7221 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7222 return negate_expr (fold_convert (type, arg1));
7224 /* Fold &x - &x. This can happen from &x.foo - &x.
7225 This is unsafe for certain floats even in non-IEEE formats.
7226 In IEEE, it is unsafe because it does wrong for NaNs.
7227 Also note that operand_equal_p is always false if an operand is volatile.  */
7230 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7231 && operand_equal_p (arg0, arg1, 0))
7232 return fold_convert (type, integer_zero_node);
7234 /* A - B -> A + (-B) if B is easily negatable. */
7235 if (!wins && negate_expr_p (arg1)
7236 && ((FLOAT_TYPE_P (type)
7237 /* Avoid this transformation if B is a positive REAL_CST. */
7238 && (TREE_CODE (arg1) != REAL_CST
7239 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7240 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7241 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
7243 /* Try folding difference of addresses. */
7247 if ((TREE_CODE (arg0) == ADDR_EXPR
7248 || TREE_CODE (arg1) == ADDR_EXPR)
7249 && ptr_difference_const (arg0, arg1, &diff))
7250 return build_int_cst_type (type, diff);
7253 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7254 of the array.  The loop optimizer sometimes produces this type of expression.  */
7256 if (TREE_CODE (arg0) == ADDR_EXPR
7257 && TREE_CODE (arg1) == MULT_EXPR)
7259 tem = try_move_mult_to_index (type, MINUS_EXPR, arg0, arg1);
7264 if (TREE_CODE (arg0) == MULT_EXPR
7265 && TREE_CODE (arg1) == MULT_EXPR
7266 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7268 /* (A * C) - (B * C) -> (A-B) * C. */
7269 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7270 TREE_OPERAND (arg1, 1), 0))
7271 return fold (build2 (MULT_EXPR, type,
7272 fold (build2 (MINUS_EXPR, type,
7273 TREE_OPERAND (arg0, 0),
7274 TREE_OPERAND (arg1, 0))),
7275 TREE_OPERAND (arg0, 1)));
7276 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7277 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7278 TREE_OPERAND (arg1, 0), 0))
7279 return fold (build2 (MULT_EXPR, type,
7280 TREE_OPERAND (arg0, 0),
7281 fold (build2 (MINUS_EXPR, type,
7282 TREE_OPERAND (arg0, 1),
7283 TREE_OPERAND (arg1, 1)))));
7289 /* (-A) * (-B) -> A * B */
7290 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7291 return fold (build2 (MULT_EXPR, type,
7292 TREE_OPERAND (arg0, 0),
7293 negate_expr (arg1)));
7294 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7295 return fold (build2 (MULT_EXPR, type,
7297 TREE_OPERAND (arg1, 0)));
7299 if (! FLOAT_TYPE_P (type))
7301 if (integer_zerop (arg1))
7302 return omit_one_operand (type, arg1, arg0);
7303 if (integer_onep (arg1))
7304 return non_lvalue (fold_convert (type, arg0));
7306 /* (a * (1 << b)) is (a << b) */
7307 if (TREE_CODE (arg1) == LSHIFT_EXPR
7308 && integer_onep (TREE_OPERAND (arg1, 0)))
7309 return fold (build2 (LSHIFT_EXPR, type, arg0,
7310 TREE_OPERAND (arg1, 1)));
7311 if (TREE_CODE (arg0) == LSHIFT_EXPR
7312 && integer_onep (TREE_OPERAND (arg0, 0)))
7313 return fold (build2 (LSHIFT_EXPR, type, arg1,
7314 TREE_OPERAND (arg0, 1)));
7316 if (TREE_CODE (arg1) == INTEGER_CST
7317 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
7318 fold_convert (type, arg1),
7320 return fold_convert (type, tem);
7325 /* Maybe fold x * 0 to 0. The expressions aren't the same
7326 when x is NaN, since x * 0 is also NaN. Nor are they the
7327 same in modes with signed zeros, since multiplying a
7328 negative value by 0 gives -0, not +0. */
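/* E.g., under IEEE arithmetic (-3.0) * 0.0 is -0.0 and NaN * 0.0
   is NaN, so folding x*0 to +0.0 needs these guards.  */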
7329 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7330 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7331 && real_zerop (arg1))
7332 return omit_one_operand (type, arg1, arg0);
7333 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7334 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7335 && real_onep (arg1))
7336 return non_lvalue (fold_convert (type, arg0));
7338 /* Transform x * -1.0 into -x. */
7339 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7340 && real_minus_onep (arg1))
7341 return fold_convert (type, negate_expr (arg0));
7343 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7344 if (flag_unsafe_math_optimizations
7345 && TREE_CODE (arg0) == RDIV_EXPR
7346 && TREE_CODE (arg1) == REAL_CST
7347 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7349 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7352 return fold (build2 (RDIV_EXPR, type, tem,
7353 TREE_OPERAND (arg0, 1)));
7356 if (flag_unsafe_math_optimizations)
7358 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7359 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7361 /* Optimizations of root(...)*root(...). */
7362 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7364 tree rootfn, arg, arglist;
7365 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7366 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7368 /* Optimize sqrt(x)*sqrt(x) as x. */
7369 if (BUILTIN_SQRT_P (fcode0)
7370 && operand_equal_p (arg00, arg10, 0)
7371 && ! HONOR_SNANS (TYPE_MODE (type)))
7374 /* Optimize root(x)*root(y) as root(x*y). */
7375 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7376 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7377 arglist = build_tree_list (NULL_TREE, arg);
7378 return build_function_call_expr (rootfn, arglist);
7381 /* Optimize expN(x)*expN(y) as expN(x+y). */
7382 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7384 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7385 tree arg = build2 (PLUS_EXPR, type,
7386 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7387 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7388 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7389 return build_function_call_expr (expfn, arglist);
7392 /* Optimizations of pow(...)*pow(...). */
7393 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7394 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7395 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7397 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7398 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7400 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7401 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7404 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7405 if (operand_equal_p (arg01, arg11, 0))
7407 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7408 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7409 tree arglist = tree_cons (NULL_TREE, fold (arg),
7410 build_tree_list (NULL_TREE,
7412 return build_function_call_expr (powfn, arglist);
7415 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7416 if (operand_equal_p (arg00, arg10, 0))
7418 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7419 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7420 tree arglist = tree_cons (NULL_TREE, arg00,
7421 build_tree_list (NULL_TREE,
7423 return build_function_call_expr (powfn, arglist);
7427 /* Optimize tan(x)*cos(x) as sin(x). */
7428 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7429 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7430 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7431 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7432 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7433 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7434 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7435 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7437 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7439 if (sinfn != NULL_TREE)
7440 return build_function_call_expr (sinfn,
7441 TREE_OPERAND (arg0, 1));
7444 /* Optimize x*pow(x,c) as pow(x,c+1). */
7445 if (fcode1 == BUILT_IN_POW
7446 || fcode1 == BUILT_IN_POWF
7447 || fcode1 == BUILT_IN_POWL)
7449 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7450 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7452 if (TREE_CODE (arg11) == REAL_CST
7453 && ! TREE_CONSTANT_OVERFLOW (arg11)
7454 && operand_equal_p (arg0, arg10, 0))
7456 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7460 c = TREE_REAL_CST (arg11);
7461 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7462 arg = build_real (type, c);
7463 arglist = build_tree_list (NULL_TREE, arg);
7464 arglist = tree_cons (NULL_TREE, arg0, arglist);
7465 return build_function_call_expr (powfn, arglist);
7469 /* Optimize pow(x,c)*x as pow(x,c+1). */
7470 if (fcode0 == BUILT_IN_POW
7471 || fcode0 == BUILT_IN_POWF
7472 || fcode0 == BUILT_IN_POWL)
7474 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7475 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7477 if (TREE_CODE (arg01) == REAL_CST
7478 && ! TREE_CONSTANT_OVERFLOW (arg01)
7479 && operand_equal_p (arg1, arg00, 0))
7481 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7485 c = TREE_REAL_CST (arg01);
7486 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7487 arg = build_real (type, c);
7488 arglist = build_tree_list (NULL_TREE, arg);
7489 arglist = tree_cons (NULL_TREE, arg1, arglist);
7490 return build_function_call_expr (powfn, arglist);
7494 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7496 && operand_equal_p (arg0, arg1, 0))
7498 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7502 tree arg = build_real (type, dconst2);
7503 tree arglist = build_tree_list (NULL_TREE, arg);
7504 arglist = tree_cons (NULL_TREE, arg0, arglist);
7505 return build_function_call_expr (powfn, arglist);
7514 if (integer_all_onesp (arg1))
7515 return omit_one_operand (type, arg1, arg0);
7516 if (integer_zerop (arg1))
7517 return non_lvalue (fold_convert (type, arg0));
7518 if (operand_equal_p (arg0, arg1, 0))
7519 return non_lvalue (fold_convert (type, arg0));
7522 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7523 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7525 t1 = build_int_cst (type, -1);
7526 t1 = force_fit_type (t1, 0, false, false);
7527 return omit_one_operand (type, t1, arg1);
7531 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7532 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7534 t1 = build_int_cst (type, -1);
7535 t1 = force_fit_type (t1, 0, false, false);
7536 return omit_one_operand (type, t1, arg0);
7539 t1 = distribute_bit_expr (code, type, arg0, arg1);
7540 if (t1 != NULL_TREE)
7543 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7545 This results in more efficient code for machines without a NAND
7546 instruction. Combine will canonicalize to the first form
7547 which will allow use of NAND instructions provided by the
7548 backend if they exist. */
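/* E.g., ~a | ~b becomes ~(a & b) by De Morgan's law, a single NAND
   where the target provides one.  */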
7549 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7550 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7552 return fold (build1 (BIT_NOT_EXPR, type,
7553 build2 (BIT_AND_EXPR, type,
7554 TREE_OPERAND (arg0, 0),
7555 TREE_OPERAND (arg1, 0))));
7558 /* See if this can be simplified into a rotate first. If that
7559 is unsuccessful continue in the association code. */
7563 if (integer_zerop (arg1))
7564 return non_lvalue (fold_convert (type, arg0));
7565 if (integer_all_onesp (arg1))
7566 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7567 if (operand_equal_p (arg0, arg1, 0))
7568 return omit_one_operand (type, integer_zero_node, arg0);
7571 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7572 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7574 t1 = build_int_cst (type, -1);
7575 t1 = force_fit_type (t1, 0, false, false);
7576 return omit_one_operand (type, t1, arg1);
7580 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7581 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7583 t1 = build_int_cst (type, -1);
7584 t1 = force_fit_type (t1, 0, false, false);
7585 return omit_one_operand (type, t1, arg0);
7588 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7589 with a constant, and the two constants have no bits in common,
7590 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications.  */
7592 if (TREE_CODE (arg0) == BIT_AND_EXPR
7593 && TREE_CODE (arg1) == BIT_AND_EXPR
7594 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7595 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7596 && integer_zerop (const_binop (BIT_AND_EXPR,
7597 TREE_OPERAND (arg0, 1),
7598 TREE_OPERAND (arg1, 1), 0)))
7600 code = BIT_IOR_EXPR;
7604 /* See if this can be simplified into a rotate first. If that
7605 is unsuccessful continue in the association code. */
7609 if (integer_all_onesp (arg1))
7610 return non_lvalue (fold_convert (type, arg0));
7611 if (integer_zerop (arg1))
7612 return omit_one_operand (type, arg1, arg0);
7613 if (operand_equal_p (arg0, arg1, 0))
7614 return non_lvalue (fold_convert (type, arg0));
7616 /* ~X & X is always zero. */
7617 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7618 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7619 return omit_one_operand (type, integer_zero_node, arg1);
7621 /* X & ~X is always zero. */
7622 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7623 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7624 return omit_one_operand (type, integer_zero_node, arg0);
7626 t1 = distribute_bit_expr (code, type, arg0, arg1);
7627 if (t1 != NULL_TREE)
7629 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7630 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7631 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7634 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7636 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7637 && (~TREE_INT_CST_LOW (arg1)
7638 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7639 return fold_convert (type, TREE_OPERAND (arg0, 0));
7642 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7644 This results in more efficient code for machines without a NOR
7645 instruction. Combine will canonicalize to the first form
7646 which will allow use of NOR instructions provided by the
7647 backend if they exist. */
7648 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7649 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7651 return fold (build1 (BIT_NOT_EXPR, type,
7652 build2 (BIT_IOR_EXPR, type,
7653 TREE_OPERAND (arg0, 0),
7654 TREE_OPERAND (arg1, 0))));
7660 /* Don't touch a floating-point divide by zero unless the mode
7661 of the constant can represent infinity. */
7662 if (TREE_CODE (arg1) == REAL_CST
7663 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7664 && real_zerop (arg1))
7667 /* (-A) / (-B) -> A / B */
7668 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7669 return fold (build2 (RDIV_EXPR, type,
7670 TREE_OPERAND (arg0, 0),
7671 negate_expr (arg1)));
7672 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7673 return fold (build2 (RDIV_EXPR, type,
7675 TREE_OPERAND (arg1, 0)));
7677 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7678 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7679 && real_onep (arg1))
7680 return non_lvalue (fold_convert (type, arg0));
7682 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7683 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7684 && real_minus_onep (arg1))
7685 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7687 /* If ARG1 is a constant, we can convert this to a multiply by the
7688 reciprocal. This does not have the same rounding properties,
7689 so only do this if -funsafe-math-optimizations. We can actually
7690 always safely do it if ARG1 is a power of two, but it's hard to
7691 tell if it is or not in a portable manner. */
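/* E.g., the reciprocal of 2.0 is exact, so x / 2.0 can become
   x * 0.5 whenever we are optimizing; x / 3.0 becomes x * (1.0/3.0)
   only under -funsafe-math-optimizations.  */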
7692 if (TREE_CODE (arg1) == REAL_CST)
7694 if (flag_unsafe_math_optimizations
7695 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7697 return fold (build2 (MULT_EXPR, type, arg0, tem));
7698 /* Find the reciprocal if optimizing and the result is exact. */
7702 r = TREE_REAL_CST (arg1);
7703 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7705 tem = build_real (type, r);
7706 return fold (build2 (MULT_EXPR, type, arg0, tem));
7710 /* Convert A/B/C to A/(B*C). */
7711 if (flag_unsafe_math_optimizations
7712 && TREE_CODE (arg0) == RDIV_EXPR)
7713 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7714 fold (build2 (MULT_EXPR, type,
7715 TREE_OPERAND (arg0, 1), arg1))));
7717 /* Convert A/(B/C) to (A/B)*C. */
7718 if (flag_unsafe_math_optimizations
7719 && TREE_CODE (arg1) == RDIV_EXPR)
7720 return fold (build2 (MULT_EXPR, type,
7721 fold (build2 (RDIV_EXPR, type, arg0,
7722 TREE_OPERAND (arg1, 0))),
7723 TREE_OPERAND (arg1, 1)));
7725 /* Convert C1/(X*C2) into (C1/C2)/X. */
7726 if (flag_unsafe_math_optimizations
7727 && TREE_CODE (arg1) == MULT_EXPR
7728 && TREE_CODE (arg0) == REAL_CST
7729 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7731 tree tem = const_binop (RDIV_EXPR, arg0,
7732 TREE_OPERAND (arg1, 1), 0);
7734 return fold (build2 (RDIV_EXPR, type, tem,
7735 TREE_OPERAND (arg1, 0)));
7738 if (flag_unsafe_math_optimizations)
7740 enum built_in_function fcode = builtin_mathfn_code (arg1);
7741 /* Optimize x/expN(y) into x*expN(-y). */
7742 if (BUILTIN_EXPONENT_P (fcode))
7744 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7745 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7746 tree arglist = build_tree_list (NULL_TREE,
7747 fold_convert (type, arg));
7748 arg1 = build_function_call_expr (expfn, arglist);
7749 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7752 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7753 if (fcode == BUILT_IN_POW
7754 || fcode == BUILT_IN_POWF
7755 || fcode == BUILT_IN_POWL)
7757 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7758 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7759 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7760 tree neg11 = fold_convert (type, negate_expr (arg11));
7761 tree arglist = tree_cons(NULL_TREE, arg10,
7762 build_tree_list (NULL_TREE, neg11));
7763 arg1 = build_function_call_expr (powfn, arglist);
7764 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7768 if (flag_unsafe_math_optimizations)
7770 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7771 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7773 /* Optimize sin(x)/cos(x) as tan(x). */
7774 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7775 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7776 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7777 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7778 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7780 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7782 if (tanfn != NULL_TREE)
7783 return build_function_call_expr (tanfn,
7784 TREE_OPERAND (arg0, 1));
7787 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7788 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7789 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7790 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7791 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7792 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7794 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7796 if (tanfn != NULL_TREE)
7798 tree tmp = TREE_OPERAND (arg0, 1);
7799 tmp = build_function_call_expr (tanfn, tmp);
7800 return fold (build2 (RDIV_EXPR, type,
7801 build_real (type, dconst1), tmp));
7805 /* Optimize pow(x,c)/x as pow(x,c-1). */
7806 if (fcode0 == BUILT_IN_POW
7807 || fcode0 == BUILT_IN_POWF
7808 || fcode0 == BUILT_IN_POWL)
7810 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7811 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7812 if (TREE_CODE (arg01) == REAL_CST
7813 && ! TREE_CONSTANT_OVERFLOW (arg01)
7814 && operand_equal_p (arg1, arg00, 0))
7816 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7820 c = TREE_REAL_CST (arg01);
7821 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7822 arg = build_real (type, c);
7823 arglist = build_tree_list (NULL_TREE, arg);
7824 arglist = tree_cons (NULL_TREE, arg1, arglist);
7825 return build_function_call_expr (powfn, arglist);
7831 case TRUNC_DIV_EXPR:
7832 case ROUND_DIV_EXPR:
7833 case FLOOR_DIV_EXPR:
7835 case EXACT_DIV_EXPR:
7836 if (integer_onep (arg1))
7837 return non_lvalue (fold_convert (type, arg0));
7838 if (integer_zerop (arg1))
7841 if (!TYPE_UNSIGNED (type)
7842 && TREE_CODE (arg1) == INTEGER_CST
7843 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7844 && TREE_INT_CST_HIGH (arg1) == -1)
7845 return fold_convert (type, negate_expr (arg0));
7847 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7848 operation, EXACT_DIV_EXPR.
7850 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7851 At one time others generated faster code, but it's not clear that they do
7852 after the last round of changes to the DIV code in expmed.c.  */
7853 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7854 && multiple_of_p (type, arg0, arg1))
7855 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7857 if (TREE_CODE (arg1) == INTEGER_CST
7858 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7860 return fold_convert (type, tem);
7865 case FLOOR_MOD_EXPR:
7866 case ROUND_MOD_EXPR:
7867 case TRUNC_MOD_EXPR:
7868 if (integer_onep (arg1))
7869 return omit_one_operand (type, integer_zero_node, arg0);
7870 if (integer_zerop (arg1))
7873 /* X % -1 is zero. */
7874 if (!TYPE_UNSIGNED (type)
7875 && TREE_CODE (arg1) == INTEGER_CST
7876 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7877 && TREE_INT_CST_HIGH (arg1) == -1)
7878 return omit_one_operand (type, integer_zero_node, arg0);
7880 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7881 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
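/* E.g., for unsigned x, x % 8 becomes x & 7; the mask below is
   (1 << log2 (C)) - 1, split across two HOST_WIDE_INTs when C is
   that large.  */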
7882 if (code == TRUNC_MOD_EXPR
7883 && TYPE_UNSIGNED (type)
7884 && integer_pow2p (arg1))
7886 unsigned HOST_WIDE_INT high, low;
7890 l = tree_log2 (arg1);
7891 if (l >= HOST_BITS_PER_WIDE_INT)
7893 high = ((unsigned HOST_WIDE_INT) 1
7894 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7900 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7903 mask = build_int_cst_wide (type, low, high);
7904 return fold (build2 (BIT_AND_EXPR, type,
7905 fold_convert (type, arg0), mask));
7908 /* X % -C is the same as X % C. */
7909 if (code == TRUNC_MOD_EXPR
7910 && !TYPE_UNSIGNED (type)
7911 && TREE_CODE (arg1) == INTEGER_CST
7912 && TREE_INT_CST_HIGH (arg1) < 0
7914 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7915 && !sign_bit_p (arg1, arg1))
7916 return fold (build2 (code, type, fold_convert (type, arg0),
7917 fold_convert (type, negate_expr (arg1))));
7919 /* X % -Y is the same as X % Y. */
7920 if (code == TRUNC_MOD_EXPR
7921 && !TYPE_UNSIGNED (type)
7922 && TREE_CODE (arg1) == NEGATE_EXPR
7924 return fold (build2 (code, type, fold_convert (type, arg0),
7925 fold_convert (type, TREE_OPERAND (arg1, 0))));
7927 if (TREE_CODE (arg1) == INTEGER_CST
7928 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7930 return fold_convert (type, tem);
7936 if (integer_all_onesp (arg0))
7937 return omit_one_operand (type, arg0, arg1);
7941 /* Optimize -1 >> x for arithmetic right shifts. */
7942 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7943 return omit_one_operand (type, arg0, arg1);
7944 /* ... fall through ... */
7948 if (integer_zerop (arg1))
7949 return non_lvalue (fold_convert (type, arg0));
7950 if (integer_zerop (arg0))
7951 return omit_one_operand (type, arg0, arg1);
7953 /* Since a negative shift count is not well-defined,
7954 don't try to compute it in the compiler. */
7955 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7957 /* Rewrite an LROTATE_EXPR by a constant into an
7958 RROTATE_EXPR by a new constant. */
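/* E.g., in a 32-bit type a left-rotate by 3 becomes a right-rotate
   by 32 - 3 = 29.  */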
7959 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7961 tree tem = build_int_cst (NULL_TREE,
7962 GET_MODE_BITSIZE (TYPE_MODE (type)));
7963 tem = fold_convert (TREE_TYPE (arg1), tem);
7964 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7965 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7968 /* If we have a rotate of a bit operation with the rotate count and
7969 the second operand of the bit operation both constant,
7970 permute the two operations. */
7971 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7972 && (TREE_CODE (arg0) == BIT_AND_EXPR
7973 || TREE_CODE (arg0) == BIT_IOR_EXPR
7974 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7975 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7976 return fold (build2 (TREE_CODE (arg0), type,
7977 fold (build2 (code, type,
7978 TREE_OPERAND (arg0, 0), arg1)),
7979 fold (build2 (code, type,
7980 TREE_OPERAND (arg0, 1), arg1))));
7982 /* Two consecutive rotates adding up to the width of the mode can be ignored.  */
7984 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7985 && TREE_CODE (arg0) == RROTATE_EXPR
7986 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7987 && TREE_INT_CST_HIGH (arg1) == 0
7988 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7989 && ((TREE_INT_CST_LOW (arg1)
7990 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7991 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7992 return TREE_OPERAND (arg0, 0);
7997 if (operand_equal_p (arg0, arg1, 0))
7998 return omit_one_operand (type, arg0, arg1);
7999 if (INTEGRAL_TYPE_P (type)
8000 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8001 return omit_one_operand (type, arg1, arg0);
8005 if (operand_equal_p (arg0, arg1, 0))
8006 return omit_one_operand (type, arg0, arg1);
8007 if (INTEGRAL_TYPE_P (type)
8008 && TYPE_MAX_VALUE (type)
8009 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8010 return omit_one_operand (type, arg1, arg0);
8013 case TRUTH_NOT_EXPR:
8014 /* The argument to invert_truthvalue must have Boolean type. */
8015 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8016 arg0 = fold_convert (boolean_type_node, arg0);
8018 /* Note that the operand of this must be an int
8019 and its values must be 0 or 1.
8020 ("true" is a fixed value perhaps depending on the language,
8021 but we don't handle values other than 1 correctly yet.) */
8022 tem = invert_truthvalue (arg0);
8023 /* Avoid infinite recursion. */
8024 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
8026 tem = fold_single_bit_test (code, arg0, arg1, type);
8031 return fold_convert (type, tem);
8033 case TRUTH_ANDIF_EXPR:
8034 /* Note that the operands of this must be ints
8035 and their values must be 0 or 1.
8036 ("true" is a fixed value perhaps depending on the language.) */
8037 /* If first arg is constant zero, return it. */
8038 if (integer_zerop (arg0))
8039 return fold_convert (type, arg0);
8040 case TRUTH_AND_EXPR:
8041 /* If either arg is constant true, drop it. */
8042 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8043 return non_lvalue (fold_convert (type, arg1));
8044 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8045 /* Preserve sequence points. */
8046 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8047 return non_lvalue (fold_convert (type, arg0));
8048 /* If second arg is constant zero, result is zero, but first arg
8049 must be evaluated. */
8050 if (integer_zerop (arg1))
8051 return omit_one_operand (type, arg1, arg0);
8052 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8053 case will be handled here. */
8054 if (integer_zerop (arg0))
8055 return omit_one_operand (type, arg0, arg1);
8057 /* !X && X is always false. */
8058 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8059 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8060 return omit_one_operand (type, integer_zero_node, arg1);
8061 /* X && !X is always false. */
8062 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8063 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8064 return omit_one_operand (type, integer_zero_node, arg0);
8066 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8067 means A >= Y && A != MAX, but in this case we know that
8068 A < X <= MAX. */
8070 if (!TREE_SIDE_EFFECTS (arg0)
8071 && !TREE_SIDE_EFFECTS (arg1))
8072 {
8073 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8074 if (tem)
8075 return fold (build2 (code, type, tem, arg1));
8077 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8078 if (tem)
8079 return fold (build2 (code, type, arg0, tem));
8080 }
8082 truth_andor:
8083 /* We only do these simplifications if we are optimizing. */
8084 if (!optimize)
8085 return t;
8087 /* Check for things like (A || B) && (A || C). We can convert this
8088 to A || (B && C). Note that either operator can be any of the four
8089 truth and/or operations and the transformation will still be
8090 valid. Also note that we only care about order for the
8091 ANDIF and ORIF operators. If B contains side effects, this
8092 might change the truth-value of A. */
8093 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8094 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8095 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8096 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8097 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8098 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8099 {
8100 tree a00 = TREE_OPERAND (arg0, 0);
8101 tree a01 = TREE_OPERAND (arg0, 1);
8102 tree a10 = TREE_OPERAND (arg1, 0);
8103 tree a11 = TREE_OPERAND (arg1, 1);
8104 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8105 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8106 && (code == TRUTH_AND_EXPR
8107 || code == TRUTH_OR_EXPR));
8109 if (operand_equal_p (a00, a10, 0))
8110 return fold (build2 (TREE_CODE (arg0), type, a00,
8111 fold (build2 (code, type, a01, a11))));
8112 else if (commutative && operand_equal_p (a00, a11, 0))
8113 return fold (build2 (TREE_CODE (arg0), type, a00,
8114 fold (build2 (code, type, a01, a10))));
8115 else if (commutative && operand_equal_p (a01, a10, 0))
8116 return fold (build2 (TREE_CODE (arg0), type, a01,
8117 fold (build2 (code, type, a00, a11))));
8119 /* This case is tricky because we must either have commutative
8120 operators or else A10 must not have side-effects. */
8122 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8123 && operand_equal_p (a01, a11, 0))
8124 return fold (build2 (TREE_CODE (arg0), type,
8125 fold (build2 (code, type, a00, a10)),
8126 a01));
8127 }
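/* Illustrative example (added commentary): (a || b) && (a || c)
   becomes a || (b && c) via the a00 == a10 arm above.  Only the plain
   TRUTH_AND/TRUTH_OR forms may match swapped operands, because with
   the short-circuit ANDIF/ORIF forms reordering a side-effecting
   operand could change which subexpressions are evaluated. */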
8129 /* See if we can build a range comparison. */
8130 if (0 != (tem = fold_range_test (t)))
8131 return tem;
8133 /* Check for the possibility of merging component references. If our
8134 lhs is another similar operation, try to merge its rhs with our
8135 rhs. Then try to merge our lhs and rhs. */
8136 if (TREE_CODE (arg0) == code
8137 && 0 != (tem = fold_truthop (code, type,
8138 TREE_OPERAND (arg0, 1), arg1)))
8139 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8141 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8142 return tem;
8144 return t;
8146 case TRUTH_ORIF_EXPR:
8147 /* Note that the operands of this must be ints
8148 and their values must be 0 or true.
8149 ("true" is a fixed value perhaps depending on the language.) */
8150 /* If first arg is constant true, return it. */
8151 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8152 return fold_convert (type, arg0);
8153 case TRUTH_OR_EXPR:
8154 /* If either arg is constant zero, drop it. */
8155 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8156 return non_lvalue (fold_convert (type, arg1));
8157 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8158 /* Preserve sequence points. */
8159 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8160 return non_lvalue (fold_convert (type, arg0));
8161 /* If second arg is constant true, result is true, but we must
8162 evaluate first arg. */
8163 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8164 return omit_one_operand (type, arg1, arg0);
8165 /* Likewise for first arg, but note this only occurs here for
8166 TRUTH_OR_EXPR. */
8167 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8168 return omit_one_operand (type, arg0, arg1);
8170 /* !X || X is always true. */
8171 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8172 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8173 return omit_one_operand (type, integer_one_node, arg1);
8174 /* X || !X is always true. */
8175 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8176 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8177 return omit_one_operand (type, integer_one_node, arg0);
8179 goto truth_andor;
8181 case TRUTH_XOR_EXPR:
8182 /* If the second arg is constant zero, drop it. */
8183 if (integer_zerop (arg1))
8184 return non_lvalue (fold_convert (type, arg0));
8185 /* If the second arg is constant true, this is a logical inversion. */
8186 if (integer_onep (arg1))
8187 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
8188 /* Identical arguments cancel to zero. */
8189 if (operand_equal_p (arg0, arg1, 0))
8190 return omit_one_operand (type, integer_zero_node, arg0);
8192 /* !X ^ X is always true. */
8193 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8194 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8195 return omit_one_operand (type, integer_one_node, arg1);
8197 /* X ^ !X is always true. */
8198 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8199 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8200 return omit_one_operand (type, integer_one_node, arg0);
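/* Illustrative examples (added commentary): for truth values,
   a ^ 0 => a, a ^ 1 => !a, a ^ a => 0, and a ^ !a => 1.  The last
   two fold to constants via omit_one_operand, which keeps the
   discarded operand alive for its side effects. */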
8202 return t;
8204 case EQ_EXPR:
8205 case NE_EXPR:
8206 case LT_EXPR:
8207 case GT_EXPR:
8208 case LE_EXPR:
8209 case GE_EXPR:
8210 /* If one arg is a real or integer constant, put it last. */
8211 if (tree_swap_operands_p (arg0, arg1, true))
8212 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
8214 /* If this is an equality comparison of the address of a non-weak
8215 object against zero, then we know the result. */
8216 if ((code == EQ_EXPR || code == NE_EXPR)
8217 && TREE_CODE (arg0) == ADDR_EXPR
8218 && DECL_P (TREE_OPERAND (arg0, 0))
8219 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8220 && integer_zerop (arg1))
8221 return constant_boolean_node (code != EQ_EXPR, type);
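/* Illustrative example (added commentary): given "int v;", the test
   "&v == 0" folds to false and "&v != 0" to true, since the address
   of a non-weak declaration can never be a null pointer. */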
8223 /* If this is an equality comparison of the address of two non-weak,
8224 unaliased symbols neither of which are extern (since we do not
8225 have access to attributes for externs), then we know the result. */
8226 if ((code == EQ_EXPR || code == NE_EXPR)
8227 && TREE_CODE (arg0) == ADDR_EXPR
8228 && DECL_P (TREE_OPERAND (arg0, 0))
8229 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8230 && ! lookup_attribute ("alias",
8231 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8232 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8233 && TREE_CODE (arg1) == ADDR_EXPR
8234 && DECL_P (TREE_OPERAND (arg1, 0))
8235 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8236 && ! lookup_attribute ("alias",
8237 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8238 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8239 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8240 ? code == EQ_EXPR : code != EQ_EXPR,
8241 type);
8243 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8244 {
8245 tree targ0 = strip_float_extensions (arg0);
8246 tree targ1 = strip_float_extensions (arg1);
8247 tree newtype = TREE_TYPE (targ0);
8249 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8250 newtype = TREE_TYPE (targ1);
8252 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8253 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8254 return fold (build2 (code, type, fold_convert (newtype, targ0),
8255 fold_convert (newtype, targ1)));
8256 }
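/* Illustrative example (added commentary): for "float f, g;", the
   comparison "(double) f < (double) g" folds back to "f < g", since
   widening both operands to a common wider format cannot change the
   result of the comparison. */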
8257 /* (-a) CMP (-b) -> b CMP a */
8258 if (TREE_CODE (arg0) == NEGATE_EXPR
8259 && TREE_CODE (arg1) == NEGATE_EXPR)
8260 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
8261 TREE_OPERAND (arg0, 0)));
8263 if (TREE_CODE (arg1) == REAL_CST)
8264 {
8265 REAL_VALUE_TYPE cst;
8266 cst = TREE_REAL_CST (arg1);
8268 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8269 if (TREE_CODE (arg0) == NEGATE_EXPR)
8270 return
8271 fold (build2 (swap_tree_comparison (code), type,
8272 TREE_OPERAND (arg0, 0),
8273 build_real (TREE_TYPE (arg1),
8274 REAL_VALUE_NEGATE (cst))));
8276 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8277 /* a CMP (-0) -> a CMP 0 */
8278 if (REAL_VALUE_MINUS_ZERO (cst))
8279 return fold (build2 (code, type, arg0,
8280 build_real (TREE_TYPE (arg1), dconst0)));
8282 /* x != NaN is always true, other ops are always false. */
8283 if (REAL_VALUE_ISNAN (cst)
8284 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8285 {
8286 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8287 return omit_one_operand (type, tem, arg0);
8288 }
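/* Illustrative example (added commentary): when signaling NaNs need
   not be honored, "x != NAN" folds to 1 while "x == NAN", "x < NAN",
   etc. fold to 0, still evaluating x for its side effects. */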
8290 /* Fold comparisons against infinity. */
8291 if (REAL_VALUE_ISINF (cst))
8292 {
8293 tem = fold_inf_compare (code, type, arg0, arg1);
8294 if (tem != NULL_TREE)
8295 return tem;
8296 }
8297 }
8299 /* If this is a comparison of a real constant with a PLUS_EXPR
8300 or a MINUS_EXPR of a real constant, we can convert it into a
8301 comparison with a revised real constant as long as no overflow
8302 occurs when unsafe_math_optimizations are enabled. */
8303 if (flag_unsafe_math_optimizations
8304 && TREE_CODE (arg1) == REAL_CST
8305 && (TREE_CODE (arg0) == PLUS_EXPR
8306 || TREE_CODE (arg0) == MINUS_EXPR)
8307 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8308 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8309 ? MINUS_EXPR : PLUS_EXPR,
8310 arg1, TREE_OPERAND (arg0, 1), 0))
8311 && ! TREE_CONSTANT_OVERFLOW (tem))
8312 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8314 /* Likewise, we can simplify a comparison of a real constant with
8315 a MINUS_EXPR whose first operand is also a real constant, i.e.
8316 (c1 - x) < c2 becomes x > c1-c2. */
8317 if (flag_unsafe_math_optimizations
8318 && TREE_CODE (arg1) == REAL_CST
8319 && TREE_CODE (arg0) == MINUS_EXPR
8320 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8321 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8322 arg1, 0))
8323 && ! TREE_CONSTANT_OVERFLOW (tem))
8324 return fold (build2 (swap_tree_comparison (code), type,
8325 TREE_OPERAND (arg0, 1), tem));
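/* Illustrative example (added commentary, valid only under
   -funsafe-math-optimizations): "(10.0 - x) < 4.0" becomes
   "x > 6.0"; the constant 10.0 - 4.0 is folded at compile time and
   the comparison direction is swapped. */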
8327 /* Fold comparisons against built-in math functions. */
8328 if (TREE_CODE (arg1) == REAL_CST
8329 && flag_unsafe_math_optimizations
8330 && ! flag_errno_math)
8331 {
8332 enum built_in_function fcode = builtin_mathfn_code (arg0);
8334 if (fcode != END_BUILTINS)
8335 {
8336 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8337 if (tem != NULL_TREE)
8338 return tem;
8339 }
8340 }
8343 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8344 if (TREE_CONSTANT (arg1)
8345 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8346 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8347 /* This optimization is invalid for ordered comparisons
8348 if CONST+INCR overflows or if foo+incr might overflow.
8349 This optimization is invalid for floating point due to rounding.
8350 For pointer types we assume overflow doesn't happen. */
8351 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8352 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8353 && (code == EQ_EXPR || code == NE_EXPR))))
8354 {
8355 tree varop, newconst;
8357 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8358 {
8359 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8360 arg1, TREE_OPERAND (arg0, 1)));
8361 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8362 TREE_OPERAND (arg0, 0),
8363 TREE_OPERAND (arg0, 1));
8364 }
8365 else
8366 {
8367 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8368 arg1, TREE_OPERAND (arg0, 1)));
8369 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8370 TREE_OPERAND (arg0, 0),
8371 TREE_OPERAND (arg0, 1));
8372 }
8375 /* If VAROP is a reference to a bitfield, we must mask
8376 the constant by the width of the field. */
8377 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8378 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8379 && host_integerp (DECL_SIZE (TREE_OPERAND
8380 (TREE_OPERAND (varop, 0), 1)), 1))
8382 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8383 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8384 tree folded_compare, shift;
8386 /* First check whether the comparison would come out
8387 always the same. If we don't do that we would
8388 change the meaning with the masking. */
8389 folded_compare = fold (build2 (code, type,
8390 TREE_OPERAND (varop, 0), arg1));
8391 if (integer_zerop (folded_compare)
8392 || integer_onep (folded_compare))
8393 return omit_one_operand (type, folded_compare, varop);
8395 shift = build_int_cst (NULL_TREE,
8396 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8397 shift = fold_convert (TREE_TYPE (varop), shift);
8398 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8399 newconst, shift));
8400 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8401 newconst, shift));
8402 }
8404 return fold (build2 (code, type, varop, newconst));
8405 }
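/* Illustrative example (added commentary): for integral foo,
   "foo++ == 5" is rewritten as "++foo == 6", exposing the updated
   value to further folding.  This is only done for EQ/NE on integers
   (or any comparison on pointers), where neither overflow nor
   rounding can change the outcome. */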
8407 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8408 This transformation affects the cases which are handled in later
8409 optimizations involving comparisons with non-negative constants. */
8410 if (TREE_CODE (arg1) == INTEGER_CST
8411 && TREE_CODE (arg0) != INTEGER_CST
8412 && tree_int_cst_sgn (arg1) > 0)
8413 {
8414 switch (code)
8415 {
8416 case GE_EXPR:
8417 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8418 return fold (build2 (GT_EXPR, type, arg0, arg1));
8420 case LT_EXPR:
8421 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8422 return fold (build2 (LE_EXPR, type, arg0, arg1));
8424 default:
8425 break;
8426 }
8427 }
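/* Illustrative example (added commentary): for C > 0, "x >= 3"
   canonicalizes to "x > 2" and "x < 3" to "x <= 2", so the
   later transformations only need to recognize the GT/LE forms
   against non-negative constants. */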
8429 /* Comparisons with the highest or lowest possible integer of
8430 the specified size will have known values.
8432 This is quite similar to fold_relational_hi_lo, however,
8433 attempts to share the code have been nothing but trouble. */
8434 {
8435 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8437 if (TREE_CODE (arg1) == INTEGER_CST
8438 && ! TREE_CONSTANT_OVERFLOW (arg1)
8439 && width <= HOST_BITS_PER_WIDE_INT
8440 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8441 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8442 {
8443 unsigned HOST_WIDE_INT signed_max;
8444 unsigned HOST_WIDE_INT max, min;
8446 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8448 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8449 {
8450 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8451 min = 0;
8452 }
8453 else
8454 {
8455 max = signed_max;
8456 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8457 }
8459 if (TREE_INT_CST_HIGH (arg1) == 0
8460 && TREE_INT_CST_LOW (arg1) == max)
8461 switch (code)
8462 {
8463 case GT_EXPR:
8464 return omit_one_operand (type, integer_zero_node, arg0);
8466 case GE_EXPR:
8467 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8469 case LE_EXPR:
8470 return omit_one_operand (type, integer_one_node, arg0);
8472 case LT_EXPR:
8473 return fold (build2 (NE_EXPR, type, arg0, arg1));
8475 /* The GE_EXPR and LT_EXPR cases above are not normally
8476 reached because of previous transformations. */
8478 default:
8479 break;
8480 }
8481 else if (TREE_INT_CST_HIGH (arg1) == 0
8482 && TREE_INT_CST_LOW (arg1) == max - 1)
8483 switch (code)
8484 {
8485 case GT_EXPR:
8486 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8487 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8488 case LE_EXPR:
8489 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8490 return fold (build2 (NE_EXPR, type, arg0, arg1));
8491 default:
8492 break;
8493 }
8494 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8495 && TREE_INT_CST_LOW (arg1) == min)
8496 switch (code)
8497 {
8498 case LT_EXPR:
8499 return omit_one_operand (type, integer_zero_node, arg0);
8501 case LE_EXPR:
8502 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8504 case GE_EXPR:
8505 return omit_one_operand (type, integer_one_node, arg0);
8507 case GT_EXPR:
8508 return fold (build2 (NE_EXPR, type, arg0, arg1));
8510 default:
8511 break;
8512 }
8513 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8514 && TREE_INT_CST_LOW (arg1) == min + 1)
8515 switch (code)
8516 {
8517 case GE_EXPR:
8518 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8519 return fold (build2 (NE_EXPR, type, arg0, arg1));
8520 case LT_EXPR:
8521 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8522 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8523 default:
8524 break;
8525 }
8527 else if (!in_gimple_form
8528 && TREE_INT_CST_HIGH (arg1) == 0
8529 && TREE_INT_CST_LOW (arg1) == signed_max
8530 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8531 /* signed_type does not work on pointer types. */
8532 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8533 {
8534 /* The following case also applies to X < signed_max+1
8535 and X >= signed_max+1 because of previous transformations. */
8536 if (code == LE_EXPR || code == GT_EXPR)
8537 {
8538 tree st0, st1;
8539 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8540 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8541 return fold
8542 (build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
8543 type, fold_convert (st0, arg0),
8544 fold_convert (st1, integer_zero_node)));
8545 }
8546 }
8547 }
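/* Illustrative examples (added commentary): for "unsigned char x",
   "x > 255" folds to 0 and "x <= 255" to 1, while "x >= 255" becomes
   "x == 255" and "x < 255" becomes "x != 255".  The signed_max arm
   turns "x <= 127" into "(signed char) x >= 0". */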
8550 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8551 a MINUS_EXPR of a constant, we can convert it into a comparison with
8552 a revised constant as long as no overflow occurs. */
8553 if ((code == EQ_EXPR || code == NE_EXPR)
8554 && TREE_CODE (arg1) == INTEGER_CST
8555 && (TREE_CODE (arg0) == PLUS_EXPR
8556 || TREE_CODE (arg0) == MINUS_EXPR)
8557 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8558 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8559 ? MINUS_EXPR : PLUS_EXPR,
8560 arg1, TREE_OPERAND (arg0, 1), 0))
8561 && ! TREE_CONSTANT_OVERFLOW (tem))
8562 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8564 /* Similarly for a NEGATE_EXPR. */
8565 else if ((code == EQ_EXPR || code == NE_EXPR)
8566 && TREE_CODE (arg0) == NEGATE_EXPR
8567 && TREE_CODE (arg1) == INTEGER_CST
8568 && 0 != (tem = negate_expr (arg1))
8569 && TREE_CODE (tem) == INTEGER_CST
8570 && ! TREE_CONSTANT_OVERFLOW (tem))
8571 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8573 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8574 for !=. Don't do this for ordered comparisons due to overflow. */
8575 else if ((code == NE_EXPR || code == EQ_EXPR)
8576 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8577 return fold (build2 (code, type,
8578 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8580 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8581 && TREE_CODE (arg0) == NOP_EXPR)
8582 {
8583 /* If we are widening one operand of an integer comparison,
8584 see if the other operand is similarly being widened. Perhaps we
8585 can do the comparison in the narrower type. */
8586 tem = fold_widened_comparison (code, type, arg0, arg1);
8587 if (tem)
8588 return tem;
8590 /* Or if we are changing signedness. */
8591 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8592 if (tem)
8593 return tem;
8594 }
8596 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8597 constant, we can simplify it. */
8598 else if (TREE_CODE (arg1) == INTEGER_CST
8599 && (TREE_CODE (arg0) == MIN_EXPR
8600 || TREE_CODE (arg0) == MAX_EXPR)
8601 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8602 return optimize_minmax_comparison (t);
8604 /* If we are comparing an ABS_EXPR with a constant, we can
8605 convert all the cases into explicit comparisons, but they may
8606 well not be faster than doing the ABS and one comparison.
8607 But ABS (X) <= C is a range comparison, which becomes a subtraction
8608 and a comparison, and is probably faster. */
8609 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8610 && TREE_CODE (arg0) == ABS_EXPR
8611 && ! TREE_SIDE_EFFECTS (arg0)
8612 && (0 != (tem = negate_expr (arg1)))
8613 && TREE_CODE (tem) == INTEGER_CST
8614 && ! TREE_CONSTANT_OVERFLOW (tem))
8615 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8616 build2 (GE_EXPR, type,
8617 TREE_OPERAND (arg0, 0), tem),
8618 build2 (LE_EXPR, type,
8619 TREE_OPERAND (arg0, 0), arg1)));
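/* Illustrative example (added commentary): "abs (x) <= 10" becomes
   "x >= -10 && x <= 10", a range test that later folds into a single
   subtract-and-unsigned-compare, usually cheaper than computing the
   absolute value and comparing. */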
8621 /* If this is an EQ or NE comparison with zero and ARG0 is
8622 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8623 two operations, but the latter can be done in one less insn
8624 on machines that have only two-operand insns or on which a
8625 constant cannot be the first operand. */
8626 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8627 && TREE_CODE (arg0) == BIT_AND_EXPR)
8628 {
8629 tree arg00 = TREE_OPERAND (arg0, 0);
8630 tree arg01 = TREE_OPERAND (arg0, 1);
8631 if (TREE_CODE (arg00) == LSHIFT_EXPR
8632 && integer_onep (TREE_OPERAND (arg00, 0)))
8633 return
8634 fold (build2 (code, type,
8635 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8636 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8637 arg01, TREE_OPERAND (arg00, 1)),
8638 fold_convert (TREE_TYPE (arg0),
8639 integer_one_node)),
8640 arg1));
8641 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8642 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8643 return
8644 fold (build2 (code, type,
8645 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8646 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8647 arg00, TREE_OPERAND (arg01, 1)),
8648 fold_convert (TREE_TYPE (arg0),
8649 integer_one_node)),
8650 arg1));
8651 }
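/* Illustrative example (added commentary): "((1 << n) & bar) == 0"
   becomes "((bar >> n) & 1) == 0"; both forms need a shift and an
   AND, but the rewrite avoids shifting the constant 1 by a variable,
   which some machines cannot encode as the first operand. */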
8653 /* If this is an NE or EQ comparison of zero against the result of a
8654 signed MOD operation whose second operand is a power of 2, make
8655 the MOD operation unsigned since it is simpler and equivalent. */
8656 if ((code == NE_EXPR || code == EQ_EXPR)
8657 && integer_zerop (arg1)
8658 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8659 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8660 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8661 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8662 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8663 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8664 {
8665 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8666 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8667 fold_convert (newtype,
8668 TREE_OPERAND (arg0, 0)),
8669 fold_convert (newtype,
8670 TREE_OPERAND (arg0, 1))));
8672 return fold (build2 (code, type, newmod,
8673 fold_convert (newtype, arg1)));
8674 }
8676 /* If this is an NE comparison of zero with an AND of one, remove the
8677 comparison since the AND will give the correct value. */
8678 if (code == NE_EXPR && integer_zerop (arg1)
8679 && TREE_CODE (arg0) == BIT_AND_EXPR
8680 && integer_onep (TREE_OPERAND (arg0, 1)))
8681 return fold_convert (type, arg0);
8683 /* If we have (A & C) == C where C is a power of 2, convert this into
8684 (A & C) != 0. Similarly for NE_EXPR. */
8685 if ((code == EQ_EXPR || code == NE_EXPR)
8686 && TREE_CODE (arg0) == BIT_AND_EXPR
8687 && integer_pow2p (TREE_OPERAND (arg0, 1))
8688 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8689 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8690 arg0, fold_convert (TREE_TYPE (arg0),
8691 integer_zero_node)));
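/* Illustrative example (added commentary): with the power-of-two
   constant 8, "(a & 8) == 8" becomes "(a & 8) != 0" and
   "(a & 8) != 8" becomes "(a & 8) == 0". */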
8693 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8694 2, then fold the expression into shifts and logical operations. */
8695 tem = fold_single_bit_test (code, arg0, arg1, type);
8696 if (tem)
8697 return tem;
8699 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8700 Similarly for NE_EXPR. */
8701 if ((code == EQ_EXPR || code == NE_EXPR)
8702 && TREE_CODE (arg0) == BIT_AND_EXPR
8703 && TREE_CODE (arg1) == INTEGER_CST
8704 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8705 {
8706 tree notc = fold (build1 (BIT_NOT_EXPR,
8707 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8708 TREE_OPERAND (arg0, 1)));
8709 tree dandnotc = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8710 arg1, notc));
8711 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8712 if (integer_nonzerop (dandnotc))
8713 return omit_one_operand (type, rslt, arg0);
8714 }
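/* Illustrative example (added commentary): "(a & 4) == 3" can never
   hold, since 3 has bits set outside the mask 4; dandnotc is then
   nonzero and the comparison folds to 0 (or to 1 for "!="). */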
8716 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8717 Similarly for NE_EXPR. */
8718 if ((code == EQ_EXPR || code == NE_EXPR)
8719 && TREE_CODE (arg0) == BIT_IOR_EXPR
8720 && TREE_CODE (arg1) == INTEGER_CST
8721 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8722 {
8723 tree notd = fold (build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8724 tree candnotd = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8725 TREE_OPERAND (arg0, 1), notd));
8726 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8727 if (integer_nonzerop (candnotd))
8728 return omit_one_operand (type, rslt, arg0);
8729 }
8731 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8732 and similarly for >= into !=. */
8733 if ((code == LT_EXPR || code == GE_EXPR)
8734 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8735 && TREE_CODE (arg1) == LSHIFT_EXPR
8736 && integer_onep (TREE_OPERAND (arg1, 0)))
8737 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8738 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8739 TREE_OPERAND (arg1, 1)),
8740 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8742 else if ((code == LT_EXPR || code == GE_EXPR)
8743 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8744 && (TREE_CODE (arg1) == NOP_EXPR
8745 || TREE_CODE (arg1) == CONVERT_EXPR)
8746 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8747 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8748 return
8749 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8750 fold_convert (TREE_TYPE (arg0),
8751 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8752 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8753 1))),
8754 fold_convert (TREE_TYPE (arg0), integer_zero_node));
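/* Illustrative example (added commentary): for unsigned x,
   "x < (1 << y)" becomes "(x >> y) == 0" and "x >= (1 << y)" becomes
   "(x >> y) != 0", replacing a variable shift of a constant with a
   shift of x itself. */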
8756 /* Simplify comparison of something with itself. (For IEEE
8757 floating-point, we can only do some of these simplifications.) */
8758 if (operand_equal_p (arg0, arg1, 0))
8759 {
8760 switch (code)
8761 {
8762 case EQ_EXPR:
8763 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8764 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8765 return constant_boolean_node (1, type);
8766 break;
8768 case GE_EXPR:
8769 case LE_EXPR:
8770 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8771 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8772 return constant_boolean_node (1, type);
8773 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8775 case NE_EXPR:
8776 /* For NE, we can only do this simplification if integer
8777 or we don't honor IEEE floating point NaNs. */
8778 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8779 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8780 break;
8781 /* ... fall through ... */
8782 case GT_EXPR:
8783 case LT_EXPR:
8784 return constant_boolean_node (0, type);
8785 default:
8786 break;
8787 }
8788 }
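/* Illustrative example (added commentary): for "int i", "i == i" and
   "i <= i" fold to 1 and "i < i" folds to 0; for "double d", "d == d"
   is left alone when NaNs are honored, because it is false when d is
   a NaN. */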
8790 /* If we are comparing an expression that just has comparisons
8791 of two integer values, arithmetic expressions of those comparisons,
8792 and constants, we can simplify it. There are only three cases
8793 to check: the two values can either be equal, the first can be
8794 greater, or the second can be greater. Fold the expression for
8795 those three values. Since each value must be 0 or 1, we have
8796 eight possibilities, each of which corresponds to the constant 0
8797 or 1 or one of the six possible comparisons.
8799 This handles common cases like (a > b) == 0 but also handles
8800 expressions like ((x > y) - (y > x)) > 0, which supposedly
8801 occur in macroized code. */
8803 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8805 tree cval1 = 0, cval2 = 0;
8806 int save_p = 0;
8808 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8809 /* Don't handle degenerate cases here; they should already
8810 have been handled anyway. */
8811 && cval1 != 0 && cval2 != 0
8812 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8813 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8814 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8815 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8816 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8817 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8818 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8820 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8821 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8823 /* We can't just pass T to eval_subst in case cval1 or cval2
8824 was the same as ARG1. */
8826 tree high_result
8827 = fold (build2 (code, type,
8828 eval_subst (arg0, cval1, maxval,
8829 cval2, minval),
8830 arg1));
8831 tree equal_result
8832 = fold (build2 (code, type,
8833 eval_subst (arg0, cval1, maxval,
8834 cval2, maxval),
8835 arg1));
8836 tree low_result
8837 = fold (build2 (code, type,
8838 eval_subst (arg0, cval1, minval,
8839 cval2, maxval),
8840 arg1));
8842 /* All three of these results should be 0 or 1. Confirm they
8843 are. Then use those values to select the proper code
8844 to use. */
8846 if ((integer_zerop (high_result)
8847 || integer_onep (high_result))
8848 && (integer_zerop (equal_result)
8849 || integer_onep (equal_result))
8850 && (integer_zerop (low_result)
8851 || integer_onep (low_result)))
8853 /* Make a 3-bit mask with the high-order bit being the
8854 value for `>', the next for '=', and the low for '<'. */
8855 switch ((integer_onep (high_result) * 4)
8856 + (integer_onep (equal_result) * 2)
8857 + integer_onep (low_result))
8858 {
8859 case 0:
8860 /* Always false. */
8861 return omit_one_operand (type, integer_zero_node, arg0);
8862 case 1: code = LT_EXPR; break;
8865 case 2: code = EQ_EXPR; break;
8868 case 3: code = LE_EXPR; break;
8871 case 4: code = GT_EXPR; break;
8874 case 5: code = NE_EXPR; break;
8877 case 6: code = GE_EXPR; break;
8880 case 7:
8881 /* Always true. */
8882 return omit_one_operand (type, integer_one_node, arg0);
8883 }
8885 tem = build2 (code, type, cval1, cval2);
8886 if (save_p)
8887 return save_expr (tem);
8888 else
8889 return fold (tem);
8890 }
8891 }
8892 }
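/* Illustrative example (added commentary): for "(a > b) == 0" the
   three trial evaluations give high_result = 0, equal_result = 1,
   low_result = 1, i.e. mask 3, so the whole expression folds to
   "a <= b". */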
8894 /* If this is a comparison of a field, we may be able to simplify it. */
8895 if (((TREE_CODE (arg0) == COMPONENT_REF
8896 && lang_hooks.can_use_bit_fields_p ())
8897 || TREE_CODE (arg0) == BIT_FIELD_REF)
8898 && (code == EQ_EXPR || code == NE_EXPR)
8899 /* Handle the constant case even without -O
8900 to make sure the warnings are given. */
8901 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8902 {
8903 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8904 if (t1)
8905 return t1;
8906 }
8908 /* If this is a comparison of complex values and either or both sides
8909 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8910 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8911 This may prevent needless evaluations. */
8912 if ((code == EQ_EXPR || code == NE_EXPR)
8913 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8914 && (TREE_CODE (arg0) == COMPLEX_EXPR
8915 || TREE_CODE (arg1) == COMPLEX_EXPR
8916 || TREE_CODE (arg0) == COMPLEX_CST
8917 || TREE_CODE (arg1) == COMPLEX_CST))
8918 {
8919 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8920 tree real0, imag0, real1, imag1;
8922 arg0 = save_expr (arg0);
8923 arg1 = save_expr (arg1);
8924 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8925 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8926 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8927 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8929 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8930 : TRUTH_ORIF_EXPR),
8931 type,
8932 fold (build2 (code, type, real0, real1)),
8933 fold (build2 (code, type, imag0, imag1))));
8934 }
8936 /* Optimize comparisons of strlen vs zero to a compare of the
8937 first character of the string vs zero. To wit,
8938 strlen(ptr) == 0 => *ptr == 0
8939 strlen(ptr) != 0 => *ptr != 0
8940 Other cases should reduce to one of these two (or a constant)
8941 due to the return value of strlen being unsigned. */
8942 if ((code == EQ_EXPR || code == NE_EXPR)
8943 && integer_zerop (arg1)
8944 && TREE_CODE (arg0) == CALL_EXPR)
8945 {
8946 tree fndecl = get_callee_fndecl (arg0);
8947 tree arglist;
8949 if (fndecl
8950 && DECL_BUILT_IN (fndecl)
8951 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8952 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8953 && (arglist = TREE_OPERAND (arg0, 1))
8954 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8955 && ! TREE_CHAIN (arglist))
8956 return fold (build2 (code, type,
8957 build1 (INDIRECT_REF, char_type_node,
8958 TREE_VALUE (arglist)),
8959 fold_convert (char_type_node,
8960 integer_zero_node)));
8961 }
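/* Illustrative example (added commentary): "strlen (p) == 0" folds to
   "*p == 0"; because strlen returns an unsigned value, a test like
   "strlen (p) > 0" will already have been canonicalized to the NE
   form by the transformations above. */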
8963 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8964 into a single range test. */
8965 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8966 && TREE_CODE (arg1) == INTEGER_CST
8967 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8968 && !integer_zerop (TREE_OPERAND (arg0, 1))
8969 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8970 && !TREE_OVERFLOW (arg1))
8971 {
8972 t1 = fold_div_compare (code, type, arg0, arg1);
8973 if (t1 != NULL_TREE)
8974 return t1;
8975 }
8977 if ((code == EQ_EXPR || code == NE_EXPR)
8978 && !TREE_SIDE_EFFECTS (arg0)
8979 && integer_zerop (arg1)
8980 && tree_expr_nonzero_p (arg0))
8981 return constant_boolean_node (code == NE_EXPR, type);
8983 t1 = fold_relational_const (code, type, arg0, arg1);
8984 return t1 == NULL_TREE ? t : t1;
8986 case UNORDERED_EXPR:
8987 case ORDERED_EXPR:
8988 case UNLT_EXPR:
8989 case UNLE_EXPR:
8990 case UNGT_EXPR:
8991 case UNGE_EXPR:
8992 case UNEQ_EXPR:
8993 case LTGT_EXPR:
8994 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8995 {
8996 t1 = fold_relational_const (code, type, arg0, arg1);
8997 if (t1 != NULL_TREE)
8998 return t1;
8999 }
9001 /* If the first operand is NaN, the result is constant. */
9002 if (TREE_CODE (arg0) == REAL_CST
9003 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9004 && (code != LTGT_EXPR || ! flag_trapping_math))
9005 {
9006 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9007 ? integer_zero_node
9008 : integer_one_node;
9009 return omit_one_operand (type, t1, arg1);
9010 }
9012 /* If the second operand is NaN, the result is constant. */
9013 if (TREE_CODE (arg1) == REAL_CST
9014 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9015 && (code != LTGT_EXPR || ! flag_trapping_math))
9016 {
9017 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9018 ? integer_zero_node
9019 : integer_one_node;
9020 return omit_one_operand (type, t1, arg0);
9021 }
9023 /* Simplify unordered comparison of something with itself. */
9024 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9025 && operand_equal_p (arg0, arg1, 0))
9026 return constant_boolean_node (1, type);
9028 if (code == LTGT_EXPR
9029 && !flag_trapping_math
9030 && operand_equal_p (arg0, arg1, 0))
9031 return constant_boolean_node (0, type);
9033 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9034 {
9035 tree targ0 = strip_float_extensions (arg0);
9036 tree targ1 = strip_float_extensions (arg1);
9037 tree newtype = TREE_TYPE (targ0);
9039 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9040 newtype = TREE_TYPE (targ1);
9042 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9043 return fold (build2 (code, type, fold_convert (newtype, targ0),
9044 fold_convert (newtype, targ1)));
9045 }
9047 return t;
9049 case COND_EXPR:
9050 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9051 so all simple results must be passed through pedantic_non_lvalue. */
9052 if (TREE_CODE (arg0) == INTEGER_CST)
9053 {
9054 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
9055 /* Only optimize constant conditions when the selected branch
9056 has the same type as the COND_EXPR. This avoids optimizing
9057 away "c ? x : throw", where the throw has a void type. */
9058 if (! VOID_TYPE_P (TREE_TYPE (tem))
9059 || VOID_TYPE_P (type))
9060 return pedantic_non_lvalue (tem);
9061 }
9063 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
9064 return pedantic_omit_one_operand (type, arg1, arg0);
9066 /* If we have A op B ? A : C, we may be able to convert this to a
9067 simpler expression, depending on the operation and the values
9068 of B and C. Signed zeros prevent all of these transformations,
9069 for reasons given above each one.
9071 Also try swapping the arguments and inverting the conditional. */
9072 if (COMPARISON_CLASS_P (arg0)
9073 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9074 arg1, TREE_OPERAND (arg0, 1))
9075 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9076 {
9077 tem = fold_cond_expr_with_comparison (type, arg0,
9078 TREE_OPERAND (t, 1),
9079 TREE_OPERAND (t, 2));
9080 if (tem)
9081 return tem;
9082 }
9084 if (COMPARISON_CLASS_P (arg0)
9085 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9086 TREE_OPERAND (t, 2),
9087 TREE_OPERAND (arg0, 1))
9088 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
9089 {
9090 tem = invert_truthvalue (arg0);
9091 if (COMPARISON_CLASS_P (tem))
9092 {
9093 tem = fold_cond_expr_with_comparison (type, tem,
9094 TREE_OPERAND (t, 2),
9095 TREE_OPERAND (t, 1));
9096 if (tem)
9097 return tem;
9098 }
9099 }
9101 /* If the second operand is simpler than the third, swap them
9102 since that produces better jump optimization results. */
9103 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
9104 TREE_OPERAND (t, 2), false))
9105 {
9106 /* See if this can be inverted. If it can't, possibly because
9107 it was a floating-point inequality comparison, don't do
9108 anything. */
9109 tem = invert_truthvalue (arg0);
9111 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9112 return fold (build3 (code, type, tem,
9113 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
9114 }
9116 /* Convert A ? 1 : 0 to simply A. */
9117 if (integer_onep (TREE_OPERAND (t, 1))
9118 && integer_zerop (TREE_OPERAND (t, 2))
9119 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
9120 call to fold will try to move the conversion inside
9121 a COND, which will recurse. In that case, the COND_EXPR
9122 is probably the best choice, so leave it alone. */
9123 && type == TREE_TYPE (arg0))
9124 return pedantic_non_lvalue (arg0);
9126 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9127 over COND_EXPR in cases such as floating point comparisons. */
9128 if (integer_zerop (TREE_OPERAND (t, 1))
9129 && integer_onep (TREE_OPERAND (t, 2))
9130 && truth_value_p (TREE_CODE (arg0)))
9131 return pedantic_non_lvalue (fold_convert (type,
9132 invert_truthvalue (arg0)));
9134 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9135 if (TREE_CODE (arg0) == LT_EXPR
9136 && integer_zerop (TREE_OPERAND (arg0, 1))
9137 && integer_zerop (TREE_OPERAND (t, 2))
9138 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9139 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
9140 TREE_TYPE (tem), tem, arg1)));
9142 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9143 already handled above. */
9144 if (TREE_CODE (arg0) == BIT_AND_EXPR
9145 && integer_onep (TREE_OPERAND (arg0, 1))
9146 && integer_zerop (TREE_OPERAND (t, 2))
9147 && integer_pow2p (arg1))
9148 {
9149 tree tem = TREE_OPERAND (arg0, 0);
9151 if (TREE_CODE (tem) == RSHIFT_EXPR
9152 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9153 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9154 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9155 return fold (build2 (BIT_AND_EXPR, type,
9156 TREE_OPERAND (tem, 0), arg1));
9157 }
9159 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9160 is probably obsolete because the first operand should be a
9161 truth value (that's why we have the two cases above), but let's
9162 leave it in until we can confirm this for all front-ends. */
9163 if (integer_zerop (TREE_OPERAND (t, 2))
9164 && TREE_CODE (arg0) == NE_EXPR
9165 && integer_zerop (TREE_OPERAND (arg0, 1))
9166 && integer_pow2p (arg1)
9167 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9168 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9169 arg1, OEP_ONLY_CONST))
9170 return pedantic_non_lvalue (fold_convert (type,
9171 TREE_OPERAND (arg0, 0)));
9173 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9174 if (integer_zerop (TREE_OPERAND (t, 2))
9175 && truth_value_p (TREE_CODE (arg0))
9176 && truth_value_p (TREE_CODE (arg1)))
9177 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
9179 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9180 if (integer_onep (TREE_OPERAND (t, 2))
9181 && truth_value_p (TREE_CODE (arg0))
9182 && truth_value_p (TREE_CODE (arg1)))
9183 {
9184 /* Only perform transformation if ARG0 is easily inverted. */
9185 tem = invert_truthvalue (arg0);
9186 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9187 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
9188 }
9190 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9191 if (integer_zerop (arg1)
9192 && truth_value_p (TREE_CODE (arg0))
9193 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9194 {
9195 /* Only perform transformation if ARG0 is easily inverted. */
9196 tem = invert_truthvalue (arg0);
9197 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9198 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
9199 TREE_OPERAND (t, 2)));
9200 }
9202 /* Convert A ? 1 : B into A || B if A and B are truth values. */
9203 if (integer_onep (arg1)
9204 && truth_value_p (TREE_CODE (arg0))
9205 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
9206 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
9207 TREE_OPERAND (t, 2)));
9209 return t;
9211 case COMPOUND_EXPR:
9212 /* When pedantic, a compound expression can be neither an lvalue
9213 nor an integer constant expression. */
9214 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9215 return t;
9216 /* Don't let (0, 0) be null pointer constant. */
9217 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9218 : fold_convert (type, arg1);
9219 return pedantic_non_lvalue (tem);
9221 case COMPLEX_EXPR:
9222 if (wins)
9223 return build_complex (type, arg0, arg1);
9224 return t;
9226 case REALPART_EXPR:
9227 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9228 return t;
9229 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9230 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
9231 TREE_OPERAND (arg0, 1));
9232 else if (TREE_CODE (arg0) == COMPLEX_CST)
9233 return TREE_REALPART (arg0);
9234 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9235 return fold (build2 (TREE_CODE (arg0), type,
9236 fold (build1 (REALPART_EXPR, type,
9237 TREE_OPERAND (arg0, 0))),
9238 fold (build1 (REALPART_EXPR, type,
9239 TREE_OPERAND (arg0, 1)))));
9240 return t;
9242 case IMAGPART_EXPR:
9243 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
9244 return fold_convert (type, integer_zero_node);
9245 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
9246 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
9247 TREE_OPERAND (arg0, 0));
9248 else if (TREE_CODE (arg0) == COMPLEX_CST)
9249 return TREE_IMAGPART (arg0);
9250 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9251 return fold (build2 (TREE_CODE (arg0), type,
9252 fold (build1 (IMAGPART_EXPR, type,
9253 TREE_OPERAND (arg0, 0))),
9254 fold (build1 (IMAGPART_EXPR, type,
9255 TREE_OPERAND (arg0, 1)))));
9256 return t;
9258 case CALL_EXPR:
9259 /* Check for a built-in function. */
9260 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
9261 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
9262 == FUNCTION_DECL)
9263 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
9264 {
9265 tree tmp = fold_builtin (t, false);
9266 if (tmp)
9267 return tmp;
9268 }
9269 return t;
9271 default:
9272 return t;
9273 } /* switch (code) */
9274 }
9276 #ifdef ENABLE_FOLD_CHECKING
9277 #undef fold
9279 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9280 static void fold_check_failed (tree, tree);
9281 void print_fold_checksum (tree);
9283 /* When --enable-checking=fold, compute a digest of expr before
9284 and after actual fold call to see if fold did not accidentally
9285 change original expr. */
9287 tree
9288 fold (tree expr)
9289 {
9290 tree ret;
9291 struct md5_ctx ctx;
9292 unsigned char checksum_before[16], checksum_after[16];
9293 htab_t ht;
9295 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9296 md5_init_ctx (&ctx);
9297 fold_checksum_tree (expr, &ctx, ht);
9298 md5_finish_ctx (&ctx, checksum_before);
9299 htab_empty (ht);
9301 ret = fold_1 (expr);
9303 md5_init_ctx (&ctx);
9304 fold_checksum_tree (expr, &ctx, ht);
9305 md5_finish_ctx (&ctx, checksum_after);
9306 htab_delete (ht);
9308 if (memcmp (checksum_before, checksum_after, 16))
9309 fold_check_failed (expr, ret);
9311 return ret;
9312 }
9314 void
9315 print_fold_checksum (tree expr)
9316 {
9317 struct md5_ctx ctx;
9318 unsigned char checksum[16], cnt;
9319 htab_t ht;
9321 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9322 md5_init_ctx (&ctx);
9323 fold_checksum_tree (expr, &ctx, ht);
9324 md5_finish_ctx (&ctx, checksum);
9325 htab_delete (ht);
9326 for (cnt = 0; cnt < 16; ++cnt)
9327 fprintf (stderr, "%02x", checksum[cnt]);
9328 putc ('\n', stderr);
9329 }
9331 static void
9332 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9333 {
9334 internal_error ("fold check: original tree changed by fold");
9335 }
9337 static void
9338 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9339 {
9340 void **slot;
9341 enum tree_code code;
9342 char buf[sizeof (struct tree_decl)];
9343 int i, len;
9345 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
9346 <= sizeof (struct tree_decl))
9347 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
9348 if (expr == NULL)
9349 return;
9350 slot = htab_find_slot (ht, expr, INSERT);
9351 if (*slot != NULL)
9352 return;
9353 *slot = expr;
9354 code = TREE_CODE (expr);
9355 if (TREE_CODE_CLASS (code) == tcc_declaration
9356 && DECL_ASSEMBLER_NAME_SET_P (expr))
9357 {
9358 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9359 memcpy (buf, expr, tree_size (expr));
9360 expr = (tree) buf;
9361 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9362 }
9363 else if (TREE_CODE_CLASS (code) == tcc_type
9364 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
9365 || TYPE_CACHED_VALUES_P (expr)))
9366 {
9367 /* Allow these fields to be modified. */
9368 memcpy (buf, expr, tree_size (expr));
9369 expr = (tree) buf;
9370 TYPE_POINTER_TO (expr) = NULL;
9371 TYPE_REFERENCE_TO (expr) = NULL;
9372 TYPE_CACHED_VALUES_P (expr) = 0;
9373 TYPE_CACHED_VALUES (expr) = NULL;
9374 }
9375 md5_process_bytes (expr, tree_size (expr), ctx);
9376 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9377 if (TREE_CODE_CLASS (code) != tcc_type
9378 && TREE_CODE_CLASS (code) != tcc_declaration)
9379 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
9380 switch (TREE_CODE_CLASS (code))
9381 {
9382 case tcc_constant:
9383 switch (code)
9384 {
9385 case STRING_CST:
9386 md5_process_bytes (TREE_STRING_POINTER (expr),
9387 TREE_STRING_LENGTH (expr), ctx);
9388 break;
9389 case COMPLEX_CST:
9390 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9391 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9392 break;
9393 case VECTOR_CST:
9394 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9395 break;
9396 default:
9397 break;
9398 }
9399 break;
9400 case tcc_exceptional:
9401 switch (code)
9402 {
9403 case TREE_LIST:
9404 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9405 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9406 break;
9407 case TREE_VEC:
9408 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9409 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
9410 break;
9411 default:
9412 break;
9413 }
9414 break;
9415 case tcc_expression:
9416 case tcc_reference:
9417 case tcc_comparison:
9418 case tcc_unary:
9419 case tcc_binary:
9420 case tcc_statement:
9421 len = TREE_CODE_LENGTH (code);
9422 for (i = 0; i < len; ++i)
9423 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
9424 break;
9425 case tcc_declaration:
9425 case tcc_declaration:
9426 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9427 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9428 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9429 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9430 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9431 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9432 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9433 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9434 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9435 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9436 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
9437 break;
9438 case tcc_type:
9439 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9440 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9441 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9442 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9443 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9444 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9445 if (INTEGRAL_TYPE_P (expr)
9446 || SCALAR_FLOAT_TYPE_P (expr))
9447 {
9448 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9449 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9450 }
9451 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9452 if (TREE_CODE (expr) == RECORD_TYPE
9453 || TREE_CODE (expr) == UNION_TYPE
9454 || TREE_CODE (expr) == QUAL_UNION_TYPE)
9455 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9456 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9457 break;
9458 default:
9459 break;
9460 }
9461 }
9463 #endif
9465 /* Perform constant folding and related simplification of initializer
9466 expression EXPR. This behaves identically to "fold" but ignores
9467 potential run-time traps and exceptions that fold must preserve. */
9469 tree
9470 fold_initializer (tree expr)
9471 {
9472 int saved_signaling_nans = flag_signaling_nans;
9473 int saved_trapping_math = flag_trapping_math;
9474 int saved_trapv = flag_trapv;
9475 tree result;
9477 flag_signaling_nans = 0;
9478 flag_trapping_math = 0;
9479 flag_trapv = 0;
9481 result = fold (expr);
9483 flag_signaling_nans = saved_signaling_nans;
9484 flag_trapping_math = saved_trapping_math;
9485 flag_trapv = saved_trapv;
9487 return result;
9488 }
9490 /* Determine if first argument is a multiple of second argument. Return 0 if
9491 it is not, or we cannot easily determine it to be.
9493 An example of the sort of thing we care about (at this point; this routine
9494 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9495 fold cases do now) is discovering that
9497 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9499 is a multiple of
9501 SAVE_EXPR (J * 8)
9503 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9505 This code also handles discovering that
9507 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9509 is a multiple of 8 so we don't have to worry about dealing with a
9510 possible remainder.
9512 Note that we *look* inside a SAVE_EXPR only to determine how it was
9513 calculated; it is not safe for fold to do much of anything else with the
9514 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9515 at run time. For example, the latter example above *cannot* be implemented
9516 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9517 evaluation time of the original SAVE_EXPR is not necessarily the same at
9518 the time the new expression is evaluated. The only optimization of this
9519 sort that would be valid is changing
9521 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9523 to
9525 SAVE_EXPR (I) * SAVE_EXPR (J)
9527 (where the same SAVE_EXPR (J) is used in the original and the
9528 transformed version). */
9530 static int
9531 multiple_of_p (tree type, tree top, tree bottom)
9532 {
9533 if (operand_equal_p (top, bottom, 0))
9534 return 1;
9536 if (TREE_CODE (type) != INTEGER_TYPE)
9537 return 0;
9539 switch (TREE_CODE (top))
9540 {
9541 case MULT_EXPR:
9542 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9543 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9545 case PLUS_EXPR:
9546 case MINUS_EXPR:
9547 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9548 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9550 case LSHIFT_EXPR:
9551 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9552 {
9553 tree op1, t1;
9555 op1 = TREE_OPERAND (top, 1);
9556 /* const_binop may not detect overflow correctly,
9557 so check for it explicitly here. */
9558 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9559 > TREE_INT_CST_LOW (op1)
9560 && TREE_INT_CST_HIGH (op1) == 0
9561 && 0 != (t1 = fold_convert (type,
9562 const_binop (LSHIFT_EXPR,
9563 size_one_node,
9564 op1, 0)))
9565 && ! TREE_OVERFLOW (t1))
9566 return multiple_of_p (type, t1, bottom);
9567 }
9568 return 0;
9570 case NOP_EXPR:
9571 /* Can't handle conversions from non-integral or wider integral type. */
9572 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9573 || (TYPE_PRECISION (type)
9574 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9577 /* ... fall through ... */
9579 case SAVE_EXPR:
9580 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
9582 case INTEGER_CST:
9583 if (TREE_CODE (bottom) != INTEGER_CST
9584 || (TYPE_UNSIGNED (type)
9585 && (tree_int_cst_sgn (top) < 0
9586 || tree_int_cst_sgn (bottom) < 0)))
9587 return 0;
9588 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9589 top, bottom, 0));
9591 default:
9592 return 0;
9593 }
9594 }
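/* Illustrative examples (added commentary): multiple_of_p returns 1
   for top == j * 8 with bottom == 8 (via the MULT_EXPR arm), and for
   top == i << 3 with bottom == 8 (the shift is treated as a multiply
   by 1 << 3).  A zero result only means "not easily proven". */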
9596 /* Return true if `t' is known to be non-negative. */
9598 int
9599 tree_expr_nonnegative_p (tree t)
9600 {
9601 switch (TREE_CODE (t))
9602 {
9603 case ABS_EXPR:
9604 return 1;
9606 case INTEGER_CST:
9607 return tree_int_cst_sgn (t) >= 0;
9609 case REAL_CST:
9610 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
9612 case PLUS_EXPR:
9613 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9614 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9615 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9617 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9618 both unsigned and at least 2 bits shorter than the result. */
9619 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9620 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9621 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9622 {
9623 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9624 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9625 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9626 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9627 {
9628 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9629 TYPE_PRECISION (inner2)) + 1;
9630 return prec < TYPE_PRECISION (TREE_TYPE (t));
9631 }
9632 }
9633 break;
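/* Illustrative example (added commentary): if x and y are unsigned
   chars zero-extended to 32-bit int, each addend is below 2**8, so
   their sum is below 2**9; since 9 < 32, the PLUS_EXPR cannot wrap
   and is provably non-negative. */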
9635 case MULT_EXPR:
9636 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9637 {
9638 /* x * x for floating point x is always non-negative. */
9639 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9640 return 1;
9641 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9642 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9643 }
9645 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9646 both unsigned and their total bits is shorter than the result. */
9647 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9648 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9649 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9650 {
9651 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9652 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9653 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9654 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9655 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9656 < TYPE_PRECISION (TREE_TYPE (t));
9657 }
9658 break;
9660 case TRUNC_DIV_EXPR:
9661 case CEIL_DIV_EXPR:
9662 case FLOOR_DIV_EXPR:
9663 case ROUND_DIV_EXPR:
9664 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9665 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9667 case TRUNC_MOD_EXPR:
9668 case CEIL_MOD_EXPR:
9669 case FLOOR_MOD_EXPR:
9670 case ROUND_MOD_EXPR:
9671 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9673 case RDIV_EXPR:
9674 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9675 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9677 case BIT_AND_EXPR:
9678 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9679 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9680 case BIT_IOR_EXPR:
9681 case BIT_XOR_EXPR:
9682 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9683 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9685 case NOP_EXPR:
9686 {
9687 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9688 tree outer_type = TREE_TYPE (t);
9690 if (TREE_CODE (outer_type) == REAL_TYPE)
9691 {
9692 if (TREE_CODE (inner_type) == REAL_TYPE)
9693 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9694 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9695 {
9696 if (TYPE_UNSIGNED (inner_type))
9697 return 1;
9698 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9699 }
9700 }
9701 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9702 {
9703 if (TREE_CODE (inner_type) == REAL_TYPE)
9704 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9705 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9706 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9707 && TYPE_UNSIGNED (inner_type);
9708 }
9709 }
9710 break;
9712 case COND_EXPR:
9713 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9714 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9715 case COMPOUND_EXPR:
9716 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9717 case MIN_EXPR:
9718 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9719 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9720 case MAX_EXPR:
9721 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9722 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9723 case MODIFY_EXPR:
9724 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9725 case BIND_EXPR:
9726 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9727 case SAVE_EXPR:
9728 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9729 case NON_LVALUE_EXPR:
9730 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9731 case FLOAT_EXPR:
9732 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9734 case TARGET_EXPR:
9735 {
9736 tree temp = TARGET_EXPR_SLOT (t);
9737 t = TARGET_EXPR_INITIAL (t);
9739 /* If the initializer is non-void, then it's a normal expression
9740 that will be assigned to the slot. */
9741 if (!VOID_TYPE_P (t))
9742 return tree_expr_nonnegative_p (t);
9744 /* Otherwise, the initializer sets the slot in some way. One common
9745 way is an assignment statement at the end of the initializer. */
9746 while (1)
9747 {
9748 if (TREE_CODE (t) == BIND_EXPR)
9749 t = expr_last (BIND_EXPR_BODY (t));
9750 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9751 || TREE_CODE (t) == TRY_CATCH_EXPR)
9752 t = expr_last (TREE_OPERAND (t, 0));
9753 else if (TREE_CODE (t) == STATEMENT_LIST)
9754 t = expr_last (t);
9755 else
9756 break;
9757 }
9758 if (TREE_CODE (t) == MODIFY_EXPR
9759 && TREE_OPERAND (t, 0) == temp)
9760 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9762 return 0;
9763 }
9765 case CALL_EXPR:
9766 {
9767 tree fndecl = get_callee_fndecl (t);
9768 tree arglist = TREE_OPERAND (t, 1);
9769 if (fndecl
9770 && DECL_BUILT_IN (fndecl)
9771 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9772 switch (DECL_FUNCTION_CODE (fndecl))
9773 {
9774 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9775 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9776 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9777 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9779 CASE_BUILTIN_F (BUILT_IN_ACOS)
9780 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9781 CASE_BUILTIN_F (BUILT_IN_CABS)
9782 CASE_BUILTIN_F (BUILT_IN_COSH)
9783 CASE_BUILTIN_F (BUILT_IN_ERFC)
9784 CASE_BUILTIN_F (BUILT_IN_EXP)
9785 CASE_BUILTIN_F (BUILT_IN_EXP10)
9786 CASE_BUILTIN_F (BUILT_IN_EXP2)
9787 CASE_BUILTIN_F (BUILT_IN_FABS)
9788 CASE_BUILTIN_F (BUILT_IN_FDIM)
9789 CASE_BUILTIN_F (BUILT_IN_FREXP)
9790 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9791 CASE_BUILTIN_F (BUILT_IN_POW10)
9792 CASE_BUILTIN_I (BUILT_IN_FFS)
9793 CASE_BUILTIN_I (BUILT_IN_PARITY)
9794 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9795 /* Always true. */
9796 return 1;
9798 CASE_BUILTIN_F (BUILT_IN_SQRT)
9799 /* sqrt(-0.0) is -0.0. */
9800 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9801 return 1;
9802 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9804 CASE_BUILTIN_F (BUILT_IN_ASINH)
9805 CASE_BUILTIN_F (BUILT_IN_ATAN)
9806 CASE_BUILTIN_F (BUILT_IN_ATANH)
9807 CASE_BUILTIN_F (BUILT_IN_CBRT)
9808 CASE_BUILTIN_F (BUILT_IN_CEIL)
9809 CASE_BUILTIN_F (BUILT_IN_ERF)
9810 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9811 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9812 CASE_BUILTIN_F (BUILT_IN_FMOD)
9813 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9814 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9815 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9816 CASE_BUILTIN_F (BUILT_IN_LRINT)
9817 CASE_BUILTIN_F (BUILT_IN_LROUND)
9818 CASE_BUILTIN_F (BUILT_IN_MODF)
9819 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9820 CASE_BUILTIN_F (BUILT_IN_POW)
9821 CASE_BUILTIN_F (BUILT_IN_RINT)
9822 CASE_BUILTIN_F (BUILT_IN_ROUND)
9823 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9824 CASE_BUILTIN_F (BUILT_IN_SINH)
9825 CASE_BUILTIN_F (BUILT_IN_TANH)
9826 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9827 /* True if the 1st argument is nonnegative. */
9828 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9830 CASE_BUILTIN_F (BUILT_IN_FMAX)
9831 /* True if the 1st OR 2nd arguments are nonnegative. */
9832 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9833 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9835 CASE_BUILTIN_F (BUILT_IN_FMIN)
9836 /* True if the 1st AND 2nd arguments are nonnegative. */
9837 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9838 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9840 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9841 /* True if the 2nd argument is nonnegative. */
9842 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9846 #undef CASE_BUILTIN_F
9847 #undef CASE_BUILTIN_I
            }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return true;
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return false;
}
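/* A sketch of how a caller might use this predicate (not code from this
   file; "arg0" and "type" stand for hypothetical locals of a folding
   routine): discard an ABS_EXPR whose operand is provably nonnegative.

        if (tree_expr_nonnegative_p (arg0))
          return fold_convert (type, arg0);

   That is, abs (x) folds to x whenever x is known to be >= 0.  */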
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.  */

static bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
      break;

    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  This does not work correctly
         if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
              || TREE_INT_CST_HIGH (t) != 0);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          /* In the presence of negative values it is hard
             to say anything.  */
          if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
              || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
            return false;
          /* One of the operands must be positive and the other non-negative.  */
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
                && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }

    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  */
        if (DECL_P (base))
          return !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
        {
          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
        return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              || tree_expr_nonzero_p (TREE_OPERAND (t, 0)));

    default:
      break;
    }

  return false;
}
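/* Worked example of the PLUS_EXPR rule above: with signed operands and
   -fwrapv not in effect, knowing x >= 0 and y >= 1 allows the conclusion
   x + y >= 1, hence x + y != 0.  If either operand may be negative (say
   x == 1, y == -1) no such conclusion is possible, which is why both
   operands must first pass tree_expr_nonnegative_p.  */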
/* See if we are applying CODE, a relational operator, to the highest or
   lowest possible integer of TYPE.  If so, then the result is a compile
   time constant.  */

static tree
fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
                       tree *op1_p)
{
  tree op0 = *op0_p;
  tree op1 = *op1_p;
  enum tree_code code = *code_p;
  int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));

  if (TREE_CODE (op1) == INTEGER_CST
      && ! TREE_CONSTANT_OVERFLOW (op1)
      && width <= HOST_BITS_PER_WIDE_INT
      && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
          || POINTER_TYPE_P (TREE_TYPE (op1))))
    {
      unsigned HOST_WIDE_INT signed_max;
      unsigned HOST_WIDE_INT max, min;

      signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

      if (TYPE_UNSIGNED (TREE_TYPE (op1)))
        {
          max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
          min = 0;
        }
      else
        {
          max = signed_max;
          min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
        }

      if (TREE_INT_CST_HIGH (op1) == 0
          && TREE_INT_CST_LOW (op1) == max)
        switch (code)
          {
          case GT_EXPR:
            return omit_one_operand (type, integer_zero_node, op0);

          case GE_EXPR:
            *code_p = EQ_EXPR;
            break;

          case LE_EXPR:
            return omit_one_operand (type, integer_one_node, op0);

          case LT_EXPR:
            *code_p = NE_EXPR;
            break;

          /* The GE_EXPR and LT_EXPR cases above are not normally
             reached because of previous transformations.  */

          default:
            break;
          }

      else if (TREE_INT_CST_HIGH (op1) == 0
               && TREE_INT_CST_LOW (op1) == max - 1)
        switch (code)
          {
          case GT_EXPR:
            *code_p = EQ_EXPR;
            *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
            break;

          case LE_EXPR:
            *code_p = NE_EXPR;
            *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
            break;

          default:
            break;
          }

      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
               && TREE_INT_CST_LOW (op1) == min)
        switch (code)
          {
          case LT_EXPR:
            return omit_one_operand (type, integer_zero_node, op0);

          case GE_EXPR:
            return omit_one_operand (type, integer_one_node, op0);

          default:
            break;
          }

      else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
               && TREE_INT_CST_LOW (op1) == min + 1)
        switch (code)
          {
          case GE_EXPR:
            *code_p = NE_EXPR;
            *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
            break;

          case LT_EXPR:
            *code_p = EQ_EXPR;
            *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
            break;

          default:
            break;
          }

      else if (TREE_INT_CST_HIGH (op1) == 0
               && TREE_INT_CST_LOW (op1) == signed_max
               && TYPE_UNSIGNED (TREE_TYPE (op1))
               /* signed_type does not work on pointer types.  */
               && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
        {
          /* The following case also applies to X < signed_max+1
             and X >= signed_max+1 because of previous transformations.  */
          if (code == LE_EXPR || code == GT_EXPR)
            {
              tree st0, st1, exp, retval;
              st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
              st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));

              exp = build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
                            type,
                            fold_convert (st0, op0),
                            fold_convert (st1, integer_zero_node));

              retval = fold_binary_to_constant (TREE_CODE (exp),
                                                TREE_TYPE (exp),
                                                TREE_OPERAND (exp, 0),
                                                TREE_OPERAND (exp, 1));

              /* If we are in gimple form, then returning EXP would create
                 non-gimple expressions.  Clearing it is safe and ensures
                 we do not allow a non-gimple expression to escape.  */
              if (in_gimple_form)
                exp = NULL;

              return (retval ? retval : exp);
            }
        }
    }

  return NULL_TREE;
}
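/* Example of the transformation above: for a 32-bit unsigned X,

        X <= 0x7fffffff    (X <= signed_max)

   is rewritten as

        (int) X >= 0

   which fold_binary_to_constant can often decide outright when X is a
   constant.  */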
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.

   Note this is primarily designed to be called after gimplification
   of the tree structures and when at least one operand is a constant.
   As a result of those simplifying assumptions this routine is far
   simpler than the generic fold routine.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem;
  tree subop0;
  tree subop1;

  /* If this is a commutative operation, and ARG0 is a constant, move it
     to ARG1 to reduce the number of tests below.  */
  if (commutative_tree_code (code)
      && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
    }

  /* If either operand is a complex type, extract its real component.  */
  if (TREE_CODE (op0) == COMPLEX_CST)
    subop0 = TREE_REALPART (op0);
  else
    subop0 = op0;

  if (TREE_CODE (op1) == COMPLEX_CST)
    subop1 = TREE_REALPART (op1);
  else
    subop1 = op1;

  /* Note if either argument is not a real or integer constant.
     With a few exceptions, simplification is limited to cases
     where both arguments are constants.  */
  if ((TREE_CODE (subop0) != INTEGER_CST
       && TREE_CODE (subop0) != REAL_CST)
      || (TREE_CODE (subop1) != INTEGER_CST
          && TREE_CODE (subop1) != REAL_CST))
    {
      switch (code)
        {
        case PLUS_EXPR:
          /* (plus (address) (const_int)) is a constant.  */
          if (TREE_CODE (op0) == PLUS_EXPR
              && TREE_CODE (op1) == INTEGER_CST
              && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
                  || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
                      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
                          == ADDR_EXPR)))
              && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
            return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
                           const_binop (PLUS_EXPR, op1,
                                        TREE_OPERAND (op0, 1), 0));
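          /* Example of the fold above:

                (&a + 4) + 8    =>    &a + 12

             i.e. the two integer displacements are combined by
             const_binop while the address part is kept as-is.  */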
          break;

        default:
          break;
        }

      return NULL_TREE;
    }

  /* Both arguments are constants.  Simplify.  */
  tem = const_binop (code, op0, op1, 0);
  if (tem != NULL_TREE)
    {
      /* The return value should always have the same type as
         the original expression.  */
      if (TREE_TYPE (tem) != type)
        tem = fold_convert (type, tem);

      return tem;
    }

  switch (code)
    {
    case MINUS_EXPR:
      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an
         operand is volatile.  */
      if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
        return fold_convert (type, integer_zero_node);
      break;

    case MULT_EXPR:
    case BIT_AND_EXPR:
      /* Special case multiplication or bitwise AND where one argument
         is zero.  */
      if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
        return omit_one_operand (type, op1, op0);

      if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
          && real_zerop (op1))
        return omit_one_operand (type, op1, op0);
      break;

    case BIT_IOR_EXPR:
      /* Special case when we know the result will be all ones.  */
      if (integer_all_onesp (op1))
        return omit_one_operand (type, op1, op0);
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case RDIV_EXPR:
      /* Division by zero is undefined.  */
      if (integer_zerop (op1))
        return NULL_TREE;

      if (TREE_CODE (op1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
          && real_zerop (op1))
        return NULL_TREE;
      break;

    case MIN_EXPR:
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, op1, op0);
      break;

    case MAX_EXPR:
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
        return omit_one_operand (type, op1, op0);
      break;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
        return omit_one_operand (type, op0, op1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
      if (integer_zerop (op0))
        return omit_one_operand (type, op0, op1);

      /* Since negative shift count is not well-defined, don't
         try to compute it in the compiler.  */
      if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
        return NULL_TREE;
      break;

    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* -1 rotated either direction by any amount is still -1.  */
      if (integer_all_onesp (op0))
        return omit_one_operand (type, op0, op1);

      /* 0 rotated either direction by any amount is still zero.  */
      if (integer_zerop (op0))
        return omit_one_operand (type, op0, op1);
      break;

    case COMPLEX_EXPR:
      return build_complex (type, op0, op1);

    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if ((TREE_CODE (op0) == INTEGER_CST
           && TREE_CODE (op1) != INTEGER_CST)
          || (TREE_CODE (op0) == REAL_CST
              && TREE_CODE (op1) != REAL_CST))
        {
          tem = op0;
          op0 = op1;
          op1 = tem;
          code = swap_tree_comparison (code);
        }

      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
         This transformation affects the cases which are handled in later
         optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (op1) == INTEGER_CST
          && TREE_CODE (op0) != INTEGER_CST
          && tree_int_cst_sgn (op1) > 0)
        {
          switch (code)
            {
            case GE_EXPR:
              code = GT_EXPR;
              op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
              break;

            case LT_EXPR:
              code = LE_EXPR;
              op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
              break;

            default:
              break;
            }
        }
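      /* E.g. with C == 3:  X >= 3 becomes X > 2 and X < 3 becomes
         X <= 2, which puts the constant into the form the hi/lo tests
         in fold_relational_hi_lo expect.  */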
      tem = fold_relational_hi_lo (&code, type, &op0, &op1);
      if (tem)
        return tem;

      /* Fall through.  */

    case ORDERED_EXPR:
    case UNORDERED_EXPR:
      return fold_relational_const (code, type, op0, op1);

    case RANGE_EXPR:
      /* This could probably be handled.  */
      break;

    case TRUTH_AND_EXPR:
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (op1))
        return omit_one_operand (type, op1, op0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (op0))
        return omit_one_operand (type, op0, op1);
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
        return constant_boolean_node (true, type);
      break;

    case TRUTH_OR_EXPR:
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
        return omit_one_operand (type, op1, op0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
        return omit_one_operand (type, op0, op1);
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
        return constant_boolean_node (false, type);
      break;

    case TRUTH_XOR_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
        {
          int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
          return constant_boolean_node (x, type);
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}
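/* A minimal usage sketch for fold_binary_to_constant; the operand trees
   are assumed to already be constants, as the function expects after
   gimplification:

        tree a = build_int_cst (integer_type_node, 6);
        tree b = build_int_cst (integer_type_node, 7);
        tree t = fold_binary_to_constant (MULT_EXPR, integer_type_node,
                                          a, b);

   Here t is an INTEGER_CST with value 42; had the expression not
   simplified, t would be NULL_TREE.  */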
/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.

   Note this is primarily designed to be called after gimplification
   of the tree structures and when op0 is a constant.  As a result
   of those simplifying assumptions this routine is far simpler than
   the generic fold routine.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  /* Make sure we have a suitable constant argument.  */
  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
    {
      tree subop;

      if (TREE_CODE (op0) == COMPLEX_CST)
        subop = TREE_REALPART (op0);
      else
        subop = op0;

      if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
        return NULL_TREE;
    }

  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      return fold_convert_const (code, type, op0);

    case NEGATE_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        return fold_negate_const (op0, type);
      else
        return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        return fold_abs_const (op0, type);
      else
        return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (op0) == INTEGER_CST)
        return fold_not_const (op0, type);
      else
        return NULL_TREE;

    case REALPART_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST)
        return TREE_REALPART (op0);
      else
        return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST)
        return TREE_IMAGPART (op0);
      else
        return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (op0) == COMPLEX_CST
          && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
        return build_complex (type, TREE_REALPART (op0),
                              negate_expr (TREE_IMAGPART (op0)));
      else
        return NULL_TREE;

    default:
      return NULL_TREE;
    }
}

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return fold_convert (TREE_TYPE (exp),
                             build_int_cst (NULL_TREE,
                                            (TREE_STRING_POINTER (string)
                                             [TREE_INT_CST_LOW (index)])));
    }

  return NULL;
}
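/* Example: the ARRAY_REF "abc"[1], i.e. an access to a STRING_CST with
   constant index 1, folds to the character constant 'b'.  A non-constant
   or out-of-range index leaves the expression alone and returns NULL.  */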
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = build_int_cst_wide (type, low, high);
        t = force_fit_type (t, 1,
                            (overflow | TREE_OVERFLOW (arg0))
                            && !TYPE_UNSIGNED (type),
                            TREE_CONSTANT_OVERFLOW (arg0));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = build_int_cst_wide (type, low, high);
          t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
                              TREE_CONSTANT_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = build_int_cst_wide (type,
                          ~ TREE_INT_CST_LOW (arg0),
                          ~ TREE_INT_CST_HIGH (arg0));
  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
                      TREE_CONSTANT_OVERFLOW (arg0));

  return t;
}
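/* Example: with 64-bit HOST_WIDE_INTs, the 32-bit constant 5 is held as
   low == 5, high == 0, so the complement is first formed as low == ~5,
   high == ~0, and force_fit_type then truncates the result back to the
   32-bit type, yielding -6.  */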
/* Given CODE, a relational operator, the target type TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;

  return constant_boolean_node (result, type);
}
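/* Worked example of the reduction above: to evaluate 7 >= 9, GE_EXPR is
   inverted to LT_EXPR and the invert flag is set; INT_CST_LT then
   computes 7 < 9 as 1, and inverting gives the final result 0 (false).  */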
/* Build an expression for a cleanup point containing EXPR with type TYPE.
   Don't build a cleanup point expression for EXPR which doesn't have side
   effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}

/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF.  */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (lang_hooks.types_compatible_p (type, optype))
        return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
        return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  subtype = TREE_TYPE (sub);
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      sub = build_fold_indirect_ref (sub);
      return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
    }

  return build1 (INDIRECT_REF, type, t);
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
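/* Example: rounding 37 up to a multiple of 8 takes the power-of-two path
   above:

        (37 + 7) & -8  ==  44 & ~7  ==  40

   while a non-power-of-two divisor such as 12 uses the CEIL_DIV/MULT
   pair: ceil (37/12) * 12 == 4 * 12 == 48.  */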
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
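/* Example: rounding 37 down to a multiple of 8 is just 37 & -8 == 32;
   with divisor 12 it is floor (37/12) * 12 == 3 * 12 == 36.  */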
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep);

      if (TREE_CODE (core) == INDIRECT_REF)
        core = TREE_OPERAND (core, 0);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold (build2 (MINUS_EXPR, type, toffset1, toffset2));
      if (!host_integerp (tdiff, 0))
        return false;

      *diff = tree_low_cst (tdiff, 0);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
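/* A sketch of typical use (with hypothetical trees): for int a[10],
   comparing e1 == &a[5] with e2 == &a[2] finds the common core &a and
   constant bit positions 160 and 64, so the function stores
   (160 - 64) / BITS_PER_UNIT == 12 in *diff and returns true.  If
   either offset were variable, it would return false instead.  */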