1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
47 #include "coretypes.h"
58 #include "langhooks.h"
61 /* The following constants represent a bit based encoding of GCC's
62 comparison operators. This encoding simplifies transformations
63 on relational comparison operators, such as AND and OR. */
64 enum comparison_code {
83 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
84 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
85 static bool negate_mathfn_p (enum built_in_function);
86 static bool negate_expr_p (tree);
87 static tree negate_expr (tree);
88 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
89 static tree associate_trees (tree, tree, enum tree_code, tree);
90 static tree const_binop (enum tree_code, tree, tree, int);
91 static hashval_t size_htab_hash (const void *);
92 static int size_htab_eq (const void *, const void *);
93 static tree fold_convert_const (enum tree_code, tree, tree);
94 static enum tree_code invert_tree_comparison (enum tree_code, bool);
95 static enum tree_code swap_tree_comparison (enum tree_code);
96 static enum comparison_code comparison_to_compcode (enum tree_code);
97 static enum tree_code compcode_to_comparison (enum comparison_code);
98 static tree combine_comparisons (enum tree_code, enum tree_code,
99 enum tree_code, tree, tree, tree);
100 static int truth_value_p (enum tree_code);
101 static int operand_equal_for_comparison_p (tree, tree, tree);
102 static int twoval_comparison_p (tree, tree *, tree *, int *);
103 static tree eval_subst (tree, tree, tree, tree, tree);
104 static tree pedantic_omit_one_operand (tree, tree, tree);
105 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
106 static tree make_bit_field_ref (tree, tree, int, int, int);
107 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
108 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
109 enum machine_mode *, int *, int *,
111 static int all_ones_mask_p (tree, int);
112 static tree sign_bit_p (tree, tree);
113 static int simple_operand_p (tree);
114 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
115 static tree make_range (tree, int *, tree *, tree *);
116 static tree build_range_check (tree, tree, int, tree, tree);
117 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
119 static tree fold_range_test (tree);
120 static tree unextend (tree, int, int, tree);
121 static tree fold_truthop (enum tree_code, tree, tree, tree);
122 static tree optimize_minmax_comparison (tree);
123 static tree extract_muldiv (tree, tree, enum tree_code, tree);
124 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
125 static int multiple_of_p (tree, tree, tree);
126 static tree constant_boolean_node (int, tree);
127 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
129 static bool fold_real_zero_addition_p (tree, tree, int);
130 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
132 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
133 static tree fold_div_compare (enum tree_code, tree, tree, tree);
134 static bool reorder_operands_p (tree, tree);
135 static bool tree_swap_operands_p (tree, tree, bool);
137 static tree fold_negate_const (tree, tree);
138 static tree fold_abs_const (tree, tree);
139 static tree fold_not_const (tree, tree);
140 static tree fold_relational_const (enum tree_code, tree, tree, tree);
141 static tree fold_relational_hi_lo (enum tree_code *, const tree,
144 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
145 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
146 and SUM1. Then this yields nonzero if overflow occurred during the
149 Overflow occurs if A and B have the same sign, but A and SUM differ in
150 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
152 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
154 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
155 We do that by representing the two-word integer in 4 words, with only
156 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
157 number. The value of the word is LOWPART + HIGHPART * BASE. */
160 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
161 #define HIGHPART(x) \
162 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
163 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
165 /* Unpack a two-word integer into 4 words.
166 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
167 WORDS points to the array of HOST_WIDE_INTs. */
170 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
172 words[0] = LOWPART (low);
173 words[1] = HIGHPART (low);
174 words[2] = LOWPART (hi);
175 words[3] = HIGHPART (hi);
178 /* Pack an array of 4 words into a two-word integer.
179 WORDS points to the array of words.
180 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
183 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
186 *low = words[0] + words[1] * BASE;
187 *hi = words[2] + words[3] * BASE;
190 /* Make the integer constant T valid for its type by setting to 0 or 1 all
191 the bits in the constant that don't belong in the type.
193 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
194 nonzero, a signed overflow has already occurred in calculating T, so
198 force_fit_type (tree t, int overflow)
200 unsigned HOST_WIDE_INT low;
204 if (TREE_CODE (t) == REAL_CST)
206 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
207 Consider doing it via real_convert now. */
211 else if (TREE_CODE (t) != INTEGER_CST)
214 low = TREE_INT_CST_LOW (t);
215 high = TREE_INT_CST_HIGH (t);
217 if (POINTER_TYPE_P (TREE_TYPE (t))
218 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
221 prec = TYPE_PRECISION (TREE_TYPE (t));
223 /* First clear all bits that are beyond the type's precision. */
225 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
227 else if (prec > HOST_BITS_PER_WIDE_INT)
228 TREE_INT_CST_HIGH (t)
229 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
232 TREE_INT_CST_HIGH (t) = 0;
233 if (prec < HOST_BITS_PER_WIDE_INT)
234 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
237 /* Unsigned types do not suffer sign extension or overflow unless they
239 if (TYPE_UNSIGNED (TREE_TYPE (t))
240 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
241 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
244 /* If the value's sign bit is set, extend the sign. */
245 if (prec != 2 * HOST_BITS_PER_WIDE_INT
246 && (prec > HOST_BITS_PER_WIDE_INT
247 ? 0 != (TREE_INT_CST_HIGH (t)
249 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
250 : 0 != (TREE_INT_CST_LOW (t)
251 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
253 /* Value is negative:
254 set to 1 all the bits that are outside this type's precision. */
255 if (prec > HOST_BITS_PER_WIDE_INT)
256 TREE_INT_CST_HIGH (t)
257 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
260 TREE_INT_CST_HIGH (t) = -1;
261 if (prec < HOST_BITS_PER_WIDE_INT)
262 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
266 /* Return nonzero if signed overflow occurred. */
268 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
272 /* Add two doubleword integers with doubleword result.
273 Each argument is given as two `HOST_WIDE_INT' pieces.
274 One argument is L1 and H1; the other, L2 and H2.
275 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
278 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
279 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
280 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
282 unsigned HOST_WIDE_INT l;
286 h = h1 + h2 + (l < l1);
290 return OVERFLOW_SUM_SIGN (h1, h2, h);
293 /* Negate a doubleword integer with doubleword result.
294 Return nonzero if the operation overflows, assuming it's signed.
295 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
296 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
299 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
300 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
306 return (*hv & h1) < 0;
316 /* Multiply two doubleword integers with doubleword result.
317 Return nonzero if the operation overflows, assuming it's signed.
318 Each argument is given as two `HOST_WIDE_INT' pieces.
319 One argument is L1 and H1; the other, L2 and H2.
320 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
323 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
324 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
325 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
327 HOST_WIDE_INT arg1[4];
328 HOST_WIDE_INT arg2[4];
329 HOST_WIDE_INT prod[4 * 2];
330 unsigned HOST_WIDE_INT carry;
332 unsigned HOST_WIDE_INT toplow, neglow;
333 HOST_WIDE_INT tophigh, neghigh;
335 encode (arg1, l1, h1);
336 encode (arg2, l2, h2);
338 memset (prod, 0, sizeof prod);
340 for (i = 0; i < 4; i++)
343 for (j = 0; j < 4; j++)
346 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
347 carry += arg1[i] * arg2[j];
348 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
350 prod[k] = LOWPART (carry);
351 carry = HIGHPART (carry);
356 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
358 /* Check for overflow by calculating the top half of the answer in full;
359 it should agree with the low half's sign bit. */
360 decode (prod + 4, &toplow, &tophigh);
363 neg_double (l2, h2, &neglow, &neghigh);
364 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
368 neg_double (l1, h1, &neglow, &neghigh);
369 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
371 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
374 /* Shift the doubleword integer in L1, H1 left by COUNT places
375 keeping only PREC bits of result.
376 Shift right if COUNT is negative.
377 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
378 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
381 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
382 HOST_WIDE_INT count, unsigned int prec,
383 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
385 unsigned HOST_WIDE_INT signmask;
389 rshift_double (l1, h1, -count, prec, lv, hv, arith);
393 if (SHIFT_COUNT_TRUNCATED)
396 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
398 /* Shifting by the host word size is undefined according to the
399 ANSI standard, so we must handle this as a special case. */
403 else if (count >= HOST_BITS_PER_WIDE_INT)
405 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
410 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
411 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
415 /* Sign extend all bits that are beyond the precision. */
417 signmask = -((prec > HOST_BITS_PER_WIDE_INT
418 ? ((unsigned HOST_WIDE_INT) *hv
419 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
420 : (*lv >> (prec - 1))) & 1);
422 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
424 else if (prec >= HOST_BITS_PER_WIDE_INT)
426 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
427 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
432 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
433 *lv |= signmask << prec;
437 /* Shift the doubleword integer in L1, H1 right by COUNT places
438 keeping only PREC bits of result. COUNT must be positive.
439 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
440 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
443 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
444 HOST_WIDE_INT count, unsigned int prec,
445 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
448 unsigned HOST_WIDE_INT signmask;
451 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
454 if (SHIFT_COUNT_TRUNCATED)
457 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
459 /* Shifting by the host word size is undefined according to the
460 ANSI standard, so we must handle this as a special case. */
464 else if (count >= HOST_BITS_PER_WIDE_INT)
467 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
471 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
473 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
476 /* Zero / sign extend all bits that are beyond the precision. */
478 if (count >= (HOST_WIDE_INT)prec)
483 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
485 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
487 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
488 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
493 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
494 *lv |= signmask << (prec - count);
498 /* Rotate the doubleword integer in L1, H1 left by COUNT places
499 keeping only PREC bits of result.
500 Rotate right if COUNT is negative.
501 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
504 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
505 HOST_WIDE_INT count, unsigned int prec,
506 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
508 unsigned HOST_WIDE_INT s1l, s2l;
509 HOST_WIDE_INT s1h, s2h;
515 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
516 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
521 /* Rotate the doubleword integer in L1, H1 left by COUNT places
522 keeping only PREC bits of result. COUNT must be positive.
523 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
526 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
527 HOST_WIDE_INT count, unsigned int prec,
528 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
530 unsigned HOST_WIDE_INT s1l, s2l;
531 HOST_WIDE_INT s1h, s2h;
537 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
538 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
543 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
544 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
545 CODE is a tree code for a kind of division, one of
546 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
548 It controls how the quotient is rounded to an integer.
549 Return nonzero if the operation overflows.
550 UNS nonzero says do unsigned division. */
553 div_and_round_double (enum tree_code code, int uns,
554 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
555 HOST_WIDE_INT hnum_orig,
556 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
557 HOST_WIDE_INT hden_orig,
558 unsigned HOST_WIDE_INT *lquo,
559 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
563 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
564 HOST_WIDE_INT den[4], quo[4];
566 unsigned HOST_WIDE_INT work;
567 unsigned HOST_WIDE_INT carry = 0;
568 unsigned HOST_WIDE_INT lnum = lnum_orig;
569 HOST_WIDE_INT hnum = hnum_orig;
570 unsigned HOST_WIDE_INT lden = lden_orig;
571 HOST_WIDE_INT hden = hden_orig;
574 if (hden == 0 && lden == 0)
575 overflow = 1, lden = 1;
577 /* Calculate quotient sign and convert operands to unsigned. */
583 /* (minimum integer) / (-1) is the only overflow case. */
584 if (neg_double (lnum, hnum, &lnum, &hnum)
585 && ((HOST_WIDE_INT) lden & hden) == -1)
591 neg_double (lden, hden, &lden, &hden);
595 if (hnum == 0 && hden == 0)
596 { /* single precision */
598 /* This unsigned division rounds toward zero. */
604 { /* trivial case: dividend < divisor */
605 /* hden != 0 already checked. */
612 memset (quo, 0, sizeof quo);
614 memset (num, 0, sizeof num); /* to zero 9th element */
615 memset (den, 0, sizeof den);
617 encode (num, lnum, hnum);
618 encode (den, lden, hden);
620 /* Special code for when the divisor < BASE. */
621 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
623 /* hnum != 0 already checked. */
624 for (i = 4 - 1; i >= 0; i--)
626 work = num[i] + carry * BASE;
627 quo[i] = work / lden;
633 /* Full double precision division,
634 with thanks to Don Knuth's "Seminumerical Algorithms". */
635 int num_hi_sig, den_hi_sig;
636 unsigned HOST_WIDE_INT quo_est, scale;
638 /* Find the highest nonzero divisor digit. */
639 for (i = 4 - 1;; i--)
646 /* Insure that the first digit of the divisor is at least BASE/2.
647 This is required by the quotient digit estimation algorithm. */
649 scale = BASE / (den[den_hi_sig] + 1);
651 { /* scale divisor and dividend */
653 for (i = 0; i <= 4 - 1; i++)
655 work = (num[i] * scale) + carry;
656 num[i] = LOWPART (work);
657 carry = HIGHPART (work);
662 for (i = 0; i <= 4 - 1; i++)
664 work = (den[i] * scale) + carry;
665 den[i] = LOWPART (work);
666 carry = HIGHPART (work);
667 if (den[i] != 0) den_hi_sig = i;
674 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
676 /* Guess the next quotient digit, quo_est, by dividing the first
677 two remaining dividend digits by the high order quotient digit.
678 quo_est is never low and is at most 2 high. */
679 unsigned HOST_WIDE_INT tmp;
681 num_hi_sig = i + den_hi_sig + 1;
682 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
683 if (num[num_hi_sig] != den[den_hi_sig])
684 quo_est = work / den[den_hi_sig];
688 /* Refine quo_est so it's usually correct, and at most one high. */
689 tmp = work - quo_est * den[den_hi_sig];
691 && (den[den_hi_sig - 1] * quo_est
692 > (tmp * BASE + num[num_hi_sig - 2])))
695 /* Try QUO_EST as the quotient digit, by multiplying the
696 divisor by QUO_EST and subtracting from the remaining dividend.
697 Keep in mind that QUO_EST is the I - 1st digit. */
700 for (j = 0; j <= den_hi_sig; j++)
702 work = quo_est * den[j] + carry;
703 carry = HIGHPART (work);
704 work = num[i + j] - LOWPART (work);
705 num[i + j] = LOWPART (work);
706 carry += HIGHPART (work) != 0;
709 /* If quo_est was high by one, then num[i] went negative and
710 we need to correct things. */
711 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
714 carry = 0; /* add divisor back in */
715 for (j = 0; j <= den_hi_sig; j++)
717 work = num[i + j] + den[j] + carry;
718 carry = HIGHPART (work);
719 num[i + j] = LOWPART (work);
722 num [num_hi_sig] += carry;
725 /* Store the quotient digit. */
730 decode (quo, lquo, hquo);
733 /* If result is negative, make it so. */
735 neg_double (*lquo, *hquo, lquo, hquo);
737 /* Compute trial remainder: rem = num - (quo * den) */
738 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
739 neg_double (*lrem, *hrem, lrem, hrem);
740 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
745 case TRUNC_MOD_EXPR: /* round toward zero */
746 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
750 case FLOOR_MOD_EXPR: /* round toward negative infinity */
751 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
754 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
762 case CEIL_MOD_EXPR: /* round toward positive infinity */
763 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
765 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
773 case ROUND_MOD_EXPR: /* round to closest integer */
775 unsigned HOST_WIDE_INT labs_rem = *lrem;
776 HOST_WIDE_INT habs_rem = *hrem;
777 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
778 HOST_WIDE_INT habs_den = hden, htwice;
780 /* Get absolute values. */
782 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
784 neg_double (lden, hden, &labs_den, &habs_den);
786 /* If (2 * abs (lrem) >= abs (lden)) */
787 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
788 labs_rem, habs_rem, <wice, &htwice);
790 if (((unsigned HOST_WIDE_INT) habs_den
791 < (unsigned HOST_WIDE_INT) htwice)
792 || (((unsigned HOST_WIDE_INT) habs_den
793 == (unsigned HOST_WIDE_INT) htwice)
794 && (labs_den < ltwice)))
798 add_double (*lquo, *hquo,
799 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
802 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
814 /* Compute true remainder: rem = num - (quo * den) */
815 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
816 neg_double (*lrem, *hrem, lrem, hrem);
817 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
821 /* Return true if built-in mathematical function specified by CODE
822 preserves the sign of it argument, i.e. -f(x) == f(-x). */
825 negate_mathfn_p (enum built_in_function code)
849 /* Determine whether an expression T can be cheaply negated using
850 the function negate_expr. */
853 negate_expr_p (tree t)
855 unsigned HOST_WIDE_INT val;
862 type = TREE_TYPE (t);
865 switch (TREE_CODE (t))
868 if (TYPE_UNSIGNED (type) || ! flag_trapv)
871 /* Check that -CST will not overflow type. */
872 prec = TYPE_PRECISION (type);
873 if (prec > HOST_BITS_PER_WIDE_INT)
875 if (TREE_INT_CST_LOW (t) != 0)
877 prec -= HOST_BITS_PER_WIDE_INT;
878 val = TREE_INT_CST_HIGH (t);
881 val = TREE_INT_CST_LOW (t);
882 if (prec < HOST_BITS_PER_WIDE_INT)
883 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
884 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
891 return negate_expr_p (TREE_REALPART (t))
892 && negate_expr_p (TREE_IMAGPART (t));
895 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
897 /* -(A + B) -> (-B) - A. */
898 if (negate_expr_p (TREE_OPERAND (t, 1))
899 && reorder_operands_p (TREE_OPERAND (t, 0),
900 TREE_OPERAND (t, 1)))
902 /* -(A + B) -> (-A) - B. */
903 return negate_expr_p (TREE_OPERAND (t, 0));
906 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
907 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
908 && reorder_operands_p (TREE_OPERAND (t, 0),
909 TREE_OPERAND (t, 1));
912 if (TYPE_UNSIGNED (TREE_TYPE (t)))
918 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
919 return negate_expr_p (TREE_OPERAND (t, 1))
920 || negate_expr_p (TREE_OPERAND (t, 0));
924 /* Negate -((double)float) as (double)(-float). */
925 if (TREE_CODE (type) == REAL_TYPE)
927 tree tem = strip_float_extensions (t);
929 return negate_expr_p (tem);
934 /* Negate -f(x) as f(-x). */
935 if (negate_mathfn_p (builtin_mathfn_code (t)))
936 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
940 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
941 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
943 tree op1 = TREE_OPERAND (t, 1);
944 if (TREE_INT_CST_HIGH (op1) == 0
945 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
946 == TREE_INT_CST_LOW (op1))
957 /* Given T, an expression, return the negation of T. Allow for T to be
958 null, in which case return null. */
969 type = TREE_TYPE (t);
972 switch (TREE_CODE (t))
975 tem = fold_negate_const (t, type);
976 if (! TREE_OVERFLOW (tem)
977 || TYPE_UNSIGNED (type)
983 tem = fold_negate_const (t, type);
984 /* Two's complement FP formats, such as c4x, may overflow. */
985 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
986 return fold_convert (type, tem);
991 tree rpart = negate_expr (TREE_REALPART (t));
992 tree ipart = negate_expr (TREE_IMAGPART (t));
994 if ((TREE_CODE (rpart) == REAL_CST
995 && TREE_CODE (ipart) == REAL_CST)
996 || (TREE_CODE (rpart) == INTEGER_CST
997 && TREE_CODE (ipart) == INTEGER_CST))
998 return build_complex (type, rpart, ipart);
1003 return fold_convert (type, TREE_OPERAND (t, 0));
1006 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1008 /* -(A + B) -> (-B) - A. */
1009 if (negate_expr_p (TREE_OPERAND (t, 1))
1010 && reorder_operands_p (TREE_OPERAND (t, 0),
1011 TREE_OPERAND (t, 1)))
1013 tem = negate_expr (TREE_OPERAND (t, 1));
1014 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1015 tem, TREE_OPERAND (t, 0)));
1016 return fold_convert (type, tem);
1019 /* -(A + B) -> (-A) - B. */
1020 if (negate_expr_p (TREE_OPERAND (t, 0)))
1022 tem = negate_expr (TREE_OPERAND (t, 0));
1023 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1024 tem, TREE_OPERAND (t, 1)));
1025 return fold_convert (type, tem);
1031 /* - (A - B) -> B - A */
1032 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1033 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1034 return fold_convert (type,
1035 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1036 TREE_OPERAND (t, 1),
1037 TREE_OPERAND (t, 0))));
1041 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1047 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1049 tem = TREE_OPERAND (t, 1);
1050 if (negate_expr_p (tem))
1051 return fold_convert (type,
1052 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1053 TREE_OPERAND (t, 0),
1054 negate_expr (tem))));
1055 tem = TREE_OPERAND (t, 0);
1056 if (negate_expr_p (tem))
1057 return fold_convert (type,
1058 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1060 TREE_OPERAND (t, 1))));
1065 /* Convert -((double)float) into (double)(-float). */
1066 if (TREE_CODE (type) == REAL_TYPE)
1068 tem = strip_float_extensions (t);
1069 if (tem != t && negate_expr_p (tem))
1070 return fold_convert (type, negate_expr (tem));
1075 /* Negate -f(x) as f(-x). */
1076 if (negate_mathfn_p (builtin_mathfn_code (t))
1077 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1079 tree fndecl, arg, arglist;
1081 fndecl = get_callee_fndecl (t);
1082 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1083 arglist = build_tree_list (NULL_TREE, arg);
1084 return build_function_call_expr (fndecl, arglist);
1089 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1090 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1092 tree op1 = TREE_OPERAND (t, 1);
1093 if (TREE_INT_CST_HIGH (op1) == 0
1094 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1095 == TREE_INT_CST_LOW (op1))
1097 tree ntype = TYPE_UNSIGNED (type)
1098 ? lang_hooks.types.signed_type (type)
1099 : lang_hooks.types.unsigned_type (type);
1100 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1101 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1102 return fold_convert (type, temp);
1111 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1112 return fold_convert (type, tem);
1115 /* Split a tree IN into a constant, literal and variable parts that could be
1116 combined with CODE to make IN. "constant" means an expression with
1117 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1118 commutative arithmetic operation. Store the constant part into *CONP,
1119 the literal in *LITP and return the variable part. If a part isn't
1120 present, set it to null. If the tree does not decompose in this way,
1121 return the entire tree as the variable part and the other parts as null.
1123 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1124 case, we negate an operand that was subtracted. Except if it is a
1125 literal for which we use *MINUS_LITP instead.
1127 If NEGATE_P is true, we are negating all of IN, again except a literal
1128 for which we use *MINUS_LITP instead.
1130 If IN is itself a literal or constant, return it as appropriate.
1132 Note that we do not guarantee that any of the three values will be the
1133 same type as IN, but they will have the same signedness and mode. */
1136 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1137 tree *minus_litp, int negate_p)
1145 /* Strip any conversions that don't change the machine mode or signedness. */
1146 STRIP_SIGN_NOPS (in);
1148 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1150 else if (TREE_CODE (in) == code
1151 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1152 /* We can associate addition and subtraction together (even
1153 though the C standard doesn't say so) for integers because
1154 the value is not affected. For reals, the value might be
1155 affected, so we can't. */
1156 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1157 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1159 tree op0 = TREE_OPERAND (in, 0);
1160 tree op1 = TREE_OPERAND (in, 1);
1161 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1162 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1164 /* First see if either of the operands is a literal, then a constant. */
1165 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1166 *litp = op0, op0 = 0;
1167 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1168 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1170 if (op0 != 0 && TREE_CONSTANT (op0))
1171 *conp = op0, op0 = 0;
1172 else if (op1 != 0 && TREE_CONSTANT (op1))
1173 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1175 /* If we haven't dealt with either operand, this is not a case we can
1176 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1177 if (op0 != 0 && op1 != 0)
1182 var = op1, neg_var_p = neg1_p;
1184 /* Now do any needed negations. */
1186 *minus_litp = *litp, *litp = 0;
1188 *conp = negate_expr (*conp);
1190 var = negate_expr (var);
1192 else if (TREE_CONSTANT (in))
1200 *minus_litp = *litp, *litp = 0;
1201 else if (*minus_litp)
1202 *litp = *minus_litp, *minus_litp = 0;
1203 *conp = negate_expr (*conp);
1204 var = negate_expr (var);
1210 /* Re-associate trees split by the above function. T1 and T2 are either
1211 expressions to associate or null. Return the new expression, if any. If
1212 we build an operation, do it in TYPE and with CODE. */
1215 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1222 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1223 try to fold this since we will have infinite recursion. But do
1224 deal with any NEGATE_EXPRs. */
1225 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1226 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1228 if (code == PLUS_EXPR)
1230 if (TREE_CODE (t1) == NEGATE_EXPR)
1231 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1232 fold_convert (type, TREE_OPERAND (t1, 0)));
1233 else if (TREE_CODE (t2) == NEGATE_EXPR)
1234 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1235 fold_convert (type, TREE_OPERAND (t2, 0)));
1237 return build2 (code, type, fold_convert (type, t1),
1238 fold_convert (type, t2));
1241 return fold (build2 (code, type, fold_convert (type, t1),
1242 fold_convert (type, t2)));
1245 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1246 to produce a new constant.
1248 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1251 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1253 unsigned HOST_WIDE_INT int1l, int2l;
1254 HOST_WIDE_INT int1h, int2h;
1255 unsigned HOST_WIDE_INT low;
1257 unsigned HOST_WIDE_INT garbagel;
1258 HOST_WIDE_INT garbageh;
1260 tree type = TREE_TYPE (arg1);
1261 int uns = TYPE_UNSIGNED (type);
1263 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1265 int no_overflow = 0;
1267 int1l = TREE_INT_CST_LOW (arg1);
1268 int1h = TREE_INT_CST_HIGH (arg1);
1269 int2l = TREE_INT_CST_LOW (arg2);
1270 int2h = TREE_INT_CST_HIGH (arg2);
1275 low = int1l | int2l, hi = int1h | int2h;
1279 low = int1l ^ int2l, hi = int1h ^ int2h;
1283 low = int1l & int2l, hi = int1h & int2h;
1289 /* It's unclear from the C standard whether shifts can overflow.
1290 The following code ignores overflow; perhaps a C standard
1291 interpretation ruling is needed. */
1292 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1300 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1305 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1309 neg_double (int2l, int2h, &low, &hi);
1310 add_double (int1l, int1h, low, hi, &low, &hi);
1311 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1315 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1318 case TRUNC_DIV_EXPR:
1319 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1320 case EXACT_DIV_EXPR:
1321 /* This is a shortcut for a common special case. */
1322 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1323 && ! TREE_CONSTANT_OVERFLOW (arg1)
1324 && ! TREE_CONSTANT_OVERFLOW (arg2)
1325 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1327 if (code == CEIL_DIV_EXPR)
1330 low = int1l / int2l, hi = 0;
1334 /* ... fall through ... */
1336 case ROUND_DIV_EXPR:
1337 if (int2h == 0 && int2l == 1)
1339 low = int1l, hi = int1h;
1342 if (int1l == int2l && int1h == int2h
1343 && ! (int1l == 0 && int1h == 0))
1348 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1349 &low, &hi, &garbagel, &garbageh);
1352 case TRUNC_MOD_EXPR:
1353 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1354 /* This is a shortcut for a common special case. */
1355 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1356 && ! TREE_CONSTANT_OVERFLOW (arg1)
1357 && ! TREE_CONSTANT_OVERFLOW (arg2)
1358 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1360 if (code == CEIL_MOD_EXPR)
1362 low = int1l % int2l, hi = 0;
1366 /* ... fall through ... */
1368 case ROUND_MOD_EXPR:
1369 overflow = div_and_round_double (code, uns,
1370 int1l, int1h, int2l, int2h,
1371 &garbagel, &garbageh, &low, &hi);
1377 low = (((unsigned HOST_WIDE_INT) int1h
1378 < (unsigned HOST_WIDE_INT) int2h)
1379 || (((unsigned HOST_WIDE_INT) int1h
1380 == (unsigned HOST_WIDE_INT) int2h)
1383 low = (int1h < int2h
1384 || (int1h == int2h && int1l < int2l));
1386 if (low == (code == MIN_EXPR))
1387 low = int1l, hi = int1h;
1389 low = int2l, hi = int2h;
1396 /* If this is for a sizetype, can be represented as one (signed)
1397 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1400 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1401 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1402 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1403 return size_int_type_wide (low, type);
1406 t = build_int_2 (low, hi);
1407 TREE_TYPE (t) = TREE_TYPE (arg1);
1412 ? (!uns || is_sizetype) && overflow
1413 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1415 | TREE_OVERFLOW (arg1)
1416 | TREE_OVERFLOW (arg2));
1418 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1419 So check if force_fit_type truncated the value. */
1421 && ! TREE_OVERFLOW (t)
1422 && (TREE_INT_CST_HIGH (t) != hi
1423 || TREE_INT_CST_LOW (t) != low))
1424 TREE_OVERFLOW (t) = 1;
1426 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1427 | TREE_CONSTANT_OVERFLOW (arg1)
1428 | TREE_CONSTANT_OVERFLOW (arg2));
1432 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1433 constant. We assume ARG1 and ARG2 have the same data type, or at least
1434 are the same kind of constant and the same machine mode.
1436 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1439 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1444 if (TREE_CODE (arg1) == INTEGER_CST)
1445 return int_const_binop (code, arg1, arg2, notrunc);
1447 if (TREE_CODE (arg1) == REAL_CST)
1449 enum machine_mode mode;
1452 REAL_VALUE_TYPE value;
1455 d1 = TREE_REAL_CST (arg1);
1456 d2 = TREE_REAL_CST (arg2);
1458 type = TREE_TYPE (arg1);
1459 mode = TYPE_MODE (type);
1461 /* Don't perform operation if we honor signaling NaNs and
1462 either operand is a NaN. */
1463 if (HONOR_SNANS (mode)
1464 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1467 /* Don't perform operation if it would raise a division
1468 by zero exception. */
1469 if (code == RDIV_EXPR
1470 && REAL_VALUES_EQUAL (d2, dconst0)
1471 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1474 /* If either operand is a NaN, just return it. Otherwise, set up
1475 for floating-point trap; we return an overflow. */
1476 if (REAL_VALUE_ISNAN (d1))
1478 else if (REAL_VALUE_ISNAN (d2))
1481 REAL_ARITHMETIC (value, code, d1, d2);
1483 t = build_real (type, real_value_truncate (mode, value));
1486 = (force_fit_type (t, 0)
1487 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1488 TREE_CONSTANT_OVERFLOW (t)
1490 | TREE_CONSTANT_OVERFLOW (arg1)
1491 | TREE_CONSTANT_OVERFLOW (arg2);
1494 if (TREE_CODE (arg1) == COMPLEX_CST)
1496 tree type = TREE_TYPE (arg1);
1497 tree r1 = TREE_REALPART (arg1);
1498 tree i1 = TREE_IMAGPART (arg1);
1499 tree r2 = TREE_REALPART (arg2);
1500 tree i2 = TREE_IMAGPART (arg2);
1506 t = build_complex (type,
1507 const_binop (PLUS_EXPR, r1, r2, notrunc),
1508 const_binop (PLUS_EXPR, i1, i2, notrunc));
1512 t = build_complex (type,
1513 const_binop (MINUS_EXPR, r1, r2, notrunc),
1514 const_binop (MINUS_EXPR, i1, i2, notrunc));
1518 t = build_complex (type,
1519 const_binop (MINUS_EXPR,
1520 const_binop (MULT_EXPR,
1522 const_binop (MULT_EXPR,
1525 const_binop (PLUS_EXPR,
1526 const_binop (MULT_EXPR,
1528 const_binop (MULT_EXPR,
1536 = const_binop (PLUS_EXPR,
1537 const_binop (MULT_EXPR, r2, r2, notrunc),
1538 const_binop (MULT_EXPR, i2, i2, notrunc),
1541 t = build_complex (type,
1543 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1544 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1545 const_binop (PLUS_EXPR,
1546 const_binop (MULT_EXPR, r1, r2,
1548 const_binop (MULT_EXPR, i1, i2,
1551 magsquared, notrunc),
1553 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1554 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1555 const_binop (MINUS_EXPR,
1556 const_binop (MULT_EXPR, i1, r2,
1558 const_binop (MULT_EXPR, r1, i2,
1561 magsquared, notrunc));
1573 /* These are the hash table functions for the hash table of INTEGER_CST
1574 nodes of a sizetype. */
1576 /* Return the hash code code X, an INTEGER_CST. */
1579 size_htab_hash (const void *x)
1583 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1584 ^ htab_hash_pointer (TREE_TYPE (t))
1585 ^ (TREE_OVERFLOW (t) << 20));
1588 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1589 is the same as that given by *Y, which is the same. */
1592 size_htab_eq (const void *x, const void *y)
1597 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1598 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1599 && TREE_TYPE (xt) == TREE_TYPE (yt)
1600 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1603 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1604 bits are given by NUMBER and of the sizetype represented by KIND. */
1607 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1609 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1612 /* Likewise, but the desired type is specified explicitly. */
1614 static GTY (()) tree new_const;
1615 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1619 size_int_type_wide (HOST_WIDE_INT number, tree type)
1625 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1626 new_const = make_node (INTEGER_CST);
1629 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1630 hash table, we return the value from the hash table. Otherwise, we
1631 place that in the hash table and make a new node for the next time. */
1632 TREE_INT_CST_LOW (new_const) = number;
1633 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1634 TREE_TYPE (new_const) = type;
1635 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1636 = force_fit_type (new_const, 0);
1638 slot = htab_find_slot (size_htab, new_const, INSERT);
1644 new_const = make_node (INTEGER_CST);
1648 return (tree) *slot;
1651 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1652 is a tree code. The type of the result is taken from the operands.
1653 Both must be the same type integer type and it must be a size type.
1654 If the operands are constant, so is the result. */
1657 size_binop (enum tree_code code, tree arg0, tree arg1)
1659 tree type = TREE_TYPE (arg0);
1661 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1662 || type != TREE_TYPE (arg1))
1665 /* Handle the special case of two integer constants faster. */
1666 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1668 /* And some specific cases even faster than that. */
1669 if (code == PLUS_EXPR && integer_zerop (arg0))
1671 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1672 && integer_zerop (arg1))
1674 else if (code == MULT_EXPR && integer_onep (arg0))
1677 /* Handle general case of two integer constants. */
1678 return int_const_binop (code, arg0, arg1, 0);
1681 if (arg0 == error_mark_node || arg1 == error_mark_node)
1682 return error_mark_node;
1684 return fold (build2 (code, type, arg0, arg1));
1687 /* Given two values, either both of sizetype or both of bitsizetype,
1688 compute the difference between the two values. Return the value
1689 in signed type corresponding to the type of the operands. */
1692 size_diffop (tree arg0, tree arg1)
1694 tree type = TREE_TYPE (arg0);
1697 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1698 || type != TREE_TYPE (arg1))
1701 /* If the type is already signed, just do the simple thing. */
1702 if (!TYPE_UNSIGNED (type))
1703 return size_binop (MINUS_EXPR, arg0, arg1);
1705 ctype = (type == bitsizetype || type == ubitsizetype
1706 ? sbitsizetype : ssizetype);
1708 /* If either operand is not a constant, do the conversions to the signed
1709 type and subtract. The hardware will do the right thing with any
1710 overflow in the subtraction. */
1711 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1712 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1713 fold_convert (ctype, arg1));
1715 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1716 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1717 overflow) and negate (which can't either). Special-case a result
1718 of zero while we're here. */
1719 if (tree_int_cst_equal (arg0, arg1))
1720 return fold_convert (ctype, integer_zero_node);
1721 else if (tree_int_cst_lt (arg1, arg0))
1722 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1724 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1725 fold_convert (ctype, size_binop (MINUS_EXPR,
1730 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1731 type TYPE. If no simplification can be done return NULL_TREE. */
1734 fold_convert_const (enum tree_code code, tree type, tree arg1)
1739 if (TREE_TYPE (arg1) == type)
1742 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1744 if (TREE_CODE (arg1) == INTEGER_CST)
1746 /* If we would build a constant wider than GCC supports,
1747 leave the conversion unfolded. */
1748 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1751 /* If we are trying to make a sizetype for a small integer, use
1752 size_int to pick up cached types to reduce duplicate nodes. */
1753 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1754 && !TREE_CONSTANT_OVERFLOW (arg1)
1755 && compare_tree_int (arg1, 10000) < 0)
1756 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1758 /* Given an integer constant, make new constant with new type,
1759 appropriately sign-extended or truncated. */
1760 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1761 TREE_INT_CST_HIGH (arg1));
1762 TREE_TYPE (t) = type;
1763 /* Indicate an overflow if (1) ARG1 already overflowed,
1764 or (2) force_fit_type indicates an overflow.
1765 Tell force_fit_type that an overflow has already occurred
1766 if ARG1 is a too-large unsigned value and T is signed.
1767 But don't indicate an overflow if converting a pointer. */
1769 = ((force_fit_type (t,
1770 (TREE_INT_CST_HIGH (arg1) < 0
1771 && (TYPE_UNSIGNED (type)
1772 < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
1773 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1774 || TREE_OVERFLOW (arg1));
1775 TREE_CONSTANT_OVERFLOW (t)
1776 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1779 else if (TREE_CODE (arg1) == REAL_CST)
1781 /* The following code implements the floating point to integer
1782 conversion rules required by the Java Language Specification,
1783 that IEEE NaNs are mapped to zero and values that overflow
1784 the target precision saturate, i.e. values greater than
1785 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1786 are mapped to INT_MIN. These semantics are allowed by the
1787 C and C++ standards that simply state that the behavior of
1788 FP-to-integer conversion is unspecified upon overflow. */
1790 HOST_WIDE_INT high, low;
1793 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1797 case FIX_TRUNC_EXPR:
1798 real_trunc (&r, VOIDmode, &x);
1802 real_ceil (&r, VOIDmode, &x);
1805 case FIX_FLOOR_EXPR:
1806 real_floor (&r, VOIDmode, &x);
1809 case FIX_ROUND_EXPR:
1810 real_round (&r, VOIDmode, &x);
1817 /* If R is NaN, return zero and show we have an overflow. */
1818 if (REAL_VALUE_ISNAN (r))
1825 /* See if R is less than the lower bound or greater than the
1830 tree lt = TYPE_MIN_VALUE (type);
1831 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1832 if (REAL_VALUES_LESS (r, l))
1835 high = TREE_INT_CST_HIGH (lt);
1836 low = TREE_INT_CST_LOW (lt);
1842 tree ut = TYPE_MAX_VALUE (type);
1845 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1846 if (REAL_VALUES_LESS (u, r))
1849 high = TREE_INT_CST_HIGH (ut);
1850 low = TREE_INT_CST_LOW (ut);
1856 REAL_VALUE_TO_INT (&low, &high, r);
1858 t = build_int_2 (low, high);
1859 TREE_TYPE (t) = type;
1861 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1862 TREE_CONSTANT_OVERFLOW (t)
1863 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1867 else if (TREE_CODE (type) == REAL_TYPE)
1869 if (TREE_CODE (arg1) == INTEGER_CST)
1870 return build_real_from_int_cst (type, arg1);
1871 if (TREE_CODE (arg1) == REAL_CST)
1873 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1875 /* We make a copy of ARG1 so that we don't modify an
1876 existing constant tree. */
1877 t = copy_node (arg1);
1878 TREE_TYPE (t) = type;
1882 t = build_real (type,
1883 real_value_truncate (TYPE_MODE (type),
1884 TREE_REAL_CST (arg1)));
1887 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1888 TREE_CONSTANT_OVERFLOW (t)
1889 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1896 /* Convert expression ARG to type TYPE. Used by the middle-end for
1897 simple conversions in preference to calling the front-end's convert. */
1900 fold_convert (tree type, tree arg)
1902 tree orig = TREE_TYPE (arg);
1908 if (TREE_CODE (arg) == ERROR_MARK
1909 || TREE_CODE (type) == ERROR_MARK
1910 || TREE_CODE (orig) == ERROR_MARK)
1911 return error_mark_node;
1913 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1914 return fold (build1 (NOP_EXPR, type, arg));
1916 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1917 || TREE_CODE (type) == OFFSET_TYPE)
1919 if (TREE_CODE (arg) == INTEGER_CST)
1921 tem = fold_convert_const (NOP_EXPR, type, arg);
1922 if (tem != NULL_TREE)
1925 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1926 || TREE_CODE (orig) == OFFSET_TYPE)
1927 return fold (build1 (NOP_EXPR, type, arg));
1928 if (TREE_CODE (orig) == COMPLEX_TYPE)
1930 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1931 return fold_convert (type, tem);
1933 if (TREE_CODE (orig) == VECTOR_TYPE
1934 && GET_MODE_SIZE (TYPE_MODE (type))
1935 == GET_MODE_SIZE (TYPE_MODE (orig)))
1936 return fold (build1 (NOP_EXPR, type, arg));
1938 else if (TREE_CODE (type) == REAL_TYPE)
1940 if (TREE_CODE (arg) == INTEGER_CST)
1942 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1943 if (tem != NULL_TREE)
1946 else if (TREE_CODE (arg) == REAL_CST)
1948 tem = fold_convert_const (NOP_EXPR, type, arg);
1949 if (tem != NULL_TREE)
1953 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1954 return fold (build1 (FLOAT_EXPR, type, arg));
1955 if (TREE_CODE (orig) == REAL_TYPE)
1956 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1958 if (TREE_CODE (orig) == COMPLEX_TYPE)
1960 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1961 return fold_convert (type, tem);
1964 else if (TREE_CODE (type) == COMPLEX_TYPE)
1966 if (INTEGRAL_TYPE_P (orig)
1967 || POINTER_TYPE_P (orig)
1968 || TREE_CODE (orig) == REAL_TYPE)
1969 return build2 (COMPLEX_EXPR, type,
1970 fold_convert (TREE_TYPE (type), arg),
1971 fold_convert (TREE_TYPE (type), integer_zero_node));
1972 if (TREE_CODE (orig) == COMPLEX_TYPE)
1976 if (TREE_CODE (arg) == COMPLEX_EXPR)
1978 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1979 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1980 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1983 arg = save_expr (arg);
1984 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1985 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1986 rpart = fold_convert (TREE_TYPE (type), rpart);
1987 ipart = fold_convert (TREE_TYPE (type), ipart);
1988 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1991 else if (TREE_CODE (type) == VECTOR_TYPE)
1993 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1994 && GET_MODE_SIZE (TYPE_MODE (type))
1995 == GET_MODE_SIZE (TYPE_MODE (orig)))
1996 return fold (build1 (NOP_EXPR, type, arg));
1997 if (TREE_CODE (orig) == VECTOR_TYPE
1998 && GET_MODE_SIZE (TYPE_MODE (type))
1999 == GET_MODE_SIZE (TYPE_MODE (orig)))
2000 return fold (build1 (NOP_EXPR, type, arg));
2002 else if (VOID_TYPE_P (type))
2003 return fold (build1 (CONVERT_EXPR, type, arg));
2007 /* Return an expr equal to X but certainly not valid as an lvalue. */
2012 /* We only need to wrap lvalue tree codes. */
2013 switch (TREE_CODE (x))
2027 case ARRAY_RANGE_REF:
2032 case PREINCREMENT_EXPR:
2033 case PREDECREMENT_EXPR:
2036 case TRY_CATCH_EXPR:
2037 case WITH_CLEANUP_EXPR:
2049 /* Assume the worst for front-end tree codes. */
2050 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2054 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2057 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2058 Zero means allow extended lvalues. */
2060 int pedantic_lvalues;
2062 /* When pedantic, return an expr equal to X but certainly not valid as a
2063 pedantic lvalue. Otherwise, return X. */
2066 pedantic_non_lvalue (tree x)
2068 if (pedantic_lvalues)
2069 return non_lvalue (x);
2074 /* Given a tree comparison code, return the code that is the logical inverse
2075 of the given code. It is not safe to do this for floating-point
2076 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2077 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2079 static enum tree_code
2080 invert_tree_comparison (enum tree_code code, bool honor_nans)
2082 if (honor_nans && flag_trapping_math)
2092 return honor_nans ? UNLE_EXPR : LE_EXPR;
2094 return honor_nans ? UNLT_EXPR : LT_EXPR;
2096 return honor_nans ? UNGE_EXPR : GE_EXPR;
2098 return honor_nans ? UNGT_EXPR : GT_EXPR;
2112 return UNORDERED_EXPR;
2113 case UNORDERED_EXPR:
2114 return ORDERED_EXPR;
2120 /* Similar, but return the comparison that results if the operands are
2121 swapped. This is safe for floating-point. */
2123 static enum tree_code
2124 swap_tree_comparison (enum tree_code code)
2145 /* Convert a comparison tree code from an enum tree_code representation
2146 into a compcode bit-based encoding. This function is the inverse of
2147 compcode_to_comparison. */
2149 static enum comparison_code
2150 comparison_to_compcode (enum tree_code code)
2167 return COMPCODE_ORD;
2168 case UNORDERED_EXPR:
2169 return COMPCODE_UNORD;
2171 return COMPCODE_UNLT;
2173 return COMPCODE_UNEQ;
2175 return COMPCODE_UNLE;
2177 return COMPCODE_UNGT;
2179 return COMPCODE_LTGT;
2181 return COMPCODE_UNGE;
2187 /* Convert a compcode bit-based encoding of a comparison operator back
2188 to GCC's enum tree_code representation. This function is the
2189 inverse of comparison_to_compcode. */
2191 static enum tree_code
2192 compcode_to_comparison (enum comparison_code code)
2209 return ORDERED_EXPR;
2210 case COMPCODE_UNORD:
2211 return UNORDERED_EXPR;
2229 /* Return a tree for the comparison which is the combination of
2230 doing the AND or OR (depending on CODE) of the two operations LCODE
2231 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2232 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2233 if this makes the transformation invalid. */
2236 combine_comparisons (enum tree_code code, enum tree_code lcode,
2237 enum tree_code rcode, tree truth_type,
2238 tree ll_arg, tree lr_arg)
2240 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2241 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2242 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2243 enum comparison_code compcode;
2247 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2248 compcode = lcompcode & rcompcode;
2251 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2252 compcode = lcompcode | rcompcode;
2261 /* Eliminate unordered comparisons, as well as LTGT and ORD
2262 which are not used unless the mode has NaNs. */
2263 compcode &= ~COMPCODE_UNORD;
2264 if (compcode == COMPCODE_LTGT)
2265 compcode = COMPCODE_NE;
2266 else if (compcode == COMPCODE_ORD)
2267 compcode = COMPCODE_TRUE;
2269 else if (flag_trapping_math)
2271 /* Check that the original operation and the optimized ones will trap
2272 under the same condition. */
2273 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2274 && (lcompcode != COMPCODE_EQ)
2275 && (lcompcode != COMPCODE_ORD);
2276 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2277 && (rcompcode != COMPCODE_EQ)
2278 && (rcompcode != COMPCODE_ORD);
2279 bool trap = (compcode & COMPCODE_UNORD) == 0
2280 && (compcode != COMPCODE_EQ)
2281 && (compcode != COMPCODE_ORD);
2283 /* In a short-circuited boolean expression the LHS might be
2284 such that the RHS, if evaluated, will never trap. For
2285 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2286 if neither x nor y is NaN. (This is a mixed blessing: for
2287 example, the expression above will never trap, hence
2288 optimizing it to x < y would be invalid). */
2289 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2290 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2293 /* If the comparison was short-circuited, and only the RHS
2294 trapped, we may now generate a spurious trap. */
2296 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2299 /* If we changed the conditions that cause a trap, we lose. */
2300 if ((ltrap || rtrap) != trap)
2304 if (compcode == COMPCODE_TRUE)
2305 return constant_boolean_node (true, truth_type);
2306 else if (compcode == COMPCODE_FALSE)
2307 return constant_boolean_node (false, truth_type);
2309 return fold (build2 (compcode_to_comparison (compcode),
2310 truth_type, ll_arg, lr_arg));
2313 /* Return nonzero if CODE is a tree code that represents a truth value. */
2316 truth_value_p (enum tree_code code)
2318 return (TREE_CODE_CLASS (code) == '<'
2319 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2320 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2321 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2324 /* Return nonzero if two operands (typically of the same tree node)
2325 are necessarily equal. If either argument has side-effects this
2326 function returns zero. FLAGS modifies behavior as follows:
2328 If OEP_ONLY_CONST is set, only return nonzero for constants.
2329 This function tests whether the operands are indistinguishable;
2330 it does not test whether they are equal using C's == operation.
2331 The distinction is important for IEEE floating point, because
2332 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2333 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2335 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2336 even though it may hold multiple values during a function.
2337 This is because a GCC tree node guarantees that nothing else is
2338 executed between the evaluation of its "operands" (which may often
2339 be evaluated in arbitrary order). Hence if the operands themselves
2340 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2341 same value in each operand/subexpression. Hence a zero value for
2342 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2343 If comparing arbitrary expression trees, such as from different
2344 statements, ONLY_CONST must usually be nonzero.
2346 If OEP_PURE_SAME is set, then pure functions with identical arguments
2347 are considered the same. It is used when the caller has other ways
2348 to ensure that global memory is unchanged in between. */
2351 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2353 /* If either is ERROR_MARK, they aren't equal. */
2354 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2357 /* If both types don't have the same signedness, then we can't consider
2358 them equal. We must check this before the STRIP_NOPS calls
2359 because they may change the signedness of the arguments. */
2360 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2366 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2367 /* This is needed for conversions and for COMPONENT_REF.
2368 Might as well play it safe and always test this. */
2369 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2370 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2371 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2374 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2375 We don't care about side effects in that case because the SAVE_EXPR
2376 takes care of that for us. In all other cases, two expressions are
2377 equal if they have no side effects. If we have two identical
2378 expressions with side effects that should be treated the same due
2379 to the only side effects being identical SAVE_EXPR's, that will
2380 be detected in the recursive calls below. */
2381 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2382 && (TREE_CODE (arg0) == SAVE_EXPR
2383 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2386 /* Next handle constant cases, those for which we can return 1 even
2387 if ONLY_CONST is set. */
2388 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2389 switch (TREE_CODE (arg0))
2392 return (! TREE_CONSTANT_OVERFLOW (arg0)
2393 && ! TREE_CONSTANT_OVERFLOW (arg1)
2394 && tree_int_cst_equal (arg0, arg1));
2397 return (! TREE_CONSTANT_OVERFLOW (arg0)
2398 && ! TREE_CONSTANT_OVERFLOW (arg1)
2399 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2400 TREE_REAL_CST (arg1)));
2406 if (TREE_CONSTANT_OVERFLOW (arg0)
2407 || TREE_CONSTANT_OVERFLOW (arg1))
2410 v1 = TREE_VECTOR_CST_ELTS (arg0);
2411 v2 = TREE_VECTOR_CST_ELTS (arg1);
2414 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2417 v1 = TREE_CHAIN (v1);
2418 v2 = TREE_CHAIN (v2);
2425 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2427 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2431 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2432 && ! memcmp (TREE_STRING_POINTER (arg0),
2433 TREE_STRING_POINTER (arg1),
2434 TREE_STRING_LENGTH (arg0)));
2437 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2443 if (flags & OEP_ONLY_CONST)
2446 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2449 /* Two conversions are equal only if signedness and modes match. */
2450 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2451 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2452 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2455 return operand_equal_p (TREE_OPERAND (arg0, 0),
2456 TREE_OPERAND (arg1, 0), flags);
2460 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2461 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2465 /* For commutative ops, allow the other order. */
2466 return (commutative_tree_code (TREE_CODE (arg0))
2467 && operand_equal_p (TREE_OPERAND (arg0, 0),
2468 TREE_OPERAND (arg1, 1), flags)
2469 && operand_equal_p (TREE_OPERAND (arg0, 1),
2470 TREE_OPERAND (arg1, 0), flags));
2473 /* If either of the pointer (or reference) expressions we are
2474 dereferencing contain a side effect, these cannot be equal. */
2475 if (TREE_SIDE_EFFECTS (arg0)
2476 || TREE_SIDE_EFFECTS (arg1))
2479 switch (TREE_CODE (arg0))
2482 return operand_equal_p (TREE_OPERAND (arg0, 0),
2483 TREE_OPERAND (arg1, 0), flags);
2487 case ARRAY_RANGE_REF:
2488 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2489 TREE_OPERAND (arg1, 0), flags)
2490 && operand_equal_p (TREE_OPERAND (arg0, 1),
2491 TREE_OPERAND (arg1, 1), flags));
2494 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2495 TREE_OPERAND (arg1, 0), flags)
2496 && operand_equal_p (TREE_OPERAND (arg0, 1),
2497 TREE_OPERAND (arg1, 1), flags)
2498 && operand_equal_p (TREE_OPERAND (arg0, 2),
2499 TREE_OPERAND (arg1, 2), flags));
2505 switch (TREE_CODE (arg0))
2508 case TRUTH_NOT_EXPR:
2509 return operand_equal_p (TREE_OPERAND (arg0, 0),
2510 TREE_OPERAND (arg1, 0), flags);
2513 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2516 /* If the CALL_EXPRs call different functions, then they
2517 clearly can not be equal. */
2518 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2519 TREE_OPERAND (arg1, 0), flags))
2523 unsigned int cef = call_expr_flags (arg0);
2524 if (flags & OEP_PURE_SAME)
2525 cef &= ECF_CONST | ECF_PURE;
2532 /* Now see if all the arguments are the same. operand_equal_p
2533 does not handle TREE_LIST, so we walk the operands here
2534 feeding them to operand_equal_p. */
2535 arg0 = TREE_OPERAND (arg0, 1);
2536 arg1 = TREE_OPERAND (arg1, 1);
2537 while (arg0 && arg1)
2539 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2543 arg0 = TREE_CHAIN (arg0);
2544 arg1 = TREE_CHAIN (arg1);
2547 /* If we get here and both argument lists are exhausted
2548 then the CALL_EXPRs are equal. */
2549 return ! (arg0 || arg1);
2556 /* Consider __builtin_sqrt equal to sqrt. */
2557 return (TREE_CODE (arg0) == FUNCTION_DECL
2558 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2559 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2560 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2567 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2568 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2570 When in doubt, return 0. */
2573 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2575 int unsignedp1, unsignedpo;
2576 tree primarg0, primarg1, primother;
2577 unsigned int correct_width;
2579 if (operand_equal_p (arg0, arg1, 0))
2582 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2583 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2586 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2587 and see if the inner values are the same. This removes any
2588 signedness comparison, which doesn't matter here. */
2589 primarg0 = arg0, primarg1 = arg1;
2590 STRIP_NOPS (primarg0);
2591 STRIP_NOPS (primarg1);
2592 if (operand_equal_p (primarg0, primarg1, 0))
2595 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2596 actual comparison operand, ARG0.
2598 First throw away any conversions to wider types
2599 already present in the operands. */
2601 primarg1 = get_narrower (arg1, &unsignedp1);
2602 primother = get_narrower (other, &unsignedpo);
2604 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2605 if (unsignedp1 == unsignedpo
2606 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2607 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2609 tree type = TREE_TYPE (arg0);
2611 /* Make sure shorter operand is extended the right way
2612 to match the longer operand. */
2613 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2614 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2616 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2623 /* See if ARG is an expression that is either a comparison or is performing
2624 arithmetic on comparisons. The comparisons must only be comparing
2625 two different values, which will be stored in *CVAL1 and *CVAL2; if
2626 they are nonzero it means that some operands have already been found.
2627 No variables may be used anywhere else in the expression except in the
2628 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2629 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2631 If this is true, return 1. Otherwise, return zero. */
2634 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2636 enum tree_code code = TREE_CODE (arg);
2637 char class = TREE_CODE_CLASS (code);
2639 /* We can handle some of the 'e' cases here. */
2640 if (class == 'e' && code == TRUTH_NOT_EXPR)
2642 else if (class == 'e'
2643 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2644 || code == COMPOUND_EXPR))
2647 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2648 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2650 /* If we've already found a CVAL1 or CVAL2, this expression is
2651 two complex to handle. */
2652 if (*cval1 || *cval2)
2662 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2665 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2666 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2667 cval1, cval2, save_p));
2673 if (code == COND_EXPR)
2674 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2675 cval1, cval2, save_p)
2676 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2677 cval1, cval2, save_p)
2678 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2679 cval1, cval2, save_p));
2683 /* First see if we can handle the first operand, then the second. For
2684 the second operand, we know *CVAL1 can't be zero. It must be that
2685 one side of the comparison is each of the values; test for the
2686 case where this isn't true by failing if the two operands
2689 if (operand_equal_p (TREE_OPERAND (arg, 0),
2690 TREE_OPERAND (arg, 1), 0))
2694 *cval1 = TREE_OPERAND (arg, 0);
2695 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2697 else if (*cval2 == 0)
2698 *cval2 = TREE_OPERAND (arg, 0);
2699 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2704 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2706 else if (*cval2 == 0)
2707 *cval2 = TREE_OPERAND (arg, 1);
2708 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2720 /* ARG is a tree that is known to contain just arithmetic operations and
2721 comparisons. Evaluate the operations in the tree substituting NEW0 for
2722 any occurrence of OLD0 as an operand of a comparison and likewise for
2726 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2728 tree type = TREE_TYPE (arg);
2729 enum tree_code code = TREE_CODE (arg);
2730 char class = TREE_CODE_CLASS (code);
2732 /* We can handle some of the 'e' cases here. */
2733 if (class == 'e' && code == TRUTH_NOT_EXPR)
2735 else if (class == 'e'
2736 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2742 return fold (build1 (code, type,
2743 eval_subst (TREE_OPERAND (arg, 0),
2744 old0, new0, old1, new1)));
2747 return fold (build2 (code, type,
2748 eval_subst (TREE_OPERAND (arg, 0),
2749 old0, new0, old1, new1),
2750 eval_subst (TREE_OPERAND (arg, 1),
2751 old0, new0, old1, new1)));
2757 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2760 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2763 return fold (build3 (code, type,
2764 eval_subst (TREE_OPERAND (arg, 0),
2765 old0, new0, old1, new1),
2766 eval_subst (TREE_OPERAND (arg, 1),
2767 old0, new0, old1, new1),
2768 eval_subst (TREE_OPERAND (arg, 2),
2769 old0, new0, old1, new1)));
2773 /* Fall through - ??? */
2777 tree arg0 = TREE_OPERAND (arg, 0);
2778 tree arg1 = TREE_OPERAND (arg, 1);
2780 /* We need to check both for exact equality and tree equality. The
2781 former will be true if the operand has a side-effect. In that
2782 case, we know the operand occurred exactly once. */
2784 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2786 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2789 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2791 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2794 return fold (build2 (code, type, arg0, arg1));
2802 /* Return a tree for the case when the result of an expression is RESULT
2803 converted to TYPE and OMITTED was previously an operand of the expression
2804 but is now not needed (e.g., we folded OMITTED * 0).
2806 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2807 the conversion of RESULT to TYPE. */
2810 omit_one_operand (tree type, tree result, tree omitted)
2812 tree t = fold_convert (type, result);
2814 if (TREE_SIDE_EFFECTS (omitted))
2815 return build2 (COMPOUND_EXPR, type, omitted, t);
2817 return non_lvalue (t);
2820 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2823 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2825 tree t = fold_convert (type, result);
2827 if (TREE_SIDE_EFFECTS (omitted))
2828 return build2 (COMPOUND_EXPR, type, omitted, t);
2830 return pedantic_non_lvalue (t);
2833 /* Return a simplified tree node for the truth-negation of ARG. This
2834 never alters ARG itself. We assume that ARG is an operation that
2835 returns a truth value (0 or 1).
2837 FIXME: one would think we would fold the result, but it causes
2838 problems with the dominator optimizer. */
2840 invert_truthvalue (tree arg)
2842 tree type = TREE_TYPE (arg);
2843 enum tree_code code = TREE_CODE (arg);
2845 if (code == ERROR_MARK)
2848 /* If this is a comparison, we can simply invert it, except for
2849 floating-point non-equality comparisons, in which case we just
2850 enclose a TRUTH_NOT_EXPR around what we have. */
2852 if (TREE_CODE_CLASS (code) == '<')
2854 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2855 if (FLOAT_TYPE_P (op_type)
2856 && flag_trapping_math
2857 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2858 && code != NE_EXPR && code != EQ_EXPR)
2859 return build1 (TRUTH_NOT_EXPR, type, arg);
2862 code = invert_tree_comparison (code,
2863 HONOR_NANS (TYPE_MODE (op_type)));
2864 if (code == ERROR_MARK)
2865 return build1 (TRUTH_NOT_EXPR, type, arg);
2867 return build2 (code, type,
2868 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2875 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2877 case TRUTH_AND_EXPR:
2878 return build2 (TRUTH_OR_EXPR, type,
2879 invert_truthvalue (TREE_OPERAND (arg, 0)),
2880 invert_truthvalue (TREE_OPERAND (arg, 1)));
2883 return build2 (TRUTH_AND_EXPR, type,
2884 invert_truthvalue (TREE_OPERAND (arg, 0)),
2885 invert_truthvalue (TREE_OPERAND (arg, 1)));
2887 case TRUTH_XOR_EXPR:
2888 /* Here we can invert either operand. We invert the first operand
2889 unless the second operand is a TRUTH_NOT_EXPR in which case our
2890 result is the XOR of the first operand with the inside of the
2891 negation of the second operand. */
2893 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2894 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2895 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2897 return build2 (TRUTH_XOR_EXPR, type,
2898 invert_truthvalue (TREE_OPERAND (arg, 0)),
2899 TREE_OPERAND (arg, 1));
2901 case TRUTH_ANDIF_EXPR:
2902 return build2 (TRUTH_ORIF_EXPR, type,
2903 invert_truthvalue (TREE_OPERAND (arg, 0)),
2904 invert_truthvalue (TREE_OPERAND (arg, 1)));
2906 case TRUTH_ORIF_EXPR:
2907 return build2 (TRUTH_ANDIF_EXPR, type,
2908 invert_truthvalue (TREE_OPERAND (arg, 0)),
2909 invert_truthvalue (TREE_OPERAND (arg, 1)));
2911 case TRUTH_NOT_EXPR:
2912 return TREE_OPERAND (arg, 0);
2915 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2916 invert_truthvalue (TREE_OPERAND (arg, 1)),
2917 invert_truthvalue (TREE_OPERAND (arg, 2)));
2920 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2921 invert_truthvalue (TREE_OPERAND (arg, 1)));
2923 case NON_LVALUE_EXPR:
2924 return invert_truthvalue (TREE_OPERAND (arg, 0));
2927 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2932 return build1 (TREE_CODE (arg), type,
2933 invert_truthvalue (TREE_OPERAND (arg, 0)));
2936 if (!integer_onep (TREE_OPERAND (arg, 1)))
2938 return build2 (EQ_EXPR, type, arg,
2939 fold_convert (type, integer_zero_node));
2942 return build1 (TRUTH_NOT_EXPR, type, arg);
2944 case CLEANUP_POINT_EXPR:
2945 return build1 (CLEANUP_POINT_EXPR, type,
2946 invert_truthvalue (TREE_OPERAND (arg, 0)));
2951 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2953 return build1 (TRUTH_NOT_EXPR, type, arg);
2956 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2957 operands are another bit-wise operation with a common input. If so,
2958 distribute the bit operations to save an operation and possibly two if
2959 constants are involved. For example, convert
2960 (A | B) & (A | C) into A | (B & C)
2961 Further simplification will occur if B and C are constants.
2963 If this optimization cannot be done, 0 will be returned. */
2966 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2971 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2972 || TREE_CODE (arg0) == code
2973 || (TREE_CODE (arg0) != BIT_AND_EXPR
2974 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2977 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2979 common = TREE_OPERAND (arg0, 0);
2980 left = TREE_OPERAND (arg0, 1);
2981 right = TREE_OPERAND (arg1, 1);
2983 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2985 common = TREE_OPERAND (arg0, 0);
2986 left = TREE_OPERAND (arg0, 1);
2987 right = TREE_OPERAND (arg1, 0);
2989 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2991 common = TREE_OPERAND (arg0, 1);
2992 left = TREE_OPERAND (arg0, 0);
2993 right = TREE_OPERAND (arg1, 1);
2995 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2997 common = TREE_OPERAND (arg0, 1);
2998 left = TREE_OPERAND (arg0, 0);
2999 right = TREE_OPERAND (arg1, 0);
3004 return fold (build2 (TREE_CODE (arg0), type, common,
3005 fold (build2 (code, type, left, right))));
3008 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3009 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3012 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3015 tree result = build3 (BIT_FIELD_REF, type, inner,
3016 size_int (bitsize), bitsize_int (bitpos));
3018 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3023 /* Optimize a bit-field compare.
3025 There are two cases: First is a compare against a constant and the
3026 second is a comparison of two items where the fields are at the same
3027 bit position relative to the start of a chunk (byte, halfword, word)
3028 large enough to contain it. In these cases we can avoid the shift
3029 implicit in bitfield extractions.
3031 For constants, we emit a compare of the shifted constant with the
3032 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3033 compared. For two fields at the same position, we do the ANDs with the
3034 similar mask and compare the result of the ANDs.
3036 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3037 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3038 are the left and right operands of the comparison, respectively.
3040 If the optimization described above can be done, we return the resulting
3041 tree. Otherwise we return zero. */
3044 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3047 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3048 tree type = TREE_TYPE (lhs);
3049 tree signed_type, unsigned_type;
3050 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3051 enum machine_mode lmode, rmode, nmode;
3052 int lunsignedp, runsignedp;
3053 int lvolatilep = 0, rvolatilep = 0;
3054 tree linner, rinner = NULL_TREE;
3058 /* Get all the information about the extractions being done. If the bit size
3059 if the same as the size of the underlying object, we aren't doing an
3060 extraction at all and so can do nothing. We also don't want to
3061 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3062 then will no longer be able to replace it. */
3063 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3064 &lunsignedp, &lvolatilep);
3065 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3066 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3071 /* If this is not a constant, we can only do something if bit positions,
3072 sizes, and signedness are the same. */
3073 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3074 &runsignedp, &rvolatilep);
3076 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3077 || lunsignedp != runsignedp || offset != 0
3078 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3082 /* See if we can find a mode to refer to this field. We should be able to,
3083 but fail if we can't. */
3084 nmode = get_best_mode (lbitsize, lbitpos,
3085 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3086 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3087 TYPE_ALIGN (TREE_TYPE (rinner))),
3088 word_mode, lvolatilep || rvolatilep);
3089 if (nmode == VOIDmode)
3092 /* Set signed and unsigned types of the precision of this mode for the
3094 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3095 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3097 /* Compute the bit position and size for the new reference and our offset
3098 within it. If the new reference is the same size as the original, we
3099 won't optimize anything, so return zero. */
3100 nbitsize = GET_MODE_BITSIZE (nmode);
3101 nbitpos = lbitpos & ~ (nbitsize - 1);
3103 if (nbitsize == lbitsize)
3106 if (BYTES_BIG_ENDIAN)
3107 lbitpos = nbitsize - lbitsize - lbitpos;
3109 /* Make the mask to be used against the extracted field. */
3110 mask = build_int_2 (~0, ~0);
3111 TREE_TYPE (mask) = unsigned_type;
3112 force_fit_type (mask, 0);
3113 mask = fold_convert (unsigned_type, mask);
3114 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3115 mask = const_binop (RSHIFT_EXPR, mask,
3116 size_int (nbitsize - lbitsize - lbitpos), 0);
3119 /* If not comparing with constant, just rework the comparison
3121 return build2 (code, compare_type,
3122 build2 (BIT_AND_EXPR, unsigned_type,
3123 make_bit_field_ref (linner, unsigned_type,
3124 nbitsize, nbitpos, 1),
3126 build2 (BIT_AND_EXPR, unsigned_type,
3127 make_bit_field_ref (rinner, unsigned_type,
3128 nbitsize, nbitpos, 1),
3131 /* Otherwise, we are handling the constant case. See if the constant is too
3132 big for the field. Warn and return a tree of for 0 (false) if so. We do
3133 this not only for its own sake, but to avoid having to test for this
3134 error case below. If we didn't, we might generate wrong code.
3136 For unsigned fields, the constant shifted right by the field length should
3137 be all zero. For signed fields, the high-order bits should agree with
3142 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3143 fold_convert (unsigned_type, rhs),
3144 size_int (lbitsize), 0)))
3146 warning ("comparison is always %d due to width of bit-field",
3148 return constant_boolean_node (code == NE_EXPR, compare_type);
3153 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3154 size_int (lbitsize - 1), 0);
3155 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3157 warning ("comparison is always %d due to width of bit-field",
3159 return constant_boolean_node (code == NE_EXPR, compare_type);
3163 /* Single-bit compares should always be against zero. */
3164 if (lbitsize == 1 && ! integer_zerop (rhs))
3166 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3167 rhs = fold_convert (type, integer_zero_node);
3170 /* Make a new bitfield reference, shift the constant over the
3171 appropriate number of bits and mask it with the computed mask
3172 (in case this was a signed field). If we changed it, make a new one. */
3173 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3176 TREE_SIDE_EFFECTS (lhs) = 1;
3177 TREE_THIS_VOLATILE (lhs) = 1;
3180 rhs = fold (const_binop (BIT_AND_EXPR,
3181 const_binop (LSHIFT_EXPR,
3182 fold_convert (unsigned_type, rhs),
3183 size_int (lbitpos), 0),
3186 return build2 (code, compare_type,
3187 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3191 /* Subroutine for fold_truthop: decode a field reference.
3193 If EXP is a comparison reference, we return the innermost reference.
3195 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3196 set to the starting bit number.
3198 If the innermost field can be completely contained in a mode-sized
3199 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3201 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3202 otherwise it is not changed.
3204 *PUNSIGNEDP is set to the signedness of the field.
3206 *PMASK is set to the mask used. This is either contained in a
3207 BIT_AND_EXPR or derived from the width of the field.
3209 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3211 Return 0 if this is not a component reference or is one that we can't
3212 do anything with. */
3215 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3216 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3217 int *punsignedp, int *pvolatilep,
3218 tree *pmask, tree *pand_mask)
3220 tree outer_type = 0;
3222 tree mask, inner, offset;
3224 unsigned int precision;
3226 /* All the optimizations using this function assume integer fields.
3227 There are problems with FP fields since the type_for_size call
3228 below can fail for, e.g., XFmode. */
3229 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3232 /* We are interested in the bare arrangement of bits, so strip everything
3233 that doesn't affect the machine mode. However, record the type of the
3234 outermost expression if it may matter below. */
3235 if (TREE_CODE (exp) == NOP_EXPR
3236 || TREE_CODE (exp) == CONVERT_EXPR
3237 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3238 outer_type = TREE_TYPE (exp);
3241 if (TREE_CODE (exp) == BIT_AND_EXPR)
3243 and_mask = TREE_OPERAND (exp, 1);
3244 exp = TREE_OPERAND (exp, 0);
3245 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3246 if (TREE_CODE (and_mask) != INTEGER_CST)
3250 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3251 punsignedp, pvolatilep);
3252 if ((inner == exp && and_mask == 0)
3253 || *pbitsize < 0 || offset != 0
3254 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3257 /* If the number of bits in the reference is the same as the bitsize of
3258 the outer type, then the outer type gives the signedness. Otherwise
3259 (in case of a small bitfield) the signedness is unchanged. */
3260 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3261 *punsignedp = TYPE_UNSIGNED (outer_type);
3263 /* Compute the mask to access the bitfield. */
3264 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3265 precision = TYPE_PRECISION (unsigned_type);
3267 mask = build_int_2 (~0, ~0);
3268 TREE_TYPE (mask) = unsigned_type;
3269 force_fit_type (mask, 0);
3270 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3271 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3273 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3275 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3276 fold_convert (unsigned_type, and_mask), mask));
3279 *pand_mask = and_mask;
3283 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3287 all_ones_mask_p (tree mask, int size)
3289 tree type = TREE_TYPE (mask);
3290 unsigned int precision = TYPE_PRECISION (type);
3293 tmask = build_int_2 (~0, ~0);
3294 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3295 force_fit_type (tmask, 0);
3297 tree_int_cst_equal (mask,
3298 const_binop (RSHIFT_EXPR,
3299 const_binop (LSHIFT_EXPR, tmask,
3300 size_int (precision - size),
3302 size_int (precision - size), 0));
3305 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3306 represents the sign bit of EXP's type. If EXP represents a sign
3307 or zero extension, also test VAL against the unextended type.
3308 The return value is the (sub)expression whose sign bit is VAL,
3309 or NULL_TREE otherwise. */
3312 sign_bit_p (tree exp, tree val)
3314 unsigned HOST_WIDE_INT mask_lo, lo;
3315 HOST_WIDE_INT mask_hi, hi;
3319 /* Tree EXP must have an integral type. */
3320 t = TREE_TYPE (exp);
3321 if (! INTEGRAL_TYPE_P (t))
3324 /* Tree VAL must be an integer constant. */
3325 if (TREE_CODE (val) != INTEGER_CST
3326 || TREE_CONSTANT_OVERFLOW (val))
3329 width = TYPE_PRECISION (t);
3330 if (width > HOST_BITS_PER_WIDE_INT)
3332 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3335 mask_hi = ((unsigned HOST_WIDE_INT) -1
3336 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3342 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3345 mask_lo = ((unsigned HOST_WIDE_INT) -1
3346 >> (HOST_BITS_PER_WIDE_INT - width));
3349 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3350 treat VAL as if it were unsigned. */
3351 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3352 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3355 /* Handle extension from a narrower type. */
3356 if (TREE_CODE (exp) == NOP_EXPR
3357 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3358 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3363 /* Subroutine for fold_truthop: determine if an operand is simple enough
3364 to be evaluated unconditionally. */
3367 simple_operand_p (tree exp)
3369 /* Strip any conversions that don't change the machine mode. */
3370 while ((TREE_CODE (exp) == NOP_EXPR
3371 || TREE_CODE (exp) == CONVERT_EXPR)
3372 && (TYPE_MODE (TREE_TYPE (exp))
3373 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3374 exp = TREE_OPERAND (exp, 0);
3376 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3378 && ! TREE_ADDRESSABLE (exp)
3379 && ! TREE_THIS_VOLATILE (exp)
3380 && ! DECL_NONLOCAL (exp)
3381 /* Don't regard global variables as simple. They may be
3382 allocated in ways unknown to the compiler (shared memory,
3383 #pragma weak, etc). */
3384 && ! TREE_PUBLIC (exp)
3385 && ! DECL_EXTERNAL (exp)
3386 /* Loading a static variable is unduly expensive, but global
3387 registers aren't expensive. */
3388 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3391 /* The following functions are subroutines to fold_range_test and allow it to
3392 try to change a logical combination of comparisons into a range test.
3395 X == 2 || X == 3 || X == 4 || X == 5
3399 (unsigned) (X - 2) <= 3
3401 We describe each set of comparisons as being either inside or outside
3402 a range, using a variable named like IN_P, and then describe the
3403 range with a lower and upper bound. If one of the bounds is omitted,
3404 it represents either the highest or lowest value of the type.
3406 In the comments below, we represent a range by two numbers in brackets
3407 preceded by a "+" to designate being inside that range, or a "-" to
3408 designate being outside that range, so the condition can be inverted by
3409 flipping the prefix. An omitted bound is represented by a "-". For
3410 example, "- [-, 10]" means being outside the range starting at the lowest
3411 possible value and ending at 10, in other words, being greater than 10.
3412 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3415 We set up things so that the missing bounds are handled in a consistent
3416 manner so neither a missing bound nor "true" and "false" need to be
3417 handled using a special case. */
3419 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3420 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3421 and UPPER1_P are nonzero if the respective argument is an upper bound
3422 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3423 must be specified for a comparison. ARG1 will be converted to ARG0's
3424 type if both are specified. */
3427 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3428 tree arg1, int upper1_p)
3434 /* If neither arg represents infinity, do the normal operation.
3435 Else, if not a comparison, return infinity. Else handle the special
3436 comparison rules. Note that most of the cases below won't occur, but
3437 are handled for consistency. */
3439 if (arg0 != 0 && arg1 != 0)
3441 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3442 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3444 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3447 if (TREE_CODE_CLASS (code) != '<')
3450 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3451 for neither. In real maths, we cannot assume open ended ranges are
3452 the same. But, this is computer arithmetic, where numbers are finite.
3453 We can therefore make the transformation of any unbounded range with
3454 the value Z, Z being greater than any representable number. This permits
3455 us to treat unbounded ranges as equal. */
3456 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3457 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3461 result = sgn0 == sgn1;
3464 result = sgn0 != sgn1;
3467 result = sgn0 < sgn1;
3470 result = sgn0 <= sgn1;
3473 result = sgn0 > sgn1;
3476 result = sgn0 >= sgn1;
3482 return constant_boolean_node (result, type);
3485 /* Given EXP, a logical expression, set the range it is testing into
3486 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3487 actually being tested. *PLOW and *PHIGH will be made of the same type
3488 as the returned expression. If EXP is not a comparison, we will most
3489 likely not be returning a useful value and range. */
3492 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3494 enum tree_code code;
3495 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3496 tree orig_type = NULL_TREE;
3498 tree low, high, n_low, n_high;
3500 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3501 and see if we can refine the range. Some of the cases below may not
3502 happen, but it doesn't seem worth worrying about this. We "continue"
3503 the outer loop when we've changed something; otherwise we "break"
3504 the switch, which will "break" the while. */
3507 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3511 code = TREE_CODE (exp);
3513 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3515 if (first_rtl_op (code) > 0)
3516 arg0 = TREE_OPERAND (exp, 0);
3517 if (TREE_CODE_CLASS (code) == '<'
3518 || TREE_CODE_CLASS (code) == '1'
3519 || TREE_CODE_CLASS (code) == '2')
3520 type = TREE_TYPE (arg0);
3521 if (TREE_CODE_CLASS (code) == '2'
3522 || TREE_CODE_CLASS (code) == '<'
3523 || (TREE_CODE_CLASS (code) == 'e'
3524 && TREE_CODE_LENGTH (code) > 1))
3525 arg1 = TREE_OPERAND (exp, 1);
3528 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3529 lose a cast by accident. */
3530 if (type != NULL_TREE && orig_type == NULL_TREE)
3535 case TRUTH_NOT_EXPR:
3536 in_p = ! in_p, exp = arg0;
3539 case EQ_EXPR: case NE_EXPR:
3540 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3541 /* We can only do something if the range is testing for zero
3542 and if the second operand is an integer constant. Note that
3543 saying something is "in" the range we make is done by
3544 complementing IN_P since it will set in the initial case of
3545 being not equal to zero; "out" is leaving it alone. */
3546 if (low == 0 || high == 0
3547 || ! integer_zerop (low) || ! integer_zerop (high)
3548 || TREE_CODE (arg1) != INTEGER_CST)
3553 case NE_EXPR: /* - [c, c] */
3556 case EQ_EXPR: /* + [c, c] */
3557 in_p = ! in_p, low = high = arg1;
3559 case GT_EXPR: /* - [-, c] */
3560 low = 0, high = arg1;
3562 case GE_EXPR: /* + [c, -] */
3563 in_p = ! in_p, low = arg1, high = 0;
3565 case LT_EXPR: /* - [c, -] */
3566 low = arg1, high = 0;
3568 case LE_EXPR: /* + [-, c] */
3569 in_p = ! in_p, low = 0, high = arg1;
3577 /* If this is an unsigned comparison, we also know that EXP is
3578 greater than or equal to zero. We base the range tests we make
3579 on that fact, so we record it here so we can parse existing
3581 if (TYPE_UNSIGNED (type) && (low == 0 || high == 0))
3583 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3584 1, fold_convert (type, integer_zero_node),
3588 in_p = n_in_p, low = n_low, high = n_high;
3590 /* If the high bound is missing, but we have a nonzero low
3591 bound, reverse the range so it goes from zero to the low bound
3593 if (high == 0 && low && ! integer_zerop (low))
3596 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3597 integer_one_node, 0);
3598 low = fold_convert (type, integer_zero_node);
3604 /* (-x) IN [a,b] -> x in [-b, -a] */
3605 n_low = range_binop (MINUS_EXPR, type,
3606 fold_convert (type, integer_zero_node),
3608 n_high = range_binop (MINUS_EXPR, type,
3609 fold_convert (type, integer_zero_node),
3611 low = n_low, high = n_high;
3617 exp = build2 (MINUS_EXPR, type, negate_expr (arg0),
3618 fold_convert (type, integer_one_node));
3621 case PLUS_EXPR: case MINUS_EXPR:
3622 if (TREE_CODE (arg1) != INTEGER_CST)
3625 /* If EXP is signed, any overflow in the computation is undefined,
3626 so we don't worry about it so long as our computations on
3627 the bounds don't overflow. For unsigned, overflow is defined
3628 and this is exactly the right thing. */
3629 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3630 type, low, 0, arg1, 0);
3631 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3632 type, high, 1, arg1, 0);
3633 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3634 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3637 /* Check for an unsigned range which has wrapped around the maximum
3638 value thus making n_high < n_low, and normalize it. */
3639 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3641 low = range_binop (PLUS_EXPR, type, n_high, 0,
3642 integer_one_node, 0);
3643 high = range_binop (MINUS_EXPR, type, n_low, 0,
3644 integer_one_node, 0);
3646 /* If the range is of the form +/- [ x+1, x ], we won't
3647 be able to normalize it. But then, it represents the
3648 whole range or the empty set, so make it
3650 if (tree_int_cst_equal (n_low, low)
3651 && tree_int_cst_equal (n_high, high))
3657 low = n_low, high = n_high;
3662 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3663 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3666 if (! INTEGRAL_TYPE_P (type)
3667 || (low != 0 && ! int_fits_type_p (low, type))
3668 || (high != 0 && ! int_fits_type_p (high, type)))
3671 n_low = low, n_high = high;
3674 n_low = fold_convert (type, n_low);
3677 n_high = fold_convert (type, n_high);
3679 /* If we're converting from an unsigned to a signed type,
3680 we will be doing the comparison as unsigned. The tests above
3681 have already verified that LOW and HIGH are both positive.
3683 So we have to make sure that the original unsigned value will
3684 be interpreted as positive. */
3685 if (TYPE_UNSIGNED (type) && ! TYPE_UNSIGNED (TREE_TYPE (exp)))
3687 tree equiv_type = lang_hooks.types.type_for_mode
3688 (TYPE_MODE (type), 1);
3691 /* A range without an upper bound is, naturally, unbounded.
3692 Since convert would have cropped a very large value, use
3693 the max value for the destination type. */
3695 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3696 : TYPE_MAX_VALUE (type);
3698 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3699 high_positive = fold (build2 (RSHIFT_EXPR, type,
3703 integer_one_node)));
3705 /* If the low bound is specified, "and" the range with the
3706 range for which the original unsigned value will be
3710 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3711 1, n_low, n_high, 1,
3712 fold_convert (type, integer_zero_node),
3716 in_p = (n_in_p == in_p);
3720 /* Otherwise, "or" the range with the range of the input
3721 that will be interpreted as negative. */
3722 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3723 0, n_low, n_high, 1,
3724 fold_convert (type, integer_zero_node),
3728 in_p = (in_p != n_in_p);
3733 low = n_low, high = n_high;
3743 /* If EXP is a constant, we can evaluate whether this is true or false. */
3744 if (TREE_CODE (exp) == INTEGER_CST)
3746 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3748 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3754 *pin_p = in_p, *plow = low, *phigh = high;
3758 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3759 type, TYPE, return an expression to test if EXP is in (or out of, depending
3760 on IN_P) the range. */
3763 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3765 tree etype = TREE_TYPE (exp);
3769 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3770 return invert_truthvalue (value);
3772 if (low == 0 && high == 0)
3773 return fold_convert (type, integer_one_node);
3776 return fold (build2 (LE_EXPR, type, exp, high));
3779 return fold (build2 (GE_EXPR, type, exp, low));
3781 if (operand_equal_p (low, high, 0))
3782 return fold (build2 (EQ_EXPR, type, exp, low));
3784 if (integer_zerop (low))
3786 if (! TYPE_UNSIGNED (etype))
3788 etype = lang_hooks.types.unsigned_type (etype);
3789 high = fold_convert (etype, high);
3790 exp = fold_convert (etype, exp);
3792 return build_range_check (type, exp, 1, 0, high);
3795 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3796 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3798 unsigned HOST_WIDE_INT lo;
3802 prec = TYPE_PRECISION (etype);
3803 if (prec <= HOST_BITS_PER_WIDE_INT)
3806 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3810 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3811 lo = (unsigned HOST_WIDE_INT) -1;
3814 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3816 if (TYPE_UNSIGNED (etype))
3818 etype = lang_hooks.types.signed_type (etype);
3819 exp = fold_convert (etype, exp);
3821 return fold (build2 (GT_EXPR, type, exp,
3822 fold_convert (etype, integer_zero_node)));
3826 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3827 && ! TREE_OVERFLOW (value))
3828 return build_range_check (type,
3829 fold (build2 (MINUS_EXPR, etype, exp, low)),
3830 1, fold_convert (etype, integer_zero_node),
3836 /* Given two ranges, see if we can merge them into one. Return 1 if we
3837 can, 0 if we can't. Set the output range into the specified parameters. */
3840 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3841 tree high0, int in1_p, tree low1, tree high1)
3849 int lowequal = ((low0 == 0 && low1 == 0)
3850 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3851 low0, 0, low1, 0)));
3852 int highequal = ((high0 == 0 && high1 == 0)
3853 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3854 high0, 1, high1, 1)));
3856 /* Make range 0 be the range that starts first, or ends last if they
3857 start at the same value. Swap them if it isn't. */
3858 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3861 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3862 high1, 1, high0, 1))))
3864 temp = in0_p, in0_p = in1_p, in1_p = temp;
3865 tem = low0, low0 = low1, low1 = tem;
3866 tem = high0, high0 = high1, high1 = tem;
3869 /* Now flag two cases, whether the ranges are disjoint or whether the
3870 second range is totally subsumed in the first. Note that the tests
3871 below are simplified by the ones above. */
3872 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3873 high0, 1, low1, 0));
3874 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3875 high1, 1, high0, 1));
3877 /* We now have four cases, depending on whether we are including or
3878 excluding the two ranges. */
3881 /* If they don't overlap, the result is false. If the second range
3882 is a subset it is the result. Otherwise, the range is from the start
3883 of the second to the end of the first. */
3885 in_p = 0, low = high = 0;
3887 in_p = 1, low = low1, high = high1;
3889 in_p = 1, low = low1, high = high0;
3892 else if (in0_p && ! in1_p)
3894 /* If they don't overlap, the result is the first range. If they are
3895 equal, the result is false. If the second range is a subset of the
3896 first, and the ranges begin at the same place, we go from just after
3897 the end of the first range to the end of the second. If the second
3898 range is not a subset of the first, or if it is a subset and both
3899 ranges end at the same place, the range starts at the start of the
3900 first range and ends just before the second range.
3901 Otherwise, we can't describe this as a single range. */
3903 in_p = 1, low = low0, high = high0;
3904 else if (lowequal && highequal)
3905 in_p = 0, low = high = 0;
3906 else if (subset && lowequal)
3908 in_p = 1, high = high0;
3909 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3910 integer_one_node, 0);
3912 else if (! subset || highequal)
3914 in_p = 1, low = low0;
3915 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3916 integer_one_node, 0);
3922 else if (! in0_p && in1_p)
3924 /* If they don't overlap, the result is the second range. If the second
3925 is a subset of the first, the result is false. Otherwise,
3926 the range starts just after the first range and ends at the
3927 end of the second. */
3929 in_p = 1, low = low1, high = high1;
3930 else if (subset || highequal)
3931 in_p = 0, low = high = 0;
3934 in_p = 1, high = high1;
3935 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3936 integer_one_node, 0);
3942 /* The case where we are excluding both ranges. Here the complex case
3943 is if they don't overlap. In that case, the only time we have a
3944 range is if they are adjacent. If the second is a subset of the
3945 first, the result is the first. Otherwise, the range to exclude
3946 starts at the beginning of the first range and ends at the end of the
3950 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3951 range_binop (PLUS_EXPR, NULL_TREE,
3953 integer_one_node, 1),
3955 in_p = 0, low = low0, high = high1;
3960 in_p = 0, low = low0, high = high0;
3962 in_p = 0, low = low0, high = high1;
3965 *pin_p = in_p, *plow = low, *phigh = high;
3969 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3970 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3973 /* EXP is some logical combination of boolean tests. See if we can
3974 merge it into some range test. Return the new tree if so. */
3977 fold_range_test (tree exp)
3979 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3980 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3981 int in0_p, in1_p, in_p;
3982 tree low0, low1, low, high0, high1, high;
3983 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3984 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3987 /* If this is an OR operation, invert both sides; we will invert
3988 again at the end. */
3990 in0_p = ! in0_p, in1_p = ! in1_p;
3992 /* If both expressions are the same, if we can merge the ranges, and we
3993 can build the range test, return it or it inverted. If one of the
3994 ranges is always true or always false, consider it to be the same
3995 expression as the other. */
3996 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3997 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3999 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4001 : rhs != 0 ? rhs : integer_zero_node,
4003 return or_op ? invert_truthvalue (tem) : tem;
4005 /* On machines where the branch cost is expensive, if this is a
4006 short-circuited branch and the underlying object on both sides
4007 is the same, make a non-short-circuit operation. */
4008 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4009 && lhs != 0 && rhs != 0
4010 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4011 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4012 && operand_equal_p (lhs, rhs, 0))
4014 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4015 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4016 which cases we can't do this. */
4017 if (simple_operand_p (lhs))
4018 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4019 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4020 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4021 TREE_OPERAND (exp, 1));
4023 else if (lang_hooks.decls.global_bindings_p () == 0
4024 && ! CONTAINS_PLACEHOLDER_P (lhs))
4026 tree common = save_expr (lhs);
4028 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4029 or_op ? ! in0_p : in0_p,
4031 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4032 or_op ? ! in1_p : in1_p,
4034 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4035 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4036 TREE_TYPE (exp), lhs, rhs);
4043 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4044 bit value. Arrange things so the extra bits will be set to zero if and
4045 only if C is signed-extended to its full width. If MASK is nonzero,
4046 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4049 unextend (tree c, int p, int unsignedp, tree mask)
4051 tree type = TREE_TYPE (c);
4052 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4055 if (p == modesize || unsignedp)
4058 /* We work by getting just the sign bit into the low-order bit, then
4059 into the high-order bit, then sign-extend. We then XOR that value
4061 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4062 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4064 /* We must use a signed type in order to get an arithmetic right shift.
4065 However, we must also avoid introducing accidental overflows, so that
4066 a subsequent call to integer_zerop will work. Hence we must
4067 do the type conversion here. At this point, the constant is either
4068 zero or one, and the conversion to a signed type can never overflow.
4069 We could get an overflow if this conversion is done anywhere else. */
4070 if (TYPE_UNSIGNED (type))
4071 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4073 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4074 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4076 temp = const_binop (BIT_AND_EXPR, temp,
4077 fold_convert (TREE_TYPE (c), mask), 0);
4078 /* If necessary, convert the type back to match the type of C. */
4079 if (TYPE_UNSIGNED (type))
4080 temp = fold_convert (type, temp);
4082 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4085 /* Find ways of folding logical expressions of LHS and RHS:
4086 Try to merge two comparisons to the same innermost item.
4087 Look for range tests like "ch >= '0' && ch <= '9'".
4088 Look for combinations of simple terms on machines with expensive branches
4089 and evaluate the RHS unconditionally.
4091 For example, if we have p->a == 2 && p->b == 4 and we can make an
4092 object large enough to span both A and B, we can do this with a comparison
4093 against the object ANDed with the a mask.
4095 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4096 operations to do this with one comparison.
4098 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4099 function and the one above.
4101 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4102 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4104 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4107 We return the simplified tree or 0 if no optimization is possible. */
4110 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4112 /* If this is the "or" of two comparisons, we can do something if
4113 the comparisons are NE_EXPR. If this is the "and", we can do something
4114 if the comparisons are EQ_EXPR. I.e.,
4115 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4117 WANTED_CODE is this operation code. For single bit fields, we can
4118 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4119 comparison for one-bit fields. */
4121 enum tree_code wanted_code;
4122 enum tree_code lcode, rcode;
4123 tree ll_arg, lr_arg, rl_arg, rr_arg;
4124 tree ll_inner, lr_inner, rl_inner, rr_inner;
4125 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4126 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4127 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4128 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4129 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4130 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4131 enum machine_mode lnmode, rnmode;
4132 tree ll_mask, lr_mask, rl_mask, rr_mask;
4133 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4134 tree l_const, r_const;
4135 tree lntype, rntype, result;
4136 int first_bit, end_bit;
4139 /* Start by getting the comparison codes. Fail if anything is volatile.
4140 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4141 it were surrounded with a NE_EXPR. */
4143 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4146 lcode = TREE_CODE (lhs);
4147 rcode = TREE_CODE (rhs);
4149 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4151 lhs = build2 (NE_EXPR, truth_type, lhs, integer_zero_node);
4155 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4157 rhs = build2 (NE_EXPR, truth_type, rhs, integer_zero_node);
4161 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4164 ll_arg = TREE_OPERAND (lhs, 0);
4165 lr_arg = TREE_OPERAND (lhs, 1);
4166 rl_arg = TREE_OPERAND (rhs, 0);
4167 rr_arg = TREE_OPERAND (rhs, 1);
4169 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4170 if (simple_operand_p (ll_arg)
4171 && simple_operand_p (lr_arg))
4174 if (operand_equal_p (ll_arg, rl_arg, 0)
4175 && operand_equal_p (lr_arg, rr_arg, 0))
4177 result = combine_comparisons (code, lcode, rcode,
4178 truth_type, ll_arg, lr_arg);
4182 else if (operand_equal_p (ll_arg, rr_arg, 0)
4183 && operand_equal_p (lr_arg, rl_arg, 0))
4185 result = combine_comparisons (code, lcode,
4186 swap_tree_comparison (rcode),
4187 truth_type, ll_arg, lr_arg);
4193 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4194 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4196 /* If the RHS can be evaluated unconditionally and its operands are
4197 simple, it wins to evaluate the RHS unconditionally on machines
4198 with expensive branches. In this case, this isn't a comparison
4199 that can be merged. Avoid doing this if the RHS is a floating-point
4200 comparison since those can trap. */
4202 if (BRANCH_COST >= 2
4203 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4204 && simple_operand_p (rl_arg)
4205 && simple_operand_p (rr_arg))
4207 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4208 if (code == TRUTH_OR_EXPR
4209 && lcode == NE_EXPR && integer_zerop (lr_arg)
4210 && rcode == NE_EXPR && integer_zerop (rr_arg)
4211 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4212 return build2 (NE_EXPR, truth_type,
4213 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4215 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4217 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4218 if (code == TRUTH_AND_EXPR
4219 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4220 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4221 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4222 return build2 (EQ_EXPR, truth_type,
4223 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4225 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4227 return build2 (code, truth_type, lhs, rhs);
4230 /* See if the comparisons can be merged. Then get all the parameters for
4233 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4234 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4238 ll_inner = decode_field_reference (ll_arg,
4239 &ll_bitsize, &ll_bitpos, &ll_mode,
4240 &ll_unsignedp, &volatilep, &ll_mask,
4242 lr_inner = decode_field_reference (lr_arg,
4243 &lr_bitsize, &lr_bitpos, &lr_mode,
4244 &lr_unsignedp, &volatilep, &lr_mask,
4246 rl_inner = decode_field_reference (rl_arg,
4247 &rl_bitsize, &rl_bitpos, &rl_mode,
4248 &rl_unsignedp, &volatilep, &rl_mask,
4250 rr_inner = decode_field_reference (rr_arg,
4251 &rr_bitsize, &rr_bitpos, &rr_mode,
4252 &rr_unsignedp, &volatilep, &rr_mask,
4255 /* It must be true that the inner operation on the lhs of each
4256 comparison must be the same if we are to be able to do anything.
4257 Then see if we have constants. If not, the same must be true for
4259 if (volatilep || ll_inner == 0 || rl_inner == 0
4260 || ! operand_equal_p (ll_inner, rl_inner, 0))
4263 if (TREE_CODE (lr_arg) == INTEGER_CST
4264 && TREE_CODE (rr_arg) == INTEGER_CST)
4265 l_const = lr_arg, r_const = rr_arg;
4266 else if (lr_inner == 0 || rr_inner == 0
4267 || ! operand_equal_p (lr_inner, rr_inner, 0))
4270 l_const = r_const = 0;
4272 /* If either comparison code is not correct for our logical operation,
4273 fail. However, we can convert a one-bit comparison against zero into
4274 the opposite comparison against that bit being set in the field. */
4276 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4277 if (lcode != wanted_code)
4279 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4281 /* Make the left operand unsigned, since we are only interested
4282 in the value of one bit. Otherwise we are doing the wrong
4291 /* This is analogous to the code for l_const above. */
4292 if (rcode != wanted_code)
4294 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4303 /* After this point all optimizations will generate bit-field
4304 references, which we might not want. */
4305 if (! lang_hooks.can_use_bit_fields_p ())
4308 /* See if we can find a mode that contains both fields being compared on
4309 the left. If we can't, fail. Otherwise, update all constants and masks
4310 to be relative to a field of that size. */
4311 first_bit = MIN (ll_bitpos, rl_bitpos);
4312 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4313 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4314 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4316 if (lnmode == VOIDmode)
4319 lnbitsize = GET_MODE_BITSIZE (lnmode);
4320 lnbitpos = first_bit & ~ (lnbitsize - 1);
4321 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4322 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4324 if (BYTES_BIG_ENDIAN)
4326 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4327 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4330 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4331 size_int (xll_bitpos), 0);
4332 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4333 size_int (xrl_bitpos), 0);
4337 l_const = fold_convert (lntype, l_const);
4338 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4339 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4340 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4341 fold (build1 (BIT_NOT_EXPR,
4345 warning ("comparison is always %d", wanted_code == NE_EXPR);
4347 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4352 r_const = fold_convert (lntype, r_const);
4353 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4354 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4355 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4356 fold (build1 (BIT_NOT_EXPR,
4360 warning ("comparison is always %d", wanted_code == NE_EXPR);
4362 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4366 /* If the right sides are not constant, do the same for it. Also,
4367 disallow this optimization if a size or signedness mismatch occurs
4368 between the left and right sides. */
4371 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4372 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4373 /* Make sure the two fields on the right
4374 correspond to the left without being swapped. */
4375 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4378 first_bit = MIN (lr_bitpos, rr_bitpos);
4379 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4380 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4381 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4383 if (rnmode == VOIDmode)
4386 rnbitsize = GET_MODE_BITSIZE (rnmode);
4387 rnbitpos = first_bit & ~ (rnbitsize - 1);
4388 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4389 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4391 if (BYTES_BIG_ENDIAN)
4393 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4394 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4397 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4398 size_int (xlr_bitpos), 0);
4399 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4400 size_int (xrr_bitpos), 0);
4402 /* Make a mask that corresponds to both fields being compared.
4403 Do this for both items being compared. If the operands are the
4404 same size and the bits being compared are in the same position
4405 then we can do this by masking both and comparing the masked
4407 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4408 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4409 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4411 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4412 ll_unsignedp || rl_unsignedp);
4413 if (! all_ones_mask_p (ll_mask, lnbitsize))
4414 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4416 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4417 lr_unsignedp || rr_unsignedp);
4418 if (! all_ones_mask_p (lr_mask, rnbitsize))
4419 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4421 return build2 (wanted_code, truth_type, lhs, rhs);
4424 /* There is still another way we can do something: If both pairs of
4425 fields being compared are adjacent, we may be able to make a wider
4426 field containing them both.
4428 Note that we still must mask the lhs/rhs expressions. Furthermore,
4429 the mask must be shifted to account for the shift done by
4430 make_bit_field_ref. */
4431 if ((ll_bitsize + ll_bitpos == rl_bitpos
4432 && lr_bitsize + lr_bitpos == rr_bitpos)
4433 || (ll_bitpos == rl_bitpos + rl_bitsize
4434 && lr_bitpos == rr_bitpos + rr_bitsize))
4438 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4439 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4440 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4441 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4443 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4444 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4445 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4446 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4448 /* Convert to the smaller type before masking out unwanted bits. */
4450 if (lntype != rntype)
4452 if (lnbitsize > rnbitsize)
4454 lhs = fold_convert (rntype, lhs);
4455 ll_mask = fold_convert (rntype, ll_mask);
4458 else if (lnbitsize < rnbitsize)
4460 rhs = fold_convert (lntype, rhs);
4461 lr_mask = fold_convert (lntype, lr_mask);
4466 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4467 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4469 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4470 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4472 return build2 (wanted_code, truth_type, lhs, rhs);
4478 /* Handle the case of comparisons with constants. If there is something in
4479 common between the masks, those bits of the constants must be the same.
4480 If not, the condition is always false. Test for this to avoid generating
4481 incorrect code below. */
4482 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4483 if (! integer_zerop (result)
4484 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4485 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4487 if (wanted_code == NE_EXPR)
4489 warning ("`or' of unmatched not-equal tests is always 1");
4490 return constant_boolean_node (true, truth_type);
4494 warning ("`and' of mutually exclusive equal-tests is always 0");
4495 return constant_boolean_node (false, truth_type);
4499 /* Construct the expression we will return. First get the component
4500 reference we will make. Unless the mask is all ones the width of
4501 that field, perform the mask operation. Then compare with the
4503 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4504 ll_unsignedp || rl_unsignedp);
4506 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4507 if (! all_ones_mask_p (ll_mask, lnbitsize))
4508 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4510 return build2 (wanted_code, truth_type, result,
4511 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4514 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4518 optimize_minmax_comparison (tree t)
4520 tree type = TREE_TYPE (t);
4521 tree arg0 = TREE_OPERAND (t, 0);
4522 enum tree_code op_code;
4523 tree comp_const = TREE_OPERAND (t, 1);
4525 int consts_equal, consts_lt;
4528 STRIP_SIGN_NOPS (arg0);
4530 op_code = TREE_CODE (arg0);
4531 minmax_const = TREE_OPERAND (arg0, 1);
4532 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4533 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4534 inner = TREE_OPERAND (arg0, 0);
4536 /* If something does not permit us to optimize, return the original tree. */
4537 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4538 || TREE_CODE (comp_const) != INTEGER_CST
4539 || TREE_CONSTANT_OVERFLOW (comp_const)
4540 || TREE_CODE (minmax_const) != INTEGER_CST
4541 || TREE_CONSTANT_OVERFLOW (minmax_const))
4544 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4545 and GT_EXPR, doing the rest with recursive calls using logical
4547 switch (TREE_CODE (t))
4549 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4551 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4555 fold (build2 (TRUTH_ORIF_EXPR, type,
4556 optimize_minmax_comparison
4557 (build2 (EQ_EXPR, type, arg0, comp_const)),
4558 optimize_minmax_comparison
4559 (build2 (GT_EXPR, type, arg0, comp_const))));
4562 if (op_code == MAX_EXPR && consts_equal)
4563 /* MAX (X, 0) == 0 -> X <= 0 */
4564 return fold (build2 (LE_EXPR, type, inner, comp_const));
4566 else if (op_code == MAX_EXPR && consts_lt)
4567 /* MAX (X, 0) == 5 -> X == 5 */
4568 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4570 else if (op_code == MAX_EXPR)
4571 /* MAX (X, 0) == -1 -> false */
4572 return omit_one_operand (type, integer_zero_node, inner);
4574 else if (consts_equal)
4575 /* MIN (X, 0) == 0 -> X >= 0 */
4576 return fold (build2 (GE_EXPR, type, inner, comp_const));
4579 /* MIN (X, 0) == 5 -> false */
4580 return omit_one_operand (type, integer_zero_node, inner);
4583 /* MIN (X, 0) == -1 -> X == -1 */
4584 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4587 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4588 /* MAX (X, 0) > 0 -> X > 0
4589 MAX (X, 0) > 5 -> X > 5 */
4590 return fold (build2 (GT_EXPR, type, inner, comp_const));
4592 else if (op_code == MAX_EXPR)
4593 /* MAX (X, 0) > -1 -> true */
4594 return omit_one_operand (type, integer_one_node, inner);
4596 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4597 /* MIN (X, 0) > 0 -> false
4598 MIN (X, 0) > 5 -> false */
4599 return omit_one_operand (type, integer_zero_node, inner);
4602 /* MIN (X, 0) > -1 -> X > -1 */
4603 return fold (build2 (GT_EXPR, type, inner, comp_const));
4610 /* T is an integer expression that is being multiplied, divided, or taken a
4611 modulus (CODE says which and what kind of divide or modulus) by a
4612 constant C. See if we can eliminate that operation by folding it with
4613 other operations already in T. WIDE_TYPE, if non-null, is a type that
4614 should be used for the computation if wider than our type.
4616 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4617 (X * 2) + (Y * 4). We must, however, be assured that either the original
4618 expression would not overflow or that overflow is undefined for the type
4619 in the language in question.
4621 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4622 the machine has a multiply-accumulate insn or that this is part of an
4623 addressing calculation.
4625 If we return a non-null expression, it is an equivalent form of the
4626 original computation, but need not be in the original type. */
4629 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4631 /* To avoid exponential search depth, refuse to allow recursion past
4632 three levels. Beyond that (1) it's highly unlikely that we'll find
4633 something interesting and (2) we've probably processed it before
4634 when we built the inner expression. */
4643 ret = extract_muldiv_1 (t, c, code, wide_type);
4650 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4652 tree type = TREE_TYPE (t);
4653 enum tree_code tcode = TREE_CODE (t);
4654 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4655 > GET_MODE_SIZE (TYPE_MODE (type)))
4656 ? wide_type : type);
4658 int same_p = tcode == code;
4659 tree op0 = NULL_TREE, op1 = NULL_TREE;
4661 /* Don't deal with constants of zero here; they confuse the code below. */
4662 if (integer_zerop (c))
4665 if (TREE_CODE_CLASS (tcode) == '1')
4666 op0 = TREE_OPERAND (t, 0);
4668 if (TREE_CODE_CLASS (tcode) == '2')
4669 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4671 /* Note that we need not handle conditional operations here since fold
4672 already handles those cases. So just do arithmetic here. */
4676 /* For a constant, we can always simplify if we are a multiply
4677 or (for divide and modulus) if it is a multiple of our constant. */
4678 if (code == MULT_EXPR
4679 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4680 return const_binop (code, fold_convert (ctype, t),
4681 fold_convert (ctype, c), 0);
4684 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4685 /* If op0 is an expression ... */
4686 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4687 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4688 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4689 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4690 /* ... and is unsigned, and its type is smaller than ctype,
4691 then we cannot pass through as widening. */
4692 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
4693 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4694 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4695 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4696 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4697 /* ... or its type is larger than ctype,
4698 then we cannot pass through this truncation. */
4699 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4700 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4701 /* ... or signedness changes for division or modulus,
4702 then we cannot pass through this conversion. */
4703 || (code != MULT_EXPR
4704 && (TYPE_UNSIGNED (ctype)
4705 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
4708 /* Pass the constant down and see if we can make a simplification. If
4709 we can, replace this expression with the inner simplification for
4710 possible later conversion to our or some other type. */
4711 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4712 && TREE_CODE (t2) == INTEGER_CST
4713 && ! TREE_CONSTANT_OVERFLOW (t2)
4714 && (0 != (t1 = extract_muldiv (op0, t2, code,
4716 ? ctype : NULL_TREE))))
4720 case NEGATE_EXPR: case ABS_EXPR:
4721 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4722 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4725 case MIN_EXPR: case MAX_EXPR:
4726 /* If widening the type changes the signedness, then we can't perform
4727 this optimization as that changes the result. */
4728 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
4731 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4732 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4733 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4735 if (tree_int_cst_sgn (c) < 0)
4736 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4738 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4739 fold_convert (ctype, t2)));
4743 case LSHIFT_EXPR: case RSHIFT_EXPR:
4744 /* If the second operand is constant, this is a multiplication
4745 or floor division, by a power of two, so we can treat it that
4746 way unless the multiplier or divisor overflows. */
4747 if (TREE_CODE (op1) == INTEGER_CST
4748 /* const_binop may not detect overflow correctly,
4749 so check for it explicitly here. */
4750 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4751 && TREE_INT_CST_HIGH (op1) == 0
4752 && 0 != (t1 = fold_convert (ctype,
4753 const_binop (LSHIFT_EXPR,
4756 && ! TREE_OVERFLOW (t1))
4757 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
4758 ? MULT_EXPR : FLOOR_DIV_EXPR,
4759 ctype, fold_convert (ctype, op0), t1),
4760 c, code, wide_type);
4763 case PLUS_EXPR: case MINUS_EXPR:
4764 /* See if we can eliminate the operation on both sides. If we can, we
4765 can return a new PLUS or MINUS. If we can't, the only remaining
4766 cases where we can do anything are if the second operand is a
4768 t1 = extract_muldiv (op0, c, code, wide_type);
4769 t2 = extract_muldiv (op1, c, code, wide_type);
4770 if (t1 != 0 && t2 != 0
4771 && (code == MULT_EXPR
4772 /* If not multiplication, we can only do this if both operands
4773 are divisible by c. */
4774 || (multiple_of_p (ctype, op0, c)
4775 && multiple_of_p (ctype, op1, c))))
4776 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4777 fold_convert (ctype, t2)));
4779 /* If this was a subtraction, negate OP1 and set it to be an addition.
4780 This simplifies the logic below. */
4781 if (tcode == MINUS_EXPR)
4782 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4784 if (TREE_CODE (op1) != INTEGER_CST)
4787 /* If either OP1 or C are negative, this optimization is not safe for
4788 some of the division and remainder types while for others we need
4789 to change the code. */
4790 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4792 if (code == CEIL_DIV_EXPR)
4793 code = FLOOR_DIV_EXPR;
4794 else if (code == FLOOR_DIV_EXPR)
4795 code = CEIL_DIV_EXPR;
4796 else if (code != MULT_EXPR
4797 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4801 /* If it's a multiply or a division/modulus operation of a multiple
4802 of our constant, do the operation and verify it doesn't overflow. */
4803 if (code == MULT_EXPR
4804 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4806 op1 = const_binop (code, fold_convert (ctype, op1),
4807 fold_convert (ctype, c), 0);
4808 /* We allow the constant to overflow with wrapping semantics. */
4810 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4816 /* If we have an unsigned type is not a sizetype, we cannot widen
4817 the operation since it will change the result if the original
4818 computation overflowed. */
4819 if (TYPE_UNSIGNED (ctype)
4820 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4824 /* If we were able to eliminate our operation from the first side,
4825 apply our operation to the second side and reform the PLUS. */
4826 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4827 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
4829 /* The last case is if we are a multiply. In that case, we can
4830 apply the distributive law to commute the multiply and addition
4831 if the multiplication of the constants doesn't overflow. */
4832 if (code == MULT_EXPR)
4833 return fold (build2 (tcode, ctype,
4834 fold (build2 (code, ctype,
4835 fold_convert (ctype, op0),
4836 fold_convert (ctype, c))),
4842 /* We have a special case here if we are doing something like
4843 (C * 8) % 4 since we know that's zero. */
4844 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4845 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4846 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4847 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4848 return omit_one_operand (type, integer_zero_node, op0);
4850 /* ... fall through ... */
4852 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4853 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4854 /* If we can extract our operation from the LHS, do so and return a
4855 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4856 do something only if the second operand is a constant. */
4858 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4859 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4860 fold_convert (ctype, op1)));
4861 else if (tcode == MULT_EXPR && code == MULT_EXPR
4862 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4863 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
4864 fold_convert (ctype, t1)));
4865 else if (TREE_CODE (op1) != INTEGER_CST)
4868 /* If these are the same operation types, we can associate them
4869 assuming no overflow. */
4871 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4872 fold_convert (ctype, c), 0))
4873 && ! TREE_OVERFLOW (t1))
4874 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
4876 /* If these operations "cancel" each other, we have the main
4877 optimizations of this pass, which occur when either constant is a
4878 multiple of the other, in which case we replace this with either an
4879 operation or CODE or TCODE.
4881 If we have an unsigned type that is not a sizetype, we cannot do
4882 this since it will change the result if the original computation
4884 if ((! TYPE_UNSIGNED (ctype)
4885 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4887 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4888 || (tcode == MULT_EXPR
4889 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4890 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4892 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4893 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
4894 fold_convert (ctype,
4895 const_binop (TRUNC_DIV_EXPR,
4897 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4898 return fold (build2 (code, ctype, fold_convert (ctype, op0),
4899 fold_convert (ctype,
4900 const_binop (TRUNC_DIV_EXPR,
4912 /* Return a node which has the indicated constant VALUE (either 0 or
4913 1), and is of the indicated TYPE. */
4916 constant_boolean_node (int value, tree type)
4918 if (type == integer_type_node)
4919 return value ? integer_one_node : integer_zero_node;
4920 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4921 return lang_hooks.truthvalue_conversion (value ? integer_one_node
4922 : integer_zero_node);
4925 tree t = build_int_2 (value, 0);
4927 TREE_TYPE (t) = type;
4932 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4933 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4934 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4935 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4936 COND is the first argument to CODE; otherwise (as in the example
4937 given here), it is the second argument. TYPE is the type of the
4938 original expression. Return NULL_TREE if no simplification is
4942 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4943 tree cond, tree arg, int cond_first_p)
4945 tree test, true_value, false_value;
4946 tree lhs = NULL_TREE;
4947 tree rhs = NULL_TREE;
4949 /* This transformation is only worthwhile if we don't have to wrap
4950 arg in a SAVE_EXPR, and the operation can be simplified on atleast
4951 one of the branches once its pushed inside the COND_EXPR. */
4952 if (!TREE_CONSTANT (arg))
4955 if (TREE_CODE (cond) == COND_EXPR)
4957 test = TREE_OPERAND (cond, 0);
4958 true_value = TREE_OPERAND (cond, 1);
4959 false_value = TREE_OPERAND (cond, 2);
4960 /* If this operand throws an expression, then it does not make
4961 sense to try to perform a logical or arithmetic operation
4963 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4965 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4970 tree testtype = TREE_TYPE (cond);
4972 true_value = constant_boolean_node (true, testtype);
4973 false_value = constant_boolean_node (false, testtype);
4977 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
4978 : build2 (code, type, arg, true_value));
4980 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
4981 : build2 (code, type, arg, false_value));
4983 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
4984 return fold_convert (type, test);
4988 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4990 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4991 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4992 ADDEND is the same as X.
4994 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4995 and finite. The problematic cases are when X is zero, and its mode
4996 has signed zeros. In the case of rounding towards -infinity,
4997 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4998 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5001 fold_real_zero_addition_p (tree type, tree addend, int negate)
5003 if (!real_zerop (addend))
5006 /* Don't allow the fold with -fsignaling-nans. */
5007 if (HONOR_SNANS (TYPE_MODE (type)))
5010 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5011 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5014 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5015 if (TREE_CODE (addend) == REAL_CST
5016 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5019 /* The mode has signed zeros, and we have to honor their sign.
5020 In this situation, there is only one case we can return true for.
5021 X - 0 is the same as X unless rounding towards -infinity is
5023 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5026 /* Subroutine of fold() that checks comparisons of built-in math
5027 functions against real constants.
5029 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5030 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5031 is the type of the result and ARG0 and ARG1 are the operands of the
5032 comparison. ARG1 must be a TREE_REAL_CST.
5034 The function returns the constant folded tree if a simplification
5035 can be made, and NULL_TREE otherwise. */
5038 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5039 tree type, tree arg0, tree arg1)
5043 if (BUILTIN_SQRT_P (fcode))
5045 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5046 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5048 c = TREE_REAL_CST (arg1);
5049 if (REAL_VALUE_NEGATIVE (c))
5051 /* sqrt(x) < y is always false, if y is negative. */
5052 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5053 return omit_one_operand (type, integer_zero_node, arg);
5055 /* sqrt(x) > y is always true, if y is negative and we
5056 don't care about NaNs, i.e. negative values of x. */
5057 if (code == NE_EXPR || !HONOR_NANS (mode))
5058 return omit_one_operand (type, integer_one_node, arg);
5060 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5061 return fold (build2 (GE_EXPR, type, arg,
5062 build_real (TREE_TYPE (arg), dconst0)));
5064 else if (code == GT_EXPR || code == GE_EXPR)
5068 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5069 real_convert (&c2, mode, &c2);
5071 if (REAL_VALUE_ISINF (c2))
5073 /* sqrt(x) > y is x == +Inf, when y is very large. */
5074 if (HONOR_INFINITIES (mode))
5075 return fold (build2 (EQ_EXPR, type, arg,
5076 build_real (TREE_TYPE (arg), c2)));
5078 /* sqrt(x) > y is always false, when y is very large
5079 and we don't care about infinities. */
5080 return omit_one_operand (type, integer_zero_node, arg);
5083 /* sqrt(x) > c is the same as x > c*c. */
5084 return fold (build2 (code, type, arg,
5085 build_real (TREE_TYPE (arg), c2)));
5087 else if (code == LT_EXPR || code == LE_EXPR)
5091 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5092 real_convert (&c2, mode, &c2);
5094 if (REAL_VALUE_ISINF (c2))
5096 /* sqrt(x) < y is always true, when y is a very large
5097 value and we don't care about NaNs or Infinities. */
5098 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5099 return omit_one_operand (type, integer_one_node, arg);
5101 /* sqrt(x) < y is x != +Inf when y is very large and we
5102 don't care about NaNs. */
5103 if (! HONOR_NANS (mode))
5104 return fold (build2 (NE_EXPR, type, arg,
5105 build_real (TREE_TYPE (arg), c2)));
5107 /* sqrt(x) < y is x >= 0 when y is very large and we
5108 don't care about Infinities. */
5109 if (! HONOR_INFINITIES (mode))
5110 return fold (build2 (GE_EXPR, type, arg,
5111 build_real (TREE_TYPE (arg), dconst0)));
5113 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5114 if (lang_hooks.decls.global_bindings_p () != 0
5115 || CONTAINS_PLACEHOLDER_P (arg))
5118 arg = save_expr (arg);
5119 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5120 fold (build2 (GE_EXPR, type, arg,
5121 build_real (TREE_TYPE (arg),
5123 fold (build2 (NE_EXPR, type, arg,
5124 build_real (TREE_TYPE (arg),
5128 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5129 if (! HONOR_NANS (mode))
5130 return fold (build2 (code, type, arg,
5131 build_real (TREE_TYPE (arg), c2)));
5133 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5134 if (lang_hooks.decls.global_bindings_p () == 0
5135 && ! CONTAINS_PLACEHOLDER_P (arg))
5137 arg = save_expr (arg);
5138 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5139 fold (build2 (GE_EXPR, type, arg,
5140 build_real (TREE_TYPE (arg),
5142 fold (build2 (code, type, arg,
5143 build_real (TREE_TYPE (arg),
5152 /* Subroutine of fold() that optimizes comparisons against Infinities,
5153 either +Inf or -Inf.
5155 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5156 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5157 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5159 The function returns the constant folded tree if a simplification
5160 can be made, and NULL_TREE otherwise. */
5163 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5165 enum machine_mode mode;
5166 REAL_VALUE_TYPE max;
5170 mode = TYPE_MODE (TREE_TYPE (arg0));
5172 /* For negative infinity swap the sense of the comparison. */
5173 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5175 code = swap_tree_comparison (code);
5180 /* x > +Inf is always false, if with ignore sNANs. */
5181 if (HONOR_SNANS (mode))
5183 return omit_one_operand (type, integer_zero_node, arg0);
5186 /* x <= +Inf is always true, if we don't case about NaNs. */
5187 if (! HONOR_NANS (mode))
5188 return omit_one_operand (type, integer_one_node, arg0);
5190 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5191 if (lang_hooks.decls.global_bindings_p () == 0
5192 && ! CONTAINS_PLACEHOLDER_P (arg0))
5194 arg0 = save_expr (arg0);
5195 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5201 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5202 real_maxval (&max, neg, mode);
5203 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5204 arg0, build_real (TREE_TYPE (arg0), max)));
5207 /* x < +Inf is always equal to x <= DBL_MAX. */
5208 real_maxval (&max, neg, mode);
5209 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5210 arg0, build_real (TREE_TYPE (arg0), max)));
5213 /* x != +Inf is always equal to !(x > DBL_MAX). */
5214 real_maxval (&max, neg, mode);
5215 if (! HONOR_NANS (mode))
5216 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5217 arg0, build_real (TREE_TYPE (arg0), max)));
5218 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5219 arg0, build_real (TREE_TYPE (arg0), max)));
5220 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5229 /* Subroutine of fold() that optimizes comparisons of a division by
5230 a nonzero integer constant against an integer constant, i.e.
5233 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5234 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5235 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5237 The function returns the constant folded tree if a simplification
5238 can be made, and NULL_TREE otherwise. */
5241 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5243 tree prod, tmp, hi, lo;
5244 tree arg00 = TREE_OPERAND (arg0, 0);
5245 tree arg01 = TREE_OPERAND (arg0, 1);
5246 unsigned HOST_WIDE_INT lpart;
5247 HOST_WIDE_INT hpart;
5250 /* We have to do this the hard way to detect unsigned overflow.
5251 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5252 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5253 TREE_INT_CST_HIGH (arg01),
5254 TREE_INT_CST_LOW (arg1),
5255 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5256 prod = build_int_2 (lpart, hpart);
5257 TREE_TYPE (prod) = TREE_TYPE (arg00);
5258 TREE_OVERFLOW (prod) = force_fit_type (prod, overflow)
5259 || TREE_INT_CST_HIGH (prod) != hpart
5260 || TREE_INT_CST_LOW (prod) != lpart;
5261 TREE_CONSTANT_OVERFLOW (prod) = TREE_OVERFLOW (prod);
5263 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5265 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5268 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5269 overflow = add_double (TREE_INT_CST_LOW (prod),
5270 TREE_INT_CST_HIGH (prod),
5271 TREE_INT_CST_LOW (tmp),
5272 TREE_INT_CST_HIGH (tmp),
5274 hi = build_int_2 (lpart, hpart);
5275 TREE_TYPE (hi) = TREE_TYPE (arg00);
5276 TREE_OVERFLOW (hi) = force_fit_type (hi, overflow)
5277 || TREE_INT_CST_HIGH (hi) != hpart
5278 || TREE_INT_CST_LOW (hi) != lpart
5279 || TREE_OVERFLOW (prod);
5280 TREE_CONSTANT_OVERFLOW (hi) = TREE_OVERFLOW (hi);
5282 else if (tree_int_cst_sgn (arg01) >= 0)
5284 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5285 switch (tree_int_cst_sgn (arg1))
5288 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5293 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5298 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5308 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5309 switch (tree_int_cst_sgn (arg1))
5312 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5317 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5322 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5334 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5335 return omit_one_operand (type, integer_zero_node, arg00);
5336 if (TREE_OVERFLOW (hi))
5337 return fold (build2 (GE_EXPR, type, arg00, lo));
5338 if (TREE_OVERFLOW (lo))
5339 return fold (build2 (LE_EXPR, type, arg00, hi));
5340 return build_range_check (type, arg00, 1, lo, hi);
5343 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5344 return omit_one_operand (type, integer_one_node, arg00);
5345 if (TREE_OVERFLOW (hi))
5346 return fold (build2 (LT_EXPR, type, arg00, lo));
5347 if (TREE_OVERFLOW (lo))
5348 return fold (build2 (GT_EXPR, type, arg00, hi));
5349 return build_range_check (type, arg00, 0, lo, hi);
5352 if (TREE_OVERFLOW (lo))
5353 return omit_one_operand (type, integer_zero_node, arg00);
5354 return fold (build2 (LT_EXPR, type, arg00, lo));
5357 if (TREE_OVERFLOW (hi))
5358 return omit_one_operand (type, integer_one_node, arg00);
5359 return fold (build2 (LE_EXPR, type, arg00, hi));
5362 if (TREE_OVERFLOW (hi))
5363 return omit_one_operand (type, integer_zero_node, arg00);
5364 return fold (build2 (GT_EXPR, type, arg00, hi));
5367 if (TREE_OVERFLOW (lo))
5368 return omit_one_operand (type, integer_one_node, arg00);
5369 return fold (build2 (GE_EXPR, type, arg00, lo));
5379 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5380 equality/inequality test, then return a simplified form of
5381 the test using shifts and logical operations. Otherwise return
5382 NULL. TYPE is the desired result type. */
5385 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5388 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5390 if (code == TRUTH_NOT_EXPR)
5392 code = TREE_CODE (arg0);
5393 if (code != NE_EXPR && code != EQ_EXPR)
5396 /* Extract the arguments of the EQ/NE. */
5397 arg1 = TREE_OPERAND (arg0, 1);
5398 arg0 = TREE_OPERAND (arg0, 0);
5400 /* This requires us to invert the code. */
5401 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5404 /* If this is testing a single bit, we can optimize the test. */
5405 if ((code == NE_EXPR || code == EQ_EXPR)
5406 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5407 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5409 tree inner = TREE_OPERAND (arg0, 0);
5410 tree type = TREE_TYPE (arg0);
5411 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5412 enum machine_mode operand_mode = TYPE_MODE (type);
5414 tree signed_type, unsigned_type, intermediate_type;
5417 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5418 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5419 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5420 if (arg00 != NULL_TREE
5421 /* This is only a win if casting to a signed type is cheap,
5422 i.e. when arg00's type is not a partial mode. */
5423 && TYPE_PRECISION (TREE_TYPE (arg00))
5424 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5426 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5427 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5428 result_type, fold_convert (stype, arg00),
5429 fold_convert (stype, integer_zero_node)));
5432 /* Otherwise we have (A & C) != 0 where C is a single bit,
5433 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5434 Similarly for (A & C) == 0. */
5436 /* If INNER is a right shift of a constant and it plus BITNUM does
5437 not overflow, adjust BITNUM and INNER. */
5438 if (TREE_CODE (inner) == RSHIFT_EXPR
5439 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5440 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5441 && bitnum < TYPE_PRECISION (type)
5442 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5443 bitnum - TYPE_PRECISION (type)))
5445 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5446 inner = TREE_OPERAND (inner, 0);
5449 /* If we are going to be able to omit the AND below, we must do our
5450 operations as unsigned. If we must use the AND, we have a choice.
5451 Normally unsigned is faster, but for some machines signed is. */
5452 #ifdef LOAD_EXTEND_OP
5453 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5458 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5459 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5460 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5461 inner = fold_convert (intermediate_type, inner);
5464 inner = build2 (RSHIFT_EXPR, intermediate_type,
5465 inner, size_int (bitnum));
5467 if (code == EQ_EXPR)
5468 inner = build2 (BIT_XOR_EXPR, intermediate_type,
5469 inner, integer_one_node);
5471 /* Put the AND last so it can combine with more things. */
5472 inner = build2 (BIT_AND_EXPR, intermediate_type,
5473 inner, integer_one_node);
5475 /* Make sure to return the proper type. */
5476 inner = fold_convert (result_type, inner);
5483 /* Check whether we are allowed to reorder operands arg0 and arg1,
5484 such that the evaluation of arg1 occurs before arg0. */
5487 reorder_operands_p (tree arg0, tree arg1)
5489 if (! flag_evaluation_order)
5491 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5493 return ! TREE_SIDE_EFFECTS (arg0)
5494 && ! TREE_SIDE_EFFECTS (arg1);
5497 /* Test whether it is preferable two swap two operands, ARG0 and
5498 ARG1, for example because ARG0 is an integer constant and ARG1
5499 isn't. If REORDER is true, only recommend swapping if we can
5500 evaluate the operands in reverse order. */
5503 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5505 STRIP_SIGN_NOPS (arg0);
5506 STRIP_SIGN_NOPS (arg1);
5508 if (TREE_CODE (arg1) == INTEGER_CST)
5510 if (TREE_CODE (arg0) == INTEGER_CST)
5513 if (TREE_CODE (arg1) == REAL_CST)
5515 if (TREE_CODE (arg0) == REAL_CST)
5518 if (TREE_CODE (arg1) == COMPLEX_CST)
5520 if (TREE_CODE (arg0) == COMPLEX_CST)
5523 if (TREE_CONSTANT (arg1))
5525 if (TREE_CONSTANT (arg0))
5531 if (reorder && flag_evaluation_order
5532 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5540 if (reorder && flag_evaluation_order
5541 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5552 /* Perform constant folding and related simplification of EXPR.
5553 The related simplifications include x*1 => x, x*0 => 0, etc.,
5554 and application of the associative law.
5555 NOP_EXPR conversions may be removed freely (as long as we
5556 are careful not to change the type of the overall expression).
5557 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5558 but we can constant-fold them if they have constant operands. */
5560 #ifdef ENABLE_FOLD_CHECKING
5561 # define fold(x) fold_1 (x)
5562 static tree fold_1 (tree);
5568 const tree t = expr;
5569 const tree type = TREE_TYPE (expr);
5570 tree t1 = NULL_TREE;
5572 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5573 enum tree_code code = TREE_CODE (t);
5574 int kind = TREE_CODE_CLASS (code);
5576 /* WINS will be nonzero when the switch is done
5577 if all operands are constant. */
5580 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5581 Likewise for a SAVE_EXPR that's already been evaluated. */
5582 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5585 /* Return right away if a constant. */
5589 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5593 /* Special case for conversion ops that can have fixed point args. */
5594 arg0 = TREE_OPERAND (t, 0);
5596 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5598 STRIP_SIGN_NOPS (arg0);
5600 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5601 subop = TREE_REALPART (arg0);
5605 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5606 && TREE_CODE (subop) != REAL_CST)
5607 /* Note that TREE_CONSTANT isn't enough:
5608 static var addresses are constant but we can't
5609 do arithmetic on them. */
5612 else if (IS_EXPR_CODE_CLASS (kind))
5614 int len = first_rtl_op (code);
5616 for (i = 0; i < len; i++)
5618 tree op = TREE_OPERAND (t, i);
5622 continue; /* Valid for CALL_EXPR, at least. */
5624 /* Strip any conversions that don't change the mode. This is
5625 safe for every expression, except for a comparison expression
5626 because its signedness is derived from its operands. So, in
5627 the latter case, only strip conversions that don't change the
5630 Note that this is done as an internal manipulation within the
5631 constant folder, in order to find the simplest representation
5632 of the arguments so that their form can be studied. In any
5633 cases, the appropriate type conversions should be put back in
5634 the tree that will get out of the constant folder. */
5636 STRIP_SIGN_NOPS (op);
5640 if (TREE_CODE (op) == COMPLEX_CST)
5641 subop = TREE_REALPART (op);
5645 if (TREE_CODE (subop) != INTEGER_CST
5646 && TREE_CODE (subop) != REAL_CST)
5647 /* Note that TREE_CONSTANT isn't enough:
5648 static var addresses are constant but we can't
5649 do arithmetic on them. */
5659 /* If this is a commutative operation, and ARG0 is a constant, move it
5660 to ARG1 to reduce the number of tests below. */
5661 if (commutative_tree_code (code)
5662 && tree_swap_operands_p (arg0, arg1, true))
5663 return fold (build2 (code, type, TREE_OPERAND (t, 1),
5664 TREE_OPERAND (t, 0)));
5666 /* Now WINS is set as described above,
5667 ARG0 is the first operand of EXPR,
5668 and ARG1 is the second operand (if it has more than one operand).
5670 First check for cases where an arithmetic operation is applied to a
5671 compound, conditional, or comparison operation. Push the arithmetic
5672 operation inside the compound or conditional to see if any folding
5673 can then be done. Convert comparison to conditional for this purpose.
5674 The also optimizes non-constant cases that used to be done in
5677 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5678 one of the operands is a comparison and the other is a comparison, a
5679 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5680 code below would make the expression more complex. Change it to a
5681 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5682 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5684 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5685 || code == EQ_EXPR || code == NE_EXPR)
5686 && ((truth_value_p (TREE_CODE (arg0))
5687 && (truth_value_p (TREE_CODE (arg1))
5688 || (TREE_CODE (arg1) == BIT_AND_EXPR
5689 && integer_onep (TREE_OPERAND (arg1, 1)))))
5690 || (truth_value_p (TREE_CODE (arg1))
5691 && (truth_value_p (TREE_CODE (arg0))
5692 || (TREE_CODE (arg0) == BIT_AND_EXPR
5693 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5695 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5696 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5698 type, fold_convert (boolean_type_node, arg0),
5699 fold_convert (boolean_type_node, arg1)));
5701 if (code == EQ_EXPR)
5702 tem = invert_truthvalue (tem);
5707 if (TREE_CODE_CLASS (code) == '1')
5709 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5710 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5711 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5712 else if (TREE_CODE (arg0) == COND_EXPR)
5714 tree arg01 = TREE_OPERAND (arg0, 1);
5715 tree arg02 = TREE_OPERAND (arg0, 2);
5716 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5717 arg01 = fold (build1 (code, type, arg01));
5718 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5719 arg02 = fold (build1 (code, type, arg02));
5720 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5723 /* If this was a conversion, and all we did was to move into
5724 inside the COND_EXPR, bring it back out. But leave it if
5725 it is a conversion from integer to integer and the
5726 result precision is no wider than a word since such a
5727 conversion is cheap and may be optimized away by combine,
5728 while it couldn't if it were outside the COND_EXPR. Then return
5729 so we don't get into an infinite recursion loop taking the
5730 conversion out and then back in. */
5732 if ((code == NOP_EXPR || code == CONVERT_EXPR
5733 || code == NON_LVALUE_EXPR)
5734 && TREE_CODE (tem) == COND_EXPR
5735 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
5736 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
5737 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
5738 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
5739 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
5740 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
5741 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
5743 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
5744 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
5745 tem = build1 (code, type,
5747 TREE_TYPE (TREE_OPERAND
5748 (TREE_OPERAND (tem, 1), 0)),
5749 TREE_OPERAND (tem, 0),
5750 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
5751 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
5754 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5756 if (TREE_CODE (type) == BOOLEAN_TYPE)
5758 arg0 = copy_node (arg0);
5759 TREE_TYPE (arg0) = type;
5762 else if (TREE_CODE (type) != INTEGER_TYPE)
5763 return fold (build3 (COND_EXPR, type, arg0,
5764 fold (build1 (code, type,
5766 fold (build1 (code, type,
5767 integer_zero_node))));
5770 else if (TREE_CODE_CLASS (code) == '<'
5771 && TREE_CODE (arg0) == COMPOUND_EXPR)
5772 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5773 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
5774 else if (TREE_CODE_CLASS (code) == '<'
5775 && TREE_CODE (arg1) == COMPOUND_EXPR)
5776 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5777 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
5778 else if (TREE_CODE_CLASS (code) == '2'
5779 || TREE_CODE_CLASS (code) == '<')
5781 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5782 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5783 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
5785 if (TREE_CODE (arg1) == COMPOUND_EXPR
5786 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
5787 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5788 fold (build2 (code, type,
5789 arg0, TREE_OPERAND (arg1, 1))));
5791 if (TREE_CODE (arg0) == COND_EXPR
5792 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5794 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5795 /*cond_first_p=*/1);
5796 if (tem != NULL_TREE)
5800 if (TREE_CODE (arg1) == COND_EXPR
5801 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
5803 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5804 /*cond_first_p=*/0);
5805 if (tem != NULL_TREE)
5813 return fold (DECL_INITIAL (t));
5818 case FIX_TRUNC_EXPR:
5820 case FIX_FLOOR_EXPR:
5821 case FIX_ROUND_EXPR:
5822 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
5823 return TREE_OPERAND (t, 0);
5825 /* Handle cases of two conversions in a row. */
5826 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5827 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5829 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5830 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5831 int inside_int = INTEGRAL_TYPE_P (inside_type);
5832 int inside_ptr = POINTER_TYPE_P (inside_type);
5833 int inside_float = FLOAT_TYPE_P (inside_type);
5834 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5835 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
5836 int inter_int = INTEGRAL_TYPE_P (inter_type);
5837 int inter_ptr = POINTER_TYPE_P (inter_type);
5838 int inter_float = FLOAT_TYPE_P (inter_type);
5839 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5840 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
5841 int final_int = INTEGRAL_TYPE_P (type);
5842 int final_ptr = POINTER_TYPE_P (type);
5843 int final_float = FLOAT_TYPE_P (type);
5844 unsigned int final_prec = TYPE_PRECISION (type);
5845 int final_unsignedp = TYPE_UNSIGNED (type);
5847 /* In addition to the cases of two conversions in a row
5848 handled below, if we are converting something to its own
5849 type via an object of identical or wider precision, neither
5850 conversion is needed. */
5851 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
5852 && ((inter_int && final_int) || (inter_float && final_float))
5853 && inter_prec >= final_prec)
5854 return fold (build1 (code, type,
5855 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5857 /* Likewise, if the intermediate and final types are either both
5858 float or both integer, we don't need the middle conversion if
5859 it is wider than the final type and doesn't change the signedness
5860 (for integers). Avoid this if the final type is a pointer
5861 since then we sometimes need the inner conversion. Likewise if
5862 the outer has a precision not equal to the size of its mode. */
5863 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5864 || (inter_float && inside_float))
5865 && inter_prec >= inside_prec
5866 && (inter_float || inter_unsignedp == inside_unsignedp)
5867 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5868 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5870 return fold (build1 (code, type,
5871 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5873 /* If we have a sign-extension of a zero-extended value, we can
5874 replace that by a single zero-extension. */
5875 if (inside_int && inter_int && final_int
5876 && inside_prec < inter_prec && inter_prec < final_prec
5877 && inside_unsignedp && !inter_unsignedp)
5878 return fold (build1 (code, type,
5879 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5881 /* Two conversions in a row are not needed unless:
5882 - some conversion is floating-point (overstrict for now), or
5883 - the intermediate type is narrower than both initial and
5885 - the intermediate type and innermost type differ in signedness,
5886 and the outermost type is wider than the intermediate, or
5887 - the initial type is a pointer type and the precisions of the
5888 intermediate and final types differ, or
5889 - the final type is a pointer type and the precisions of the
5890 initial and intermediate types differ. */
5891 if (! inside_float && ! inter_float && ! final_float
5892 && (inter_prec > inside_prec || inter_prec > final_prec)
5893 && ! (inside_int && inter_int
5894 && inter_unsignedp != inside_unsignedp
5895 && inter_prec < final_prec)
5896 && ((inter_unsignedp && inter_prec > inside_prec)
5897 == (final_unsignedp && final_prec > inter_prec))
5898 && ! (inside_ptr && inter_prec != final_prec)
5899 && ! (final_ptr && inside_prec != inter_prec)
5900 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5901 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5903 return fold (build1 (code, type,
5904 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5907 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5908 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5909 /* Detect assigning a bitfield. */
5910 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5911 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5913 /* Don't leave an assignment inside a conversion
5914 unless assigning a bitfield. */
5915 tree prev = TREE_OPERAND (t, 0);
5916 tem = copy_node (t);
5917 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
5918 /* First do the assignment, then return converted constant. */
5919 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
5920 TREE_NO_WARNING (tem) = 1;
5921 TREE_USED (tem) = 1;
5925 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5926 constants (if x has signed type, the sign bit cannot be set
5927 in c). This folds extension into the BIT_AND_EXPR. */
5928 if (INTEGRAL_TYPE_P (type)
5929 && TREE_CODE (type) != BOOLEAN_TYPE
5930 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5931 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5933 tree and = TREE_OPERAND (t, 0);
5934 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5937 if (TYPE_UNSIGNED (TREE_TYPE (and))
5938 || (TYPE_PRECISION (type)
5939 <= TYPE_PRECISION (TREE_TYPE (and))))
5941 else if (TYPE_PRECISION (TREE_TYPE (and1))
5942 <= HOST_BITS_PER_WIDE_INT
5943 && host_integerp (and1, 1))
5945 unsigned HOST_WIDE_INT cst;
5947 cst = tree_low_cst (and1, 1);
5948 cst &= (HOST_WIDE_INT) -1
5949 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5950 change = (cst == 0);
5951 #ifdef LOAD_EXTEND_OP
5953 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5956 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
5957 and0 = fold_convert (uns, and0);
5958 and1 = fold_convert (uns, and1);
5963 return fold (build2 (BIT_AND_EXPR, type,
5964 fold_convert (type, and0),
5965 fold_convert (type, and1)));
5968 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
5969 T2 being pointers to types of the same size. */
5970 if (POINTER_TYPE_P (TREE_TYPE (t))
5971 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
5972 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
5973 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5975 tree arg00 = TREE_OPERAND (arg0, 0);
5976 tree t0 = TREE_TYPE (t);
5977 tree t1 = TREE_TYPE (arg00);
5978 tree tt0 = TREE_TYPE (t0);
5979 tree tt1 = TREE_TYPE (t1);
5980 tree s0 = TYPE_SIZE (tt0);
5981 tree s1 = TYPE_SIZE (tt1);
5983 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
5984 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
5985 TREE_OPERAND (arg0, 1));
5988 tem = fold_convert_const (code, type, arg0);
5989 return tem ? tem : t;
5991 case VIEW_CONVERT_EXPR:
5992 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5993 return build1 (VIEW_CONVERT_EXPR, type,
5994 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5998 if (TREE_CODE (arg0) == CONSTRUCTOR
5999 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6001 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6003 return TREE_VALUE (m);
6008 if (TREE_CONSTANT (t) != wins)
6010 tem = copy_node (t);
6011 TREE_CONSTANT (tem) = wins;
6012 TREE_INVARIANT (tem) = wins;
6018 if (negate_expr_p (arg0))
6019 return fold_convert (type, negate_expr (arg0));
6023 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6024 return fold_abs_const (arg0, type);
6025 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6026 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6027 /* Convert fabs((double)float) into (double)fabsf(float). */
6028 else if (TREE_CODE (arg0) == NOP_EXPR
6029 && TREE_CODE (type) == REAL_TYPE)
6031 tree targ0 = strip_float_extensions (arg0);
6033 return fold_convert (type, fold (build1 (ABS_EXPR,
6037 else if (tree_expr_nonnegative_p (arg0))
6042 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6043 return fold_convert (type, arg0);
6044 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6045 return build2 (COMPLEX_EXPR, type,
6046 TREE_OPERAND (arg0, 0),
6047 negate_expr (TREE_OPERAND (arg0, 1)));
6048 else if (TREE_CODE (arg0) == COMPLEX_CST)
6049 return build_complex (type, TREE_REALPART (arg0),
6050 negate_expr (TREE_IMAGPART (arg0)));
6051 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6052 return fold (build2 (TREE_CODE (arg0), type,
6053 fold (build1 (CONJ_EXPR, type,
6054 TREE_OPERAND (arg0, 0))),
6055 fold (build1 (CONJ_EXPR, type,
6056 TREE_OPERAND (arg0, 1)))));
6057 else if (TREE_CODE (arg0) == CONJ_EXPR)
6058 return TREE_OPERAND (arg0, 0);
6062 if (TREE_CODE (arg0) == INTEGER_CST)
6063 return fold_not_const (arg0, type);
6064 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6065 return TREE_OPERAND (arg0, 0);
6069 /* A + (-B) -> A - B */
6070 if (TREE_CODE (arg1) == NEGATE_EXPR)
6071 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6072 /* (-A) + B -> B - A */
6073 if (TREE_CODE (arg0) == NEGATE_EXPR
6074 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6075 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6076 if (! FLOAT_TYPE_P (type))
6078 if (integer_zerop (arg1))
6079 return non_lvalue (fold_convert (type, arg0));
6081 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6082 with a constant, and the two constants have no bits in common,
6083 we should treat this as a BIT_IOR_EXPR since this may produce more
6085 if (TREE_CODE (arg0) == BIT_AND_EXPR
6086 && TREE_CODE (arg1) == BIT_AND_EXPR
6087 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6088 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6089 && integer_zerop (const_binop (BIT_AND_EXPR,
6090 TREE_OPERAND (arg0, 1),
6091 TREE_OPERAND (arg1, 1), 0)))
6093 code = BIT_IOR_EXPR;
6097 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6098 (plus (plus (mult) (mult)) (foo)) so that we can
6099 take advantage of the factoring cases below. */
6100 if ((TREE_CODE (arg0) == PLUS_EXPR
6101 && TREE_CODE (arg1) == MULT_EXPR)
6102 || (TREE_CODE (arg1) == PLUS_EXPR
6103 && TREE_CODE (arg0) == MULT_EXPR))
6105 tree parg0, parg1, parg, marg;
6107 if (TREE_CODE (arg0) == PLUS_EXPR)
6108 parg = arg0, marg = arg1;
6110 parg = arg1, marg = arg0;
6111 parg0 = TREE_OPERAND (parg, 0);
6112 parg1 = TREE_OPERAND (parg, 1);
6116 if (TREE_CODE (parg0) == MULT_EXPR
6117 && TREE_CODE (parg1) != MULT_EXPR)
6118 return fold (build2 (PLUS_EXPR, type,
6119 fold (build2 (PLUS_EXPR, type,
6120 fold_convert (type, parg0),
6121 fold_convert (type, marg))),
6122 fold_convert (type, parg1)));
6123 if (TREE_CODE (parg0) != MULT_EXPR
6124 && TREE_CODE (parg1) == MULT_EXPR)
6125 return fold (build2 (PLUS_EXPR, type,
6126 fold (build2 (PLUS_EXPR, type,
6127 fold_convert (type, parg1),
6128 fold_convert (type, marg))),
6129 fold_convert (type, parg0)));
6132 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6134 tree arg00, arg01, arg10, arg11;
6135 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6137 /* (A * C) + (B * C) -> (A+B) * C.
6138 We are most concerned about the case where C is a constant,
6139 but other combinations show up during loop reduction. Since
6140 it is not difficult, try all four possibilities. */
6142 arg00 = TREE_OPERAND (arg0, 0);
6143 arg01 = TREE_OPERAND (arg0, 1);
6144 arg10 = TREE_OPERAND (arg1, 0);
6145 arg11 = TREE_OPERAND (arg1, 1);
6148 if (operand_equal_p (arg01, arg11, 0))
6149 same = arg01, alt0 = arg00, alt1 = arg10;
6150 else if (operand_equal_p (arg00, arg10, 0))
6151 same = arg00, alt0 = arg01, alt1 = arg11;
6152 else if (operand_equal_p (arg00, arg11, 0))
6153 same = arg00, alt0 = arg01, alt1 = arg10;
6154 else if (operand_equal_p (arg01, arg10, 0))
6155 same = arg01, alt0 = arg00, alt1 = arg11;
6157 /* No identical multiplicands; see if we can find a common
6158 power-of-two factor in non-power-of-two multiplies. This
6159 can help in multi-dimensional array access. */
6160 else if (TREE_CODE (arg01) == INTEGER_CST
6161 && TREE_CODE (arg11) == INTEGER_CST
6162 && TREE_INT_CST_HIGH (arg01) == 0
6163 && TREE_INT_CST_HIGH (arg11) == 0)
6165 HOST_WIDE_INT int01, int11, tmp;
6166 int01 = TREE_INT_CST_LOW (arg01);
6167 int11 = TREE_INT_CST_LOW (arg11);
6169 /* Move min of absolute values to int11. */
6170 if ((int01 >= 0 ? int01 : -int01)
6171 < (int11 >= 0 ? int11 : -int11))
6173 tmp = int01, int01 = int11, int11 = tmp;
6174 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6175 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6178 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6180 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6181 build_int_2 (int01 / int11, 0)));
6188 return fold (build2 (MULT_EXPR, type,
6189 fold (build2 (PLUS_EXPR, type,
6196 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6197 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6198 return non_lvalue (fold_convert (type, arg0));
6200 /* Likewise if the operands are reversed. */
6201 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6202 return non_lvalue (fold_convert (type, arg1));
6204 /* Convert x+x into x*2.0. */
6205 if (operand_equal_p (arg0, arg1, 0)
6206 && SCALAR_FLOAT_TYPE_P (type))
6207 return fold (build2 (MULT_EXPR, type, arg0,
6208 build_real (type, dconst2)));
6210 /* Convert x*c+x into x*(c+1). */
6211 if (flag_unsafe_math_optimizations
6212 && TREE_CODE (arg0) == MULT_EXPR
6213 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6214 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6215 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6219 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6220 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6221 return fold (build2 (MULT_EXPR, type, arg1,
6222 build_real (type, c)));
6225 /* Convert x+x*c into x*(c+1). */
6226 if (flag_unsafe_math_optimizations
6227 && TREE_CODE (arg1) == MULT_EXPR
6228 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6229 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6230 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6234 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6235 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6236 return fold (build2 (MULT_EXPR, type, arg0,
6237 build_real (type, c)));
6240 /* Convert x*c1+x*c2 into x*(c1+c2). */
6241 if (flag_unsafe_math_optimizations
6242 && TREE_CODE (arg0) == MULT_EXPR
6243 && TREE_CODE (arg1) == MULT_EXPR
6244 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6245 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6246 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6247 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6248 && operand_equal_p (TREE_OPERAND (arg0, 0),
6249 TREE_OPERAND (arg1, 0), 0))
6251 REAL_VALUE_TYPE c1, c2;
6253 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6254 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6255 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6256 return fold (build2 (MULT_EXPR, type,
6257 TREE_OPERAND (arg0, 0),
6258 build_real (type, c1)));
6260 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6261 if (flag_unsafe_math_optimizations
6262 && TREE_CODE (arg1) == PLUS_EXPR
6263 && TREE_CODE (arg0) != MULT_EXPR)
6265 tree tree10 = TREE_OPERAND (arg1, 0);
6266 tree tree11 = TREE_OPERAND (arg1, 1);
6267 if (TREE_CODE (tree11) == MULT_EXPR
6268 && TREE_CODE (tree10) == MULT_EXPR)
6271 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6272 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6275 /* Convert (b*c + d*e) + a into b*c + (d*e +a) */
6276 if (flag_unsafe_math_optimizations
6277 && TREE_CODE (arg0) == PLUS_EXPR
6278 && TREE_CODE (arg1) != MULT_EXPR)
6280 tree tree00 = TREE_OPERAND (arg0, 0);
6281 tree tree01 = TREE_OPERAND (arg0, 1);
6282 if (TREE_CODE (tree01) == MULT_EXPR
6283 && TREE_CODE (tree00) == MULT_EXPR)
6286 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6287 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6293 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6294 is a rotate of A by C1 bits. */
6295 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6296 is a rotate of A by B bits. */
6298 enum tree_code code0, code1;
6299 code0 = TREE_CODE (arg0);
6300 code1 = TREE_CODE (arg1);
6301 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6302 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6303 && operand_equal_p (TREE_OPERAND (arg0, 0),
6304 TREE_OPERAND (arg1, 0), 0)
6305 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6307 tree tree01, tree11;
6308 enum tree_code code01, code11;
6310 tree01 = TREE_OPERAND (arg0, 1);
6311 tree11 = TREE_OPERAND (arg1, 1);
6312 STRIP_NOPS (tree01);
6313 STRIP_NOPS (tree11);
6314 code01 = TREE_CODE (tree01);
6315 code11 = TREE_CODE (tree11);
6316 if (code01 == INTEGER_CST
6317 && code11 == INTEGER_CST
6318 && TREE_INT_CST_HIGH (tree01) == 0
6319 && TREE_INT_CST_HIGH (tree11) == 0
6320 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6321 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6322 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6323 code0 == LSHIFT_EXPR ? tree01 : tree11);
6324 else if (code11 == MINUS_EXPR)
6326 tree tree110, tree111;
6327 tree110 = TREE_OPERAND (tree11, 0);
6328 tree111 = TREE_OPERAND (tree11, 1);
6329 STRIP_NOPS (tree110);
6330 STRIP_NOPS (tree111);
6331 if (TREE_CODE (tree110) == INTEGER_CST
6332 && 0 == compare_tree_int (tree110,
6334 (TREE_TYPE (TREE_OPERAND
6336 && operand_equal_p (tree01, tree111, 0))
6337 return build2 ((code0 == LSHIFT_EXPR
6340 type, TREE_OPERAND (arg0, 0), tree01);
6342 else if (code01 == MINUS_EXPR)
6344 tree tree010, tree011;
6345 tree010 = TREE_OPERAND (tree01, 0);
6346 tree011 = TREE_OPERAND (tree01, 1);
6347 STRIP_NOPS (tree010);
6348 STRIP_NOPS (tree011);
6349 if (TREE_CODE (tree010) == INTEGER_CST
6350 && 0 == compare_tree_int (tree010,
6352 (TREE_TYPE (TREE_OPERAND
6354 && operand_equal_p (tree11, tree011, 0))
6355 return build2 ((code0 != LSHIFT_EXPR
6358 type, TREE_OPERAND (arg0, 0), tree11);
6364 /* In most languages, can't associate operations on floats through
6365 parentheses. Rather than remember where the parentheses were, we
6366 don't associate floats at all, unless the user has specified
6367 -funsafe-math-optimizations. */
6370 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6372 tree var0, con0, lit0, minus_lit0;
6373 tree var1, con1, lit1, minus_lit1;
6375 /* Split both trees into variables, constants, and literals. Then
6376 associate each group together, the constants with literals,
6377 then the result with variables. This increases the chances of
6378 literals being recombined later and of generating relocatable
6379 expressions for the sum of a constant and literal. */
6380 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6381 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6382 code == MINUS_EXPR);
6384 /* Only do something if we found more than two objects. Otherwise,
6385 nothing has changed and we risk infinite recursion. */
6386 if (2 < ((var0 != 0) + (var1 != 0)
6387 + (con0 != 0) + (con1 != 0)
6388 + (lit0 != 0) + (lit1 != 0)
6389 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6391 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6392 if (code == MINUS_EXPR)
6395 var0 = associate_trees (var0, var1, code, type);
6396 con0 = associate_trees (con0, con1, code, type);
6397 lit0 = associate_trees (lit0, lit1, code, type);
6398 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6400 /* Preserve the MINUS_EXPR if the negative part of the literal is
6401 greater than the positive part. Otherwise, the multiplicative
6402 folding code (i.e extract_muldiv) may be fooled in case
6403 unsigned constants are subtracted, like in the following
6404 example: ((X*2 + 4) - 8U)/2. */
6405 if (minus_lit0 && lit0)
6407 if (TREE_CODE (lit0) == INTEGER_CST
6408 && TREE_CODE (minus_lit0) == INTEGER_CST
6409 && tree_int_cst_lt (lit0, minus_lit0))
6411 minus_lit0 = associate_trees (minus_lit0, lit0,
6417 lit0 = associate_trees (lit0, minus_lit0,
6425 return fold_convert (type,
6426 associate_trees (var0, minus_lit0,
6430 con0 = associate_trees (con0, minus_lit0,
6432 return fold_convert (type,
6433 associate_trees (var0, con0,
6438 con0 = associate_trees (con0, lit0, code, type);
6439 return fold_convert (type, associate_trees (var0, con0,
6446 t1 = const_binop (code, arg0, arg1, 0);
6447 if (t1 != NULL_TREE)
6449 /* The return value should always have
6450 the same type as the original expression. */
6451 if (TREE_TYPE (t1) != type)
6452 t1 = fold_convert (type, t1);
6459 /* A - (-B) -> A + B */
6460 if (TREE_CODE (arg1) == NEGATE_EXPR)
6461 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6462 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6463 if (TREE_CODE (arg0) == NEGATE_EXPR
6464 && (FLOAT_TYPE_P (type)
6465 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6466 && negate_expr_p (arg1)
6467 && reorder_operands_p (arg0, arg1))
6468 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6469 TREE_OPERAND (arg0, 0)));
6471 if (! FLOAT_TYPE_P (type))
6473 if (! wins && integer_zerop (arg0))
6474 return negate_expr (fold_convert (type, arg1));
6475 if (integer_zerop (arg1))
6476 return non_lvalue (fold_convert (type, arg0));
6478 /* Fold A - (A & B) into ~B & A. */
6479 if (!TREE_SIDE_EFFECTS (arg0)
6480 && TREE_CODE (arg1) == BIT_AND_EXPR)
6482 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6483 return fold (build2 (BIT_AND_EXPR, type,
6484 fold (build1 (BIT_NOT_EXPR, type,
6485 TREE_OPERAND (arg1, 0))),
6487 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6488 return fold (build2 (BIT_AND_EXPR, type,
6489 fold (build1 (BIT_NOT_EXPR, type,
6490 TREE_OPERAND (arg1, 1))),
6494 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6495 any power of 2 minus 1. */
6496 if (TREE_CODE (arg0) == BIT_AND_EXPR
6497 && TREE_CODE (arg1) == BIT_AND_EXPR
6498 && operand_equal_p (TREE_OPERAND (arg0, 0),
6499 TREE_OPERAND (arg1, 0), 0))
6501 tree mask0 = TREE_OPERAND (arg0, 1);
6502 tree mask1 = TREE_OPERAND (arg1, 1);
6503 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6505 if (operand_equal_p (tem, mask1, 0))
6507 tem = fold (build2 (BIT_XOR_EXPR, type,
6508 TREE_OPERAND (arg0, 0), mask1));
6509 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6514 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6515 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6516 return non_lvalue (fold_convert (type, arg0));
6518 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6519 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6520 (-ARG1 + ARG0) reduces to -ARG1. */
6521 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6522 return negate_expr (fold_convert (type, arg1));
6524 /* Fold &x - &x. This can happen from &x.foo - &x.
6525 This is unsafe for certain floats even in non-IEEE formats.
6526 In IEEE, it is unsafe because it does wrong for NaNs.
6527 Also note that operand_equal_p is always false if an operand
6530 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6531 && operand_equal_p (arg0, arg1, 0))
6532 return fold_convert (type, integer_zero_node);
6534 /* A - B -> A + (-B) if B is easily negatable. */
6535 if (!wins && negate_expr_p (arg1)
6536 && (FLOAT_TYPE_P (type)
6537 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6538 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6540 if (TREE_CODE (arg0) == MULT_EXPR
6541 && TREE_CODE (arg1) == MULT_EXPR
6542 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6544 /* (A * C) - (B * C) -> (A-B) * C. */
6545 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6546 TREE_OPERAND (arg1, 1), 0))
6547 return fold (build2 (MULT_EXPR, type,
6548 fold (build2 (MINUS_EXPR, type,
6549 TREE_OPERAND (arg0, 0),
6550 TREE_OPERAND (arg1, 0))),
6551 TREE_OPERAND (arg0, 1)));
6552 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6553 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6554 TREE_OPERAND (arg1, 0), 0))
6555 return fold (build2 (MULT_EXPR, type,
6556 TREE_OPERAND (arg0, 0),
6557 fold (build2 (MINUS_EXPR, type,
6558 TREE_OPERAND (arg0, 1),
6559 TREE_OPERAND (arg1, 1)))));
6565 /* (-A) * (-B) -> A * B */
6566 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6567 return fold (build2 (MULT_EXPR, type,
6568 TREE_OPERAND (arg0, 0),
6569 negate_expr (arg1)));
6570 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6571 return fold (build2 (MULT_EXPR, type,
6573 TREE_OPERAND (arg1, 0)));
6575 if (! FLOAT_TYPE_P (type))
6577 if (integer_zerop (arg1))
6578 return omit_one_operand (type, arg1, arg0);
6579 if (integer_onep (arg1))
6580 return non_lvalue (fold_convert (type, arg0));
6582 /* (a * (1 << b)) is (a << b) */
6583 if (TREE_CODE (arg1) == LSHIFT_EXPR
6584 && integer_onep (TREE_OPERAND (arg1, 0)))
6585 return fold (build2 (LSHIFT_EXPR, type, arg0,
6586 TREE_OPERAND (arg1, 1)));
6587 if (TREE_CODE (arg0) == LSHIFT_EXPR
6588 && integer_onep (TREE_OPERAND (arg0, 0)))
6589 return fold (build2 (LSHIFT_EXPR, type, arg1,
6590 TREE_OPERAND (arg0, 1)));
6592 if (TREE_CODE (arg1) == INTEGER_CST
6593 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6594 fold_convert (type, arg1),
6596 return fold_convert (type, tem);
6601 /* Maybe fold x * 0 to 0. The expressions aren't the same
6602 when x is NaN, since x * 0 is also NaN. Nor are they the
6603 same in modes with signed zeros, since multiplying a
6604 negative value by 0 gives -0, not +0. */
6605 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6606 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6607 && real_zerop (arg1))
6608 return omit_one_operand (type, arg1, arg0);
6609 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6610 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6611 && real_onep (arg1))
6612 return non_lvalue (fold_convert (type, arg0));
6614 /* Transform x * -1.0 into -x. */
6615 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6616 && real_minus_onep (arg1))
6617 return fold_convert (type, negate_expr (arg0));
6619 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6620 if (flag_unsafe_math_optimizations
6621 && TREE_CODE (arg0) == RDIV_EXPR
6622 && TREE_CODE (arg1) == REAL_CST
6623 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6625 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6628 return fold (build2 (RDIV_EXPR, type, tem,
6629 TREE_OPERAND (arg0, 1)));
6632 if (flag_unsafe_math_optimizations)
6634 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6635 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6637 /* Optimizations of root(...)*root(...). */
6638 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
6640 tree rootfn, arg, arglist;
6641 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6642 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6644 /* Optimize sqrt(x)*sqrt(x) as x. */
6645 if (BUILTIN_SQRT_P (fcode0)
6646 && operand_equal_p (arg00, arg10, 0)
6647 && ! HONOR_SNANS (TYPE_MODE (type)))
6650 /* Optimize root(x)*root(y) as root(x*y). */
6651 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6652 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
6653 arglist = build_tree_list (NULL_TREE, arg);
6654 return build_function_call_expr (rootfn, arglist);
6657 /* Optimize expN(x)*expN(y) as expN(x+y). */
6658 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
6660 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6661 tree arg = build2 (PLUS_EXPR, type,
6662 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6663 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6664 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6665 return build_function_call_expr (expfn, arglist);
6668 /* Optimizations of pow(...)*pow(...). */
6669 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6670 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6671 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6673 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6674 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6676 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6677 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6680 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6681 if (operand_equal_p (arg01, arg11, 0))
6683 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6684 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
6685 tree arglist = tree_cons (NULL_TREE, fold (arg),
6686 build_tree_list (NULL_TREE,
6688 return build_function_call_expr (powfn, arglist);
6691 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6692 if (operand_equal_p (arg00, arg10, 0))
6694 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6695 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
6696 tree arglist = tree_cons (NULL_TREE, arg00,
6697 build_tree_list (NULL_TREE,
6699 return build_function_call_expr (powfn, arglist);
6703 /* Optimize tan(x)*cos(x) as sin(x). */
6704 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6705 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6706 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6707 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6708 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6709 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6710 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6711 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6713 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
6715 if (sinfn != NULL_TREE)
6716 return build_function_call_expr (sinfn,
6717 TREE_OPERAND (arg0, 1));
6720 /* Optimize x*pow(x,c) as pow(x,c+1). */
6721 if (fcode1 == BUILT_IN_POW
6722 || fcode1 == BUILT_IN_POWF
6723 || fcode1 == BUILT_IN_POWL)
6725 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6726 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6728 if (TREE_CODE (arg11) == REAL_CST
6729 && ! TREE_CONSTANT_OVERFLOW (arg11)
6730 && operand_equal_p (arg0, arg10, 0))
6732 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6736 c = TREE_REAL_CST (arg11);
6737 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6738 arg = build_real (type, c);
6739 arglist = build_tree_list (NULL_TREE, arg);
6740 arglist = tree_cons (NULL_TREE, arg0, arglist);
6741 return build_function_call_expr (powfn, arglist);
6745 /* Optimize pow(x,c)*x as pow(x,c+1). */
6746 if (fcode0 == BUILT_IN_POW
6747 || fcode0 == BUILT_IN_POWF
6748 || fcode0 == BUILT_IN_POWL)
6750 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6751 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6753 if (TREE_CODE (arg01) == REAL_CST
6754 && ! TREE_CONSTANT_OVERFLOW (arg01)
6755 && operand_equal_p (arg1, arg00, 0))
6757 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6761 c = TREE_REAL_CST (arg01);
6762 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6763 arg = build_real (type, c);
6764 arglist = build_tree_list (NULL_TREE, arg);
6765 arglist = tree_cons (NULL_TREE, arg1, arglist);
6766 return build_function_call_expr (powfn, arglist);
6770 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6772 && operand_equal_p (arg0, arg1, 0))
6774 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6778 tree arg = build_real (type, dconst2);
6779 tree arglist = build_tree_list (NULL_TREE, arg);
6780 arglist = tree_cons (NULL_TREE, arg0, arglist);
6781 return build_function_call_expr (powfn, arglist);
6790 if (integer_all_onesp (arg1))
6791 return omit_one_operand (type, arg1, arg0);
6792 if (integer_zerop (arg1))
6793 return non_lvalue (fold_convert (type, arg0));
6794 if (operand_equal_p (arg0, arg1, 0))
6795 return non_lvalue (fold_convert (type, arg0));
6796 t1 = distribute_bit_expr (code, type, arg0, arg1);
6797 if (t1 != NULL_TREE)
6800 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6802 This results in more efficient code for machines without a NAND
6803 instruction. Combine will canonicalize to the first form
6804 which will allow use of NAND instructions provided by the
6805 backend if they exist. */
6806 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6807 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6809 return fold (build1 (BIT_NOT_EXPR, type,
6810 build2 (BIT_AND_EXPR, type,
6811 TREE_OPERAND (arg0, 0),
6812 TREE_OPERAND (arg1, 0))));
6815 /* See if this can be simplified into a rotate first. If that
6816 is unsuccessful continue in the association code. */
6820 if (integer_zerop (arg1))
6821 return non_lvalue (fold_convert (type, arg0));
6822 if (integer_all_onesp (arg1))
6823 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6824 if (operand_equal_p (arg0, arg1, 0))
6825 return omit_one_operand (type, integer_zero_node, arg0);
6827 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6828 with a constant, and the two constants have no bits in common,
6829 we should treat this as a BIT_IOR_EXPR since this may produce more
6831 if (TREE_CODE (arg0) == BIT_AND_EXPR
6832 && TREE_CODE (arg1) == BIT_AND_EXPR
6833 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6834 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6835 && integer_zerop (const_binop (BIT_AND_EXPR,
6836 TREE_OPERAND (arg0, 1),
6837 TREE_OPERAND (arg1, 1), 0)))
6839 code = BIT_IOR_EXPR;
6843 /* See if this can be simplified into a rotate first. If that
6844 is unsuccessful continue in the association code. */
6848 if (integer_all_onesp (arg1))
6849 return non_lvalue (fold_convert (type, arg0));
6850 if (integer_zerop (arg1))
6851 return omit_one_operand (type, arg1, arg0);
6852 if (operand_equal_p (arg0, arg1, 0))
6853 return non_lvalue (fold_convert (type, arg0));
6854 t1 = distribute_bit_expr (code, type, arg0, arg1);
6855 if (t1 != NULL_TREE)
6857 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6858 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6859 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6862 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6864 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6865 && (~TREE_INT_CST_LOW (arg1)
6866 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6867 return fold_convert (type, TREE_OPERAND (arg0, 0));
6870 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6872 This results in more efficient code for machines without a NOR
6873 instruction. Combine will canonicalize to the first form
6874 which will allow use of NOR instructions provided by the
6875 backend if they exist. */
6876 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6877 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6879 return fold (build1 (BIT_NOT_EXPR, type,
6880 build2 (BIT_IOR_EXPR, type,
6881 TREE_OPERAND (arg0, 0),
6882 TREE_OPERAND (arg1, 0))));
6888 /* Don't touch a floating-point divide by zero unless the mode
6889 of the constant can represent infinity. */
6890 if (TREE_CODE (arg1) == REAL_CST
6891 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6892 && real_zerop (arg1))
6895 /* (-A) / (-B) -> A / B */
6896 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6897 return fold (build2 (RDIV_EXPR, type,
6898 TREE_OPERAND (arg0, 0),
6899 negate_expr (arg1)));
6900 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6901 return fold (build2 (RDIV_EXPR, type,
6903 TREE_OPERAND (arg1, 0)));
6905 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6906 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6907 && real_onep (arg1))
6908 return non_lvalue (fold_convert (type, arg0));
6910 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6911 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6912 && real_minus_onep (arg1))
6913 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6915 /* If ARG1 is a constant, we can convert this to a multiply by the
6916 reciprocal. This does not have the same rounding properties,
6917 so only do this if -funsafe-math-optimizations. We can actually
6918 always safely do it if ARG1 is a power of two, but it's hard to
6919 tell if it is or not in a portable manner. */
6920 if (TREE_CODE (arg1) == REAL_CST)
6922 if (flag_unsafe_math_optimizations
6923 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6925 return fold (build2 (MULT_EXPR, type, arg0, tem));
6926 /* Find the reciprocal if optimizing and the result is exact. */
6930 r = TREE_REAL_CST (arg1);
6931 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6933 tem = build_real (type, r);
6934 return fold (build2 (MULT_EXPR, type, arg0, tem));
6938 /* Convert A/B/C to A/(B*C). */
6939 if (flag_unsafe_math_optimizations
6940 && TREE_CODE (arg0) == RDIV_EXPR)
6941 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6942 fold (build2 (MULT_EXPR, type,
6943 TREE_OPERAND (arg0, 1), arg1))));
6945 /* Convert A/(B/C) to (A/B)*C. */
6946 if (flag_unsafe_math_optimizations
6947 && TREE_CODE (arg1) == RDIV_EXPR)
6948 return fold (build2 (MULT_EXPR, type,
6949 fold (build2 (RDIV_EXPR, type, arg0,
6950 TREE_OPERAND (arg1, 0))),
6951 TREE_OPERAND (arg1, 1)));
6953 /* Convert C1/(X*C2) into (C1/C2)/X. */
6954 if (flag_unsafe_math_optimizations
6955 && TREE_CODE (arg1) == MULT_EXPR
6956 && TREE_CODE (arg0) == REAL_CST
6957 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6959 tree tem = const_binop (RDIV_EXPR, arg0,
6960 TREE_OPERAND (arg1, 1), 0);
6962 return fold (build2 (RDIV_EXPR, type, tem,
6963 TREE_OPERAND (arg1, 0)));
6966 if (flag_unsafe_math_optimizations)
6968 enum built_in_function fcode = builtin_mathfn_code (arg1);
6969 /* Optimize x/expN(y) into x*expN(-y). */
6970 if (BUILTIN_EXPONENT_P (fcode))
6972 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6973 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
6974 tree arglist = build_tree_list (NULL_TREE,
6975 fold_convert (type, arg));
6976 arg1 = build_function_call_expr (expfn, arglist);
6977 return fold (build2 (MULT_EXPR, type, arg0, arg1));
6980 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6981 if (fcode == BUILT_IN_POW
6982 || fcode == BUILT_IN_POWF
6983 || fcode == BUILT_IN_POWL)
6985 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6986 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6987 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6988 tree neg11 = fold_convert (type, negate_expr (arg11));
6989 tree arglist = tree_cons(NULL_TREE, arg10,
6990 build_tree_list (NULL_TREE, neg11));
6991 arg1 = build_function_call_expr (powfn, arglist);
6992 return fold (build2 (MULT_EXPR, type, arg0, arg1));
6996 if (flag_unsafe_math_optimizations)
6998 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6999 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7001 /* Optimize sin(x)/cos(x) as tan(x). */
7002 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7003 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7004 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7005 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7006 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7008 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7010 if (tanfn != NULL_TREE)
7011 return build_function_call_expr (tanfn,
7012 TREE_OPERAND (arg0, 1));
7015 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7016 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7017 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7018 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7019 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7020 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7022 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7024 if (tanfn != NULL_TREE)
7026 tree tmp = TREE_OPERAND (arg0, 1);
7027 tmp = build_function_call_expr (tanfn, tmp);
7028 return fold (build2 (RDIV_EXPR, type,
7029 build_real (type, dconst1), tmp));
7033 /* Optimize pow(x,c)/x as pow(x,c-1). */
7034 if (fcode0 == BUILT_IN_POW
7035 || fcode0 == BUILT_IN_POWF
7036 || fcode0 == BUILT_IN_POWL)
7038 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7039 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7040 if (TREE_CODE (arg01) == REAL_CST
7041 && ! TREE_CONSTANT_OVERFLOW (arg01)
7042 && operand_equal_p (arg1, arg00, 0))
7044 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7048 c = TREE_REAL_CST (arg01);
7049 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7050 arg = build_real (type, c);
7051 arglist = build_tree_list (NULL_TREE, arg);
7052 arglist = tree_cons (NULL_TREE, arg1, arglist);
7053 return build_function_call_expr (powfn, arglist);
7059 case TRUNC_DIV_EXPR:
7060 case ROUND_DIV_EXPR:
7061 case FLOOR_DIV_EXPR:
7063 case EXACT_DIV_EXPR:
7064 if (integer_onep (arg1))
7065 return non_lvalue (fold_convert (type, arg0));
7066 if (integer_zerop (arg1))
7069 if (!TYPE_UNSIGNED (type)
7070 && TREE_CODE (arg1) == INTEGER_CST
7071 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7072 && TREE_INT_CST_HIGH (arg1) == -1)
7073 return fold_convert (type, negate_expr (arg0));
7075 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7076 operation, EXACT_DIV_EXPR.
7078 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7079 At one time others generated faster code, it's not clear if they do
7080 after the last round to changes to the DIV code in expmed.c. */
7081 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7082 && multiple_of_p (type, arg0, arg1))
7083 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7085 if (TREE_CODE (arg1) == INTEGER_CST
7086 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7088 return fold_convert (type, tem);
7093 case FLOOR_MOD_EXPR:
7094 case ROUND_MOD_EXPR:
7095 case TRUNC_MOD_EXPR:
7096 if (integer_onep (arg1))
7097 return omit_one_operand (type, integer_zero_node, arg0);
7098 if (integer_zerop (arg1))
7100 /* X % -1 is zero. */
7101 if (!TYPE_UNSIGNED (type)
7102 && TREE_CODE (arg1) == INTEGER_CST
7103 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7104 && TREE_INT_CST_HIGH (arg1) == -1)
7105 return omit_one_operand (type, integer_zero_node, arg0);
7107 if (TREE_CODE (arg1) == INTEGER_CST
7108 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7110 return fold_convert (type, tem);
7116 if (integer_all_onesp (arg0))
7117 return omit_one_operand (type, arg0, arg1);
7121 /* Optimize -1 >> x for arithmetic right shifts. */
7122 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7123 return omit_one_operand (type, arg0, arg1);
7124 /* ... fall through ... */
7128 if (integer_zerop (arg1))
7129 return non_lvalue (fold_convert (type, arg0));
7130 if (integer_zerop (arg0))
7131 return omit_one_operand (type, arg0, arg1);
7133 /* Since negative shift count is not well-defined,
7134 don't try to compute it in the compiler. */
7135 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7137 /* Rewrite an LROTATE_EXPR by a constant into an
7138 RROTATE_EXPR by a new constant. */
7139 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7141 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
7142 tem = fold_convert (TREE_TYPE (arg1), tem);
7143 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7144 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7147 /* If we have a rotate of a bit operation with the rotate count and
7148 the second operand of the bit operation both constant,
7149 permute the two operations. */
7150 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7151 && (TREE_CODE (arg0) == BIT_AND_EXPR
7152 || TREE_CODE (arg0) == BIT_IOR_EXPR
7153 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7154 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7155 return fold (build2 (TREE_CODE (arg0), type,
7156 fold (build2 (code, type,
7157 TREE_OPERAND (arg0, 0), arg1)),
7158 fold (build2 (code, type,
7159 TREE_OPERAND (arg0, 1), arg1))));
7161 /* Two consecutive rotates adding up to the width of the mode can
7163 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7164 && TREE_CODE (arg0) == RROTATE_EXPR
7165 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7166 && TREE_INT_CST_HIGH (arg1) == 0
7167 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7168 && ((TREE_INT_CST_LOW (arg1)
7169 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7170 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7171 return TREE_OPERAND (arg0, 0);
7176 if (operand_equal_p (arg0, arg1, 0))
7177 return omit_one_operand (type, arg0, arg1);
7178 if (INTEGRAL_TYPE_P (type)
7179 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7180 return omit_one_operand (type, arg1, arg0);
7184 if (operand_equal_p (arg0, arg1, 0))
7185 return omit_one_operand (type, arg0, arg1);
7186 if (INTEGRAL_TYPE_P (type)
7187 && TYPE_MAX_VALUE (type)
7188 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7189 return omit_one_operand (type, arg1, arg0);
7192 case TRUTH_NOT_EXPR:
7193 /* The argument to invert_truthvalue must have Boolean type. */
7194 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7195 arg0 = fold_convert (boolean_type_node, arg0);
7197 /* Note that the operand of this must be an int
7198 and its values must be 0 or 1.
7199 ("true" is a fixed value perhaps depending on the language,
7200 but we don't handle values other than 1 correctly yet.) */
7201 tem = invert_truthvalue (arg0);
7202 /* Avoid infinite recursion. */
7203 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7205 tem = fold_single_bit_test (code, arg0, arg1, type);
7210 return fold_convert (type, tem);
7212 case TRUTH_ANDIF_EXPR:
7213 /* Note that the operands of this must be ints
7214 and their values must be 0 or 1.
7215 ("true" is a fixed value perhaps depending on the language.) */
7216 /* If first arg is constant zero, return it. */
7217 if (integer_zerop (arg0))
7218 return fold_convert (type, arg0);
7219 case TRUTH_AND_EXPR:
7220 /* If either arg is constant true, drop it. */
7221 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7222 return non_lvalue (fold_convert (type, arg1));
7223 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7224 /* Preserve sequence points. */
7225 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7226 return non_lvalue (fold_convert (type, arg0));
7227 /* If second arg is constant zero, result is zero, but first arg
7228 must be evaluated. */
7229 if (integer_zerop (arg1))
7230 return omit_one_operand (type, arg1, arg0);
7231 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7232 case will be handled here. */
7233 if (integer_zerop (arg0))
7234 return omit_one_operand (type, arg0, arg1);
7237 /* We only do these simplifications if we are optimizing. */
7241 /* Check for things like (A || B) && (A || C). We can convert this
7242 to A || (B && C). Note that either operator can be any of the four
7243 truth and/or operations and the transformation will still be
7244 valid. Also note that we only care about order for the
7245 ANDIF and ORIF operators. If B contains side effects, this
7246 might change the truth-value of A. */
7247 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7248 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7249 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7250 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7251 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7252 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7254 tree a00 = TREE_OPERAND (arg0, 0);
7255 tree a01 = TREE_OPERAND (arg0, 1);
7256 tree a10 = TREE_OPERAND (arg1, 0);
7257 tree a11 = TREE_OPERAND (arg1, 1);
7258 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7259 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7260 && (code == TRUTH_AND_EXPR
7261 || code == TRUTH_OR_EXPR));
7263 if (operand_equal_p (a00, a10, 0))
7264 return fold (build2 (TREE_CODE (arg0), type, a00,
7265 fold (build2 (code, type, a01, a11))));
7266 else if (commutative && operand_equal_p (a00, a11, 0))
7267 return fold (build2 (TREE_CODE (arg0), type, a00,
7268 fold (build2 (code, type, a01, a10))));
7269 else if (commutative && operand_equal_p (a01, a10, 0))
7270 return fold (build2 (TREE_CODE (arg0), type, a01,
7271 fold (build2 (code, type, a00, a11))));
7273 /* This case if tricky because we must either have commutative
7274 operators or else A10 must not have side-effects. */
7276 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7277 && operand_equal_p (a01, a11, 0))
7278 return fold (build2 (TREE_CODE (arg0), type,
7279 fold (build2 (code, type, a00, a10)),
7283 /* See if we can build a range comparison. */
7284 if (0 != (tem = fold_range_test (t)))
7287 /* Check for the possibility of merging component references. If our
7288 lhs is another similar operation, try to merge its rhs with our
7289 rhs. Then try to merge our lhs and rhs. */
7290 if (TREE_CODE (arg0) == code
7291 && 0 != (tem = fold_truthop (code, type,
7292 TREE_OPERAND (arg0, 1), arg1)))
7293 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7295 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7300 case TRUTH_ORIF_EXPR:
7301 /* Note that the operands of this must be ints
7302 and their values must be 0 or true.
7303 ("true" is a fixed value perhaps depending on the language.) */
7304 /* If first arg is constant true, return it. */
7305 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7306 return fold_convert (type, arg0);
7308 /* If either arg is constant zero, drop it. */
7309 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7310 return non_lvalue (fold_convert (type, arg1));
7311 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7312 /* Preserve sequence points. */
7313 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7314 return non_lvalue (fold_convert (type, arg0));
7315 /* If second arg is constant true, result is true, but we must
7316 evaluate first arg. */
7317 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7318 return omit_one_operand (type, arg1, arg0);
7319 /* Likewise for first arg, but note this only occurs here for
7321 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7322 return omit_one_operand (type, arg0, arg1);
7325 case TRUTH_XOR_EXPR:
7326 /* If either arg is constant zero, drop it. */
7327 if (integer_zerop (arg0))
7328 return non_lvalue (fold_convert (type, arg1));
7329 if (integer_zerop (arg1))
7330 return non_lvalue (fold_convert (type, arg0));
7331 /* If either arg is constant true, this is a logical inversion. */
7332 if (integer_onep (arg0))
7333 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7334 if (integer_onep (arg1))
7335 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7336 /* Identical arguments cancel to zero. */
7337 if (operand_equal_p (arg0, arg1, 0))
7338 return omit_one_operand (type, integer_zero_node, arg0);
7347 /* If one arg is a real or integer constant, put it last. */
7348 if (tree_swap_operands_p (arg0, arg1, true))
7349 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7351 /* If this is an equality comparison of the address of a non-weak
7352 object against zero, then we know the result. */
7353 if ((code == EQ_EXPR || code == NE_EXPR)
7354 && TREE_CODE (arg0) == ADDR_EXPR
7355 && DECL_P (TREE_OPERAND (arg0, 0))
7356 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7357 && integer_zerop (arg1))
7358 return constant_boolean_node (code != EQ_EXPR, type);
7360 /* If this is an equality comparison of the address of two non-weak,
7361 unaliased symbols neither of which are extern (since we do not
7362 have access to attributes for externs), then we know the result. */
7363 if ((code == EQ_EXPR || code == NE_EXPR)
7364 && TREE_CODE (arg0) == ADDR_EXPR
7365 && DECL_P (TREE_OPERAND (arg0, 0))
7366 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7367 && ! lookup_attribute ("alias",
7368 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7369 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7370 && TREE_CODE (arg1) == ADDR_EXPR
7371 && DECL_P (TREE_OPERAND (arg1, 0))
7372 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7373 && ! lookup_attribute ("alias",
7374 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7375 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7376 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7377 ? code == EQ_EXPR : code != EQ_EXPR,
7380 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7382 tree targ0 = strip_float_extensions (arg0);
7383 tree targ1 = strip_float_extensions (arg1);
7384 tree newtype = TREE_TYPE (targ0);
7386 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7387 newtype = TREE_TYPE (targ1);
7389 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7390 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7391 return fold (build2 (code, type, fold_convert (newtype, targ0),
7392 fold_convert (newtype, targ1)));
7394 /* (-a) CMP (-b) -> b CMP a */
7395 if (TREE_CODE (arg0) == NEGATE_EXPR
7396 && TREE_CODE (arg1) == NEGATE_EXPR)
7397 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7398 TREE_OPERAND (arg0, 0)));
7400 if (TREE_CODE (arg1) == REAL_CST)
7402 REAL_VALUE_TYPE cst;
7403 cst = TREE_REAL_CST (arg1);
7405 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7406 if (TREE_CODE (arg0) == NEGATE_EXPR)
7408 fold (build2 (swap_tree_comparison (code), type,
7409 TREE_OPERAND (arg0, 0),
7410 build_real (TREE_TYPE (arg1),
7411 REAL_VALUE_NEGATE (cst))));
7413 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7414 /* a CMP (-0) -> a CMP 0 */
7415 if (REAL_VALUE_MINUS_ZERO (cst))
7416 return fold (build2 (code, type, arg0,
7417 build_real (TREE_TYPE (arg1), dconst0)));
7419 /* x != NaN is always true, other ops are always false. */
7420 if (REAL_VALUE_ISNAN (cst)
7421 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7423 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7424 return omit_one_operand (type, tem, arg0);
7427 /* Fold comparisons against infinity. */
7428 if (REAL_VALUE_ISINF (cst))
7430 tem = fold_inf_compare (code, type, arg0, arg1);
7431 if (tem != NULL_TREE)
7436 /* If this is a comparison of a real constant with a PLUS_EXPR
7437 or a MINUS_EXPR of a real constant, we can convert it into a
7438 comparison with a revised real constant as long as no overflow
7439 occurs when unsafe_math_optimizations are enabled. */
7440 if (flag_unsafe_math_optimizations
7441 && TREE_CODE (arg1) == REAL_CST
7442 && (TREE_CODE (arg0) == PLUS_EXPR
7443 || TREE_CODE (arg0) == MINUS_EXPR)
7444 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7445 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7446 ? MINUS_EXPR : PLUS_EXPR,
7447 arg1, TREE_OPERAND (arg0, 1), 0))
7448 && ! TREE_CONSTANT_OVERFLOW (tem))
7449 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7451 /* Likewise, we can simplify a comparison of a real constant with
7452 a MINUS_EXPR whose first operand is also a real constant, i.e.
7453 (c1 - x) < c2 becomes x > c1-c2. */
7454 if (flag_unsafe_math_optimizations
7455 && TREE_CODE (arg1) == REAL_CST
7456 && TREE_CODE (arg0) == MINUS_EXPR
7457 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7458 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7460 && ! TREE_CONSTANT_OVERFLOW (tem))
7461 return fold (build2 (swap_tree_comparison (code), type,
7462 TREE_OPERAND (arg0, 1), tem));
7464 /* Fold comparisons against built-in math functions. */
7465 if (TREE_CODE (arg1) == REAL_CST
7466 && flag_unsafe_math_optimizations
7467 && ! flag_errno_math)
7469 enum built_in_function fcode = builtin_mathfn_code (arg0);
7471 if (fcode != END_BUILTINS)
7473 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7474 if (tem != NULL_TREE)
7480 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7481 if (TREE_CONSTANT (arg1)
7482 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7483 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7484 /* This optimization is invalid for ordered comparisons
7485 if CONST+INCR overflows or if foo+incr might overflow.
7486 This optimization is invalid for floating point due to rounding.
7487 For pointer types we assume overflow doesn't happen. */
7488 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7489 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7490 && (code == EQ_EXPR || code == NE_EXPR))))
7492 tree varop, newconst;
7494 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7496 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7497 arg1, TREE_OPERAND (arg0, 1)));
7498 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7499 TREE_OPERAND (arg0, 0),
7500 TREE_OPERAND (arg0, 1));
7504 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7505 arg1, TREE_OPERAND (arg0, 1)));
7506 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7507 TREE_OPERAND (arg0, 0),
7508 TREE_OPERAND (arg0, 1));
7512 /* If VAROP is a reference to a bitfield, we must mask
7513 the constant by the width of the field. */
7514 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7515 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7517 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7518 int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7519 tree folded_compare, shift;
7521 /* First check whether the comparison would come out
7522 always the same. If we don't do that we would
7523 change the meaning with the masking. */
7524 folded_compare = fold (build2 (code, type,
7525 TREE_OPERAND (varop, 0),
7527 if (integer_zerop (folded_compare)
7528 || integer_onep (folded_compare))
7529 return omit_one_operand (type, folded_compare, varop);
7531 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
7533 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7535 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7539 return fold (build2 (code, type, varop, newconst));
7542 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7543 This transformation affects the cases which are handled in later
7544 optimizations involving comparisons with non-negative constants. */
7545 if (TREE_CODE (arg1) == INTEGER_CST
7546 && TREE_CODE (arg0) != INTEGER_CST
7547 && tree_int_cst_sgn (arg1) > 0)
7552 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7553 return fold (build2 (GT_EXPR, type, arg0, arg1));
7556 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7557 return fold (build2 (LE_EXPR, type, arg0, arg1));
7564 /* Comparisons with the highest or lowest possible integer of
7565 the specified size will have known values.
7567 This is quite similar to fold_relational_hi_lo; however, my
7568 attempts to share the code have been nothing but trouble.
7569 I give up for now. */
7571 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7573 if (TREE_CODE (arg1) == INTEGER_CST
7574 && ! TREE_CONSTANT_OVERFLOW (arg1)
7575 && width <= HOST_BITS_PER_WIDE_INT
7576 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7577 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7579 unsigned HOST_WIDE_INT signed_max;
7580 unsigned HOST_WIDE_INT max, min;
7582 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7584 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7586 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7592 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7595 if (TREE_INT_CST_HIGH (arg1) == 0
7596 && TREE_INT_CST_LOW (arg1) == max)
7600 return omit_one_operand (type, integer_zero_node, arg0);
7603 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7606 return omit_one_operand (type, integer_one_node, arg0);
7609 return fold (build2 (NE_EXPR, type, arg0, arg1));
7611 /* The GE_EXPR and LT_EXPR cases above are not normally
7612 reached because of previous transformations. */
7617 else if (TREE_INT_CST_HIGH (arg1) == 0
7618 && TREE_INT_CST_LOW (arg1) == max - 1)
7622 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7623 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7625 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7626 return fold (build2 (NE_EXPR, type, arg0, arg1));
7630 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7631 && TREE_INT_CST_LOW (arg1) == min)
7635 return omit_one_operand (type, integer_zero_node, arg0);
7638 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7641 return omit_one_operand (type, integer_one_node, arg0);
7644 return fold (build2 (NE_EXPR, type, arg0, arg1));
7649 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7650 && TREE_INT_CST_LOW (arg1) == min + 1)
7654 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7655 return fold (build2 (NE_EXPR, type, arg0, arg1));
7657 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7658 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7663 else if (!in_gimple_form
7664 && TREE_INT_CST_HIGH (arg1) == 0
7665 && TREE_INT_CST_LOW (arg1) == signed_max
7666 && TYPE_UNSIGNED (TREE_TYPE (arg1))
7667 /* signed_type does not work on pointer types. */
7668 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7670 /* The following case also applies to X < signed_max+1
7671 and X >= signed_max+1 because previous transformations. */
7672 if (code == LE_EXPR || code == GT_EXPR)
7675 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
7676 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
7678 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7679 type, fold_convert (st0, arg0),
7680 fold_convert (st1, integer_zero_node)));
7686 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7687 a MINUS_EXPR of a constant, we can convert it into a comparison with
7688 a revised constant as long as no overflow occurs. */
7689 if ((code == EQ_EXPR || code == NE_EXPR)
7690 && TREE_CODE (arg1) == INTEGER_CST
7691 && (TREE_CODE (arg0) == PLUS_EXPR
7692 || TREE_CODE (arg0) == MINUS_EXPR)
7693 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7694 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7695 ? MINUS_EXPR : PLUS_EXPR,
7696 arg1, TREE_OPERAND (arg0, 1), 0))
7697 && ! TREE_CONSTANT_OVERFLOW (tem))
7698 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7700 /* Similarly for a NEGATE_EXPR. */
7701 else if ((code == EQ_EXPR || code == NE_EXPR)
7702 && TREE_CODE (arg0) == NEGATE_EXPR
7703 && TREE_CODE (arg1) == INTEGER_CST
7704 && 0 != (tem = negate_expr (arg1))
7705 && TREE_CODE (tem) == INTEGER_CST
7706 && ! TREE_CONSTANT_OVERFLOW (tem))
7707 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7709 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7710 for !=. Don't do this for ordered comparisons due to overflow. */
7711 else if ((code == NE_EXPR || code == EQ_EXPR)
7712 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7713 return fold (build2 (code, type,
7714 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7716 /* If we are widening one operand of an integer comparison,
7717 see if the other operand is similarly being widened. Perhaps we
7718 can do the comparison in the narrower type. */
7719 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7720 && TREE_CODE (arg0) == NOP_EXPR
7721 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7722 && (code == EQ_EXPR || code == NE_EXPR
7723 || TYPE_UNSIGNED (TREE_TYPE (arg0))
7724 == TYPE_UNSIGNED (TREE_TYPE (tem)))
7725 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7726 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7727 || (TREE_CODE (t1) == INTEGER_CST
7728 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7729 return fold (build2 (code, type, tem,
7730 fold_convert (TREE_TYPE (tem), t1)));
7732 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7733 constant, we can simplify it. */
7734 else if (TREE_CODE (arg1) == INTEGER_CST
7735 && (TREE_CODE (arg0) == MIN_EXPR
7736 || TREE_CODE (arg0) == MAX_EXPR)
7737 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7738 return optimize_minmax_comparison (t);
7740 /* If we are comparing an ABS_EXPR with a constant, we can
7741 convert all the cases into explicit comparisons, but they may
7742 well not be faster than doing the ABS and one comparison.
7743 But ABS (X) <= C is a range comparison, which becomes a subtraction
7744 and a comparison, and is probably faster. */
7745 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7746 && TREE_CODE (arg0) == ABS_EXPR
7747 && ! TREE_SIDE_EFFECTS (arg0)
7748 && (0 != (tem = negate_expr (arg1)))
7749 && TREE_CODE (tem) == INTEGER_CST
7750 && ! TREE_CONSTANT_OVERFLOW (tem))
7751 return fold (build2 (TRUTH_ANDIF_EXPR, type,
7752 build2 (GE_EXPR, type,
7753 TREE_OPERAND (arg0, 0), tem),
7754 build2 (LE_EXPR, type,
7755 TREE_OPERAND (arg0, 0), arg1)));
7757 /* If this is an EQ or NE comparison with zero and ARG0 is
7758 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7759 two operations, but the latter can be done in one less insn
7760 on machines that have only two-operand insns or on which a
7761 constant cannot be the first operand. */
7762 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7763 && TREE_CODE (arg0) == BIT_AND_EXPR)
7765 tree arg00 = TREE_OPERAND (arg0, 0);
7766 tree arg01 = TREE_OPERAND (arg0, 1);
7767 if (TREE_CODE (arg00) == LSHIFT_EXPR
7768 && integer_onep (TREE_OPERAND (arg00, 0)))
7770 fold (build2 (code, type,
7771 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7772 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
7773 arg01, TREE_OPERAND (arg00, 1)),
7774 fold_convert (TREE_TYPE (arg0),
7777 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7778 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7780 fold (build2 (code, type,
7781 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7782 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
7783 arg00, TREE_OPERAND (arg01, 1)),
7784 fold_convert (TREE_TYPE (arg0),
7789 /* If this is an NE or EQ comparison of zero against the result of a
7790 signed MOD operation whose second operand is a power of 2, make
7791 the MOD operation unsigned since it is simpler and equivalent. */
7792 if ((code == NE_EXPR || code == EQ_EXPR)
7793 && integer_zerop (arg1)
7794 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
7795 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7796 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7797 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7798 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7799 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7801 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
7802 tree newmod = build2 (TREE_CODE (arg0), newtype,
7803 fold_convert (newtype,
7804 TREE_OPERAND (arg0, 0)),
7805 fold_convert (newtype,
7806 TREE_OPERAND (arg0, 1)));
7808 return build2 (code, type, newmod, fold_convert (newtype, arg1));
7811 /* If this is an NE comparison of zero with an AND of one, remove the
7812 comparison since the AND will give the correct value. */
7813 if (code == NE_EXPR && integer_zerop (arg1)
7814 && TREE_CODE (arg0) == BIT_AND_EXPR
7815 && integer_onep (TREE_OPERAND (arg0, 1)))
7816 return fold_convert (type, arg0);
7818 /* If we have (A & C) == C where C is a power of 2, convert this into
7819 (A & C) != 0. Similarly for NE_EXPR. */
7820 if ((code == EQ_EXPR || code == NE_EXPR)
7821 && TREE_CODE (arg0) == BIT_AND_EXPR
7822 && integer_pow2p (TREE_OPERAND (arg0, 1))
7823 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7824 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7825 arg0, integer_zero_node));
7827 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7828 2, then fold the expression into shifts and logical operations. */
7829 tem = fold_single_bit_test (code, arg0, arg1, type);
7833 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7834 Similarly for NE_EXPR. */
7835 if ((code == EQ_EXPR || code == NE_EXPR)
7836 && TREE_CODE (arg0) == BIT_AND_EXPR
7837 && TREE_CODE (arg1) == INTEGER_CST
7838 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7841 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7842 arg1, build1 (BIT_NOT_EXPR,
7843 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7844 TREE_OPERAND (arg0, 1))));
7845 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7846 if (integer_nonzerop (dandnotc))
7847 return omit_one_operand (type, rslt, arg0);
7850 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7851 Similarly for NE_EXPR. */
7852 if ((code == EQ_EXPR || code == NE_EXPR)
7853 && TREE_CODE (arg0) == BIT_IOR_EXPR
7854 && TREE_CODE (arg1) == INTEGER_CST
7855 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7858 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7859 TREE_OPERAND (arg0, 1),
7860 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7861 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7862 if (integer_nonzerop (candnotd))
7863 return omit_one_operand (type, rslt, arg0);
7866 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7867 and similarly for >= into !=. */
7868 if ((code == LT_EXPR || code == GE_EXPR)
7869 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7870 && TREE_CODE (arg1) == LSHIFT_EXPR
7871 && integer_onep (TREE_OPERAND (arg1, 0)))
7872 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7873 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7874 TREE_OPERAND (arg1, 1)),
7875 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7877 else if ((code == LT_EXPR || code == GE_EXPR)
7878 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7879 && (TREE_CODE (arg1) == NOP_EXPR
7880 || TREE_CODE (arg1) == CONVERT_EXPR)
7881 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7882 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7884 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7885 fold_convert (TREE_TYPE (arg0),
7886 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7887 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7889 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7891 /* Simplify comparison of something with itself. (For IEEE
7892 floating-point, we can only do some of these simplifications.) */
7893 if (operand_equal_p (arg0, arg1, 0))
7898 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7899 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7900 return constant_boolean_node (1, type);
7905 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7906 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7907 return constant_boolean_node (1, type);
7908 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7911 /* For NE, we can only do this simplification if integer
7912 or we don't honor IEEE floating point NaNs. */
7913 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7914 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7916 /* ... fall through ... */
7919 return constant_boolean_node (0, type);
7925 /* If we are comparing an expression that just has comparisons
7926 of two integer values, arithmetic expressions of those comparisons,
7927 and constants, we can simplify it. There are only three cases
7928 to check: the two values can either be equal, the first can be
7929 greater, or the second can be greater. Fold the expression for
7930 those three values. Since each value must be 0 or 1, we have
7931 eight possibilities, each of which corresponds to the constant 0
7932 or 1 or one of the six possible comparisons.
7934 This handles common cases like (a > b) == 0 but also handles
7935 expressions like ((x > y) - (y > x)) > 0, which supposedly
7936 occur in macroized code. */
7938 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7940 tree cval1 = 0, cval2 = 0;
7943 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7944 /* Don't handle degenerate cases here; they should already
7945 have been handled anyway. */
7946 && cval1 != 0 && cval2 != 0
7947 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7948 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7949 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7950 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7951 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7952 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7953 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7955 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7956 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7958 /* We can't just pass T to eval_subst in case cval1 or cval2
7959 was the same as ARG1. */
7962 = fold (build2 (code, type,
7963 eval_subst (arg0, cval1, maxval,
7967 = fold (build2 (code, type,
7968 eval_subst (arg0, cval1, maxval,
7972 = fold (build2 (code, type,
7973 eval_subst (arg0, cval1, minval,
7977 /* All three of these results should be 0 or 1. Confirm they
7978 are. Then use those values to select the proper code
7981 if ((integer_zerop (high_result)
7982 || integer_onep (high_result))
7983 && (integer_zerop (equal_result)
7984 || integer_onep (equal_result))
7985 && (integer_zerop (low_result)
7986 || integer_onep (low_result)))
7988 /* Make a 3-bit mask with the high-order bit being the
7989 value for `>', the next for '=', and the low for '<'. */
7990 switch ((integer_onep (high_result) * 4)
7991 + (integer_onep (equal_result) * 2)
7992 + integer_onep (low_result))
7996 return omit_one_operand (type, integer_zero_node, arg0);
8017 return omit_one_operand (type, integer_one_node, arg0);
8020 tem = build2 (code, type, cval1, cval2);
8022 return save_expr (tem);
8029 /* If this is a comparison of a field, we may be able to simplify it. */
8030 if (((TREE_CODE (arg0) == COMPONENT_REF
8031 && lang_hooks.can_use_bit_fields_p ())
8032 || TREE_CODE (arg0) == BIT_FIELD_REF)
8033 && (code == EQ_EXPR || code == NE_EXPR)
8034 /* Handle the constant case even without -O
8035 to make sure the warnings are given. */
8036 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8038 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8043 /* If this is a comparison of complex values and either or both sides
8044 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8045 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8046 This may prevent needless evaluations. */
8047 if ((code == EQ_EXPR || code == NE_EXPR)
8048 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8049 && (TREE_CODE (arg0) == COMPLEX_EXPR
8050 || TREE_CODE (arg1) == COMPLEX_EXPR
8051 || TREE_CODE (arg0) == COMPLEX_CST
8052 || TREE_CODE (arg1) == COMPLEX_CST))
8054 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8055 tree real0, imag0, real1, imag1;
8057 arg0 = save_expr (arg0);
8058 arg1 = save_expr (arg1);
8059 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8060 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8061 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8062 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8064 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8067 fold (build2 (code, type, real0, real1)),
8068 fold (build2 (code, type, imag0, imag1))));
8071 /* Optimize comparisons of strlen vs zero to a compare of the
8072 first character of the string vs zero. To wit,
8073 strlen(ptr) == 0 => *ptr == 0
8074 strlen(ptr) != 0 => *ptr != 0
8075 Other cases should reduce to one of these two (or a constant)
8076 due to the return value of strlen being unsigned. */
8077 if ((code == EQ_EXPR || code == NE_EXPR)
8078 && integer_zerop (arg1)
8079 && TREE_CODE (arg0) == CALL_EXPR)
8081 tree fndecl = get_callee_fndecl (arg0);
8085 && DECL_BUILT_IN (fndecl)
8086 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8087 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8088 && (arglist = TREE_OPERAND (arg0, 1))
8089 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8090 && ! TREE_CHAIN (arglist))
8091 return fold (build2 (code, type,
8092 build1 (INDIRECT_REF, char_type_node,
8093 TREE_VALUE(arglist)),
8094 integer_zero_node));
8097 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8098 into a single range test. */
8099 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8100 && TREE_CODE (arg1) == INTEGER_CST
8101 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8102 && !integer_zerop (TREE_OPERAND (arg0, 1))
8103 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8104 && !TREE_OVERFLOW (arg1))
8106 t1 = fold_div_compare (code, type, arg0, arg1);
8107 if (t1 != NULL_TREE)
8111 /* Both ARG0 and ARG1 are known to be constants at this point. */
8112 t1 = fold_relational_const (code, type, arg0, arg1);
8113 return (t1 == NULL_TREE ? t : t1);
8116 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8117 so all simple results must be passed through pedantic_non_lvalue. */
8118 if (TREE_CODE (arg0) == INTEGER_CST)
8120 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8121 /* Only optimize constant conditions when the selected branch
8122 has the same type as the COND_EXPR. This avoids optimizing
8123 away "c ? x : throw", where the throw has a void type. */
8124 if (! VOID_TYPE_P (TREE_TYPE (tem))
8125 || VOID_TYPE_P (type))
8126 return pedantic_non_lvalue (tem);
8129 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8130 return pedantic_omit_one_operand (type, arg1, arg0);
8132 /* If we have A op B ? A : C, we may be able to convert this to a
8133 simpler expression, depending on the operation and the values
8134 of B and C. Signed zeros prevent all of these transformations,
8135 for reasons given above each one. */
8137 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8138 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8139 arg1, TREE_OPERAND (arg0, 1))
8140 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8142 tree arg2 = TREE_OPERAND (t, 2);
8143 enum tree_code comp_code = TREE_CODE (arg0);
8147 /* If we have A op 0 ? A : -A, consider applying the following
8150 A == 0? A : -A same as -A
8151 A != 0? A : -A same as A
8152 A >= 0? A : -A same as abs (A)
8153 A > 0? A : -A same as abs (A)
8154 A <= 0? A : -A same as -abs (A)
8155 A < 0? A : -A same as -abs (A)
8157 None of these transformations work for modes with signed
8158 zeros. If A is +/-0, the first two transformations will
8159 change the sign of the result (from +0 to -0, or vice
8160 versa). The last four will fix the sign of the result,
8161 even though the original expressions could be positive or
8162 negative, depending on the sign of A.
8164 Note that all these transformations are correct if A is
8165 NaN, since the two alternatives (A and -A) are also NaNs. */
8166 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
8167 ? real_zerop (TREE_OPERAND (arg0, 1))
8168 : integer_zerop (TREE_OPERAND (arg0, 1)))
8169 && TREE_CODE (arg2) == NEGATE_EXPR
8170 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
8174 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
8175 tem = fold_convert (type, negate_expr (tem));
8176 return pedantic_non_lvalue (tem);
8178 return pedantic_non_lvalue (fold_convert (type, arg1));
8181 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8182 arg1 = fold_convert (lang_hooks.types.signed_type
8183 (TREE_TYPE (arg1)), arg1);
8184 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8185 return pedantic_non_lvalue (fold_convert (type, arg1));
8188 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8189 arg1 = fold_convert (lang_hooks.types.signed_type
8190 (TREE_TYPE (arg1)), arg1);
8191 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8192 arg1 = negate_expr (fold_convert (type, arg1));
8193 return pedantic_non_lvalue (arg1);
8198 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
8199 A == 0 ? A : 0 is always 0 unless A is -0. Note that
8200 both transformations are correct when A is NaN: A != 0
8201 is then true, and A == 0 is false. */
8203 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8205 if (comp_code == NE_EXPR)
8206 return pedantic_non_lvalue (fold_convert (type, arg1));
8207 else if (comp_code == EQ_EXPR)
8208 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8211 /* Try some transformations of A op B ? A : B.
8213 A == B? A : B same as B
8214 A != B? A : B same as A
8215 A >= B? A : B same as max (A, B)
8216 A > B? A : B same as max (B, A)
8217 A <= B? A : B same as min (A, B)
8218 A < B? A : B same as min (B, A)
8220 As above, these transformations don't work in the presence
8221 of signed zeros. For example, if A and B are zeros of
8222 opposite sign, the first two transformations will change
8223 the sign of the result. In the last four, the original
8224 expressions give different results for (A=+0, B=-0) and
8225 (A=-0, B=+0), but the transformed expressions do not.
8227 The first two transformations are correct if either A or B
8228 is a NaN. In the first transformation, the condition will
8229 be false, and B will indeed be chosen. In the case of the
8230 second transformation, the condition A != B will be true,
8231 and A will be chosen.
8233 The conversions to max() and min() are not correct if B is
8234 a number and A is not. The conditions in the original
8235 expressions will be false, so all four give B. The min()
8236 and max() versions would give a NaN instead. */
8237 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8238 arg2, TREE_OPERAND (arg0, 0)))
8240 tree comp_op0 = TREE_OPERAND (arg0, 0);
8241 tree comp_op1 = TREE_OPERAND (arg0, 1);
8242 tree comp_type = TREE_TYPE (comp_op0);
8244 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8245 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8255 return pedantic_non_lvalue (fold_convert (type, arg2));
8257 return pedantic_non_lvalue (fold_convert (type, arg1));
8260 /* In C++ a ?: expression can be an lvalue, so put the
8261 operand which will be used if they are equal first
8262 so that we can convert this back to the
8263 corresponding COND_EXPR. */
8264 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8265 return pedantic_non_lvalue (fold_convert
8266 (type, fold (build2 (MIN_EXPR, comp_type,
8267 (comp_code == LE_EXPR
8268 ? comp_op0 : comp_op1),
8269 (comp_code == LE_EXPR
8270 ? comp_op1 : comp_op0)))));
8274 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8275 return pedantic_non_lvalue (fold_convert
8276 (type, fold (build2 (MAX_EXPR, comp_type,
8277 (comp_code == GE_EXPR
8278 ? comp_op0 : comp_op1),
8279 (comp_code == GE_EXPR
8280 ? comp_op1 : comp_op0)))));
8287 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8288 we might still be able to simplify this. For example,
8289 if C1 is one less or one more than C2, this might have started
8290 out as a MIN or MAX and been transformed by this function.
8291 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8293 if (INTEGRAL_TYPE_P (type)
8294 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8295 && TREE_CODE (arg2) == INTEGER_CST)
8299 /* We can replace A with C1 in this case. */
8300 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8301 return fold (build3 (code, type, TREE_OPERAND (t, 0), arg1,
8302 TREE_OPERAND (t, 2)));
8305 /* If C1 is C2 + 1, this is min(A, C2). */
8306 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
8308 && operand_equal_p (TREE_OPERAND (arg0, 1),
8309 const_binop (PLUS_EXPR, arg2,
8310 integer_one_node, 0),
8312 return pedantic_non_lvalue
8313 (fold (build2 (MIN_EXPR, type, arg1, arg2)));
8317 /* If C1 is C2 - 1, this is min(A, C2). */
8318 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
8320 && operand_equal_p (TREE_OPERAND (arg0, 1),
8321 const_binop (MINUS_EXPR, arg2,
8322 integer_one_node, 0),
8324 return pedantic_non_lvalue
8325 (fold (build2 (MIN_EXPR, type, arg1, arg2)));
8329 /* If C1 is C2 - 1, this is max(A, C2). */
8330 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
8332 && operand_equal_p (TREE_OPERAND (arg0, 1),
8333 const_binop (MINUS_EXPR, arg2,
8334 integer_one_node, 0),
8336 return pedantic_non_lvalue
8337 (fold (build2 (MAX_EXPR, type, arg1, arg2)));
8341 /* If C1 is C2 + 1, this is max(A, C2). */
8342 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
8344 && operand_equal_p (TREE_OPERAND (arg0, 1),
8345 const_binop (PLUS_EXPR, arg2,
8346 integer_one_node, 0),
8348 return pedantic_non_lvalue
8349 (fold (build2 (MAX_EXPR, type, arg1, arg2)));
8358 /* If the second operand is simpler than the third, swap them
8359 since that produces better jump optimization results. */
8360 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8361 TREE_OPERAND (t, 2), false))
8363 /* See if this can be inverted. If it can't, possibly because
8364 it was a floating-point inequality comparison, don't do
8366 tem = invert_truthvalue (arg0);
8368 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8369 return fold (build3 (code, type, tem,
8370 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8373 /* Convert A ? 1 : 0 to simply A. */
8374 if (integer_onep (TREE_OPERAND (t, 1))
8375 && integer_zerop (TREE_OPERAND (t, 2))
8376 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8377 call to fold will try to move the conversion inside
8378 a COND, which will recurse. In that case, the COND_EXPR
8379 is probably the best choice, so leave it alone. */
8380 && type == TREE_TYPE (arg0))
8381 return pedantic_non_lvalue (arg0);
8383 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8384 over COND_EXPR in cases such as floating point comparisons. */
8385 if (integer_zerop (TREE_OPERAND (t, 1))
8386 && integer_onep (TREE_OPERAND (t, 2))
8387 && truth_value_p (TREE_CODE (arg0)))
8388 return pedantic_non_lvalue (fold_convert (type,
8389 invert_truthvalue (arg0)));
8391 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8392 operation is simply A & 2. */
8394 if (integer_zerop (TREE_OPERAND (t, 2))
8395 && TREE_CODE (arg0) == NE_EXPR
8396 && integer_zerop (TREE_OPERAND (arg0, 1))
8397 && integer_pow2p (arg1)
8398 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8399 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8400 arg1, OEP_ONLY_CONST))
8401 return pedantic_non_lvalue (fold_convert (type,
8402 TREE_OPERAND (arg0, 0)));
8404 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8405 if (integer_zerop (TREE_OPERAND (t, 2))
8406 && truth_value_p (TREE_CODE (arg0))
8407 && truth_value_p (TREE_CODE (arg1)))
8408 return pedantic_non_lvalue (fold (build2 (TRUTH_ANDIF_EXPR, type,
8411 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8412 if (integer_onep (TREE_OPERAND (t, 2))
8413 && truth_value_p (TREE_CODE (arg0))
8414 && truth_value_p (TREE_CODE (arg1)))
8416 /* Only perform transformation if ARG0 is easily inverted. */
8417 tem = invert_truthvalue (arg0);
8418 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8419 return pedantic_non_lvalue (fold (build2 (TRUTH_ORIF_EXPR, type,
8426 /* When pedantic, a compound expression can be neither an lvalue
8427 nor an integer constant expression. */
8428 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8430 /* Don't let (0, 0) be null pointer constant. */
8431 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8432 : fold_convert (type, arg1);
8433 return pedantic_non_lvalue (tem);
8437 return build_complex (type, arg0, arg1);
8441 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8443 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8444 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8445 TREE_OPERAND (arg0, 1));
8446 else if (TREE_CODE (arg0) == COMPLEX_CST)
8447 return TREE_REALPART (arg0);
8448 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8449 return fold (build2 (TREE_CODE (arg0), type,
8450 fold (build1 (REALPART_EXPR, type,
8451 TREE_OPERAND (arg0, 0))),
8452 fold (build1 (REALPART_EXPR, type,
8453 TREE_OPERAND (arg0, 1)))));
8457 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8458 return fold_convert (type, integer_zero_node);
8459 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8460 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8461 TREE_OPERAND (arg0, 0));
8462 else if (TREE_CODE (arg0) == COMPLEX_CST)
8463 return TREE_IMAGPART (arg0);
8464 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8465 return fold (build2 (TREE_CODE (arg0), type,
8466 fold (build1 (IMAGPART_EXPR, type,
8467 TREE_OPERAND (arg0, 0))),
8468 fold (build1 (IMAGPART_EXPR, type,
8469 TREE_OPERAND (arg0, 1)))));
8472 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8474 case CLEANUP_POINT_EXPR:
8475 if (! has_cleanups (arg0))
8476 return TREE_OPERAND (t, 0);
8479 enum tree_code code0 = TREE_CODE (arg0);
8480 int kind0 = TREE_CODE_CLASS (code0);
8481 tree arg00 = TREE_OPERAND (arg0, 0);
8484 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8485 return fold (build1 (code0, type,
8486 fold (build1 (CLEANUP_POINT_EXPR,
8487 TREE_TYPE (arg00), arg00))));
8489 if (kind0 == '<' || kind0 == '2'
8490 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8491 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8492 || code0 == TRUTH_XOR_EXPR)
8494 arg01 = TREE_OPERAND (arg0, 1);
8496 if (TREE_CONSTANT (arg00)
8497 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8498 && ! has_cleanups (arg00)))
8499 return fold (build2 (code0, type, arg00,
8500 fold (build1 (CLEANUP_POINT_EXPR,
8501 TREE_TYPE (arg01), arg01))));
8503 if (TREE_CONSTANT (arg01))
8504 return fold (build2 (code0, type,
8505 fold (build1 (CLEANUP_POINT_EXPR,
8506 TREE_TYPE (arg00), arg00)),
8514 /* Check for a built-in function. */
8515 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8516 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8518 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8520 tree tmp = fold_builtin (t);
8528 } /* switch (code) */
8531 #ifdef ENABLE_FOLD_CHECKING
8534 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8535 static void fold_check_failed (tree, tree);
8536 void print_fold_checksum (tree);
8538 /* When --enable-checking=fold, compute a digest of expr before
8539 and after actual fold call to see if fold did not accidentally
8540 change original expr. */
8547 unsigned char checksum_before[16], checksum_after[16];
8550 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8551 md5_init_ctx (&ctx);
8552 fold_checksum_tree (expr, &ctx, ht);
8553 md5_finish_ctx (&ctx, checksum_before);
8556 ret = fold_1 (expr);
8558 md5_init_ctx (&ctx);
8559 fold_checksum_tree (expr, &ctx, ht);
8560 md5_finish_ctx (&ctx, checksum_after);
8563 if (memcmp (checksum_before, checksum_after, 16))
8564 fold_check_failed (expr, ret);
8570 print_fold_checksum (tree expr)
8573 unsigned char checksum[16], cnt;
8576 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8577 md5_init_ctx (&ctx);
8578 fold_checksum_tree (expr, &ctx, ht);
8579 md5_finish_ctx (&ctx, checksum);
8581 for (cnt = 0; cnt < 16; ++cnt)
8582 fprintf (stderr, "%02x", checksum[cnt]);
8583 putc ('\n', stderr);
8587 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8589 internal_error ("fold check: original tree changed by fold");
8593 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8596 enum tree_code code;
8597 char buf[sizeof (struct tree_decl)];
8600 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8601 > sizeof (struct tree_decl)
8602 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8606 slot = htab_find_slot (ht, expr, INSERT);
8610 code = TREE_CODE (expr);
8611 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8613 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8614 memcpy (buf, expr, tree_size (expr));
8616 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8618 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8620 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8621 memcpy (buf, expr, tree_size (expr));
8623 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8625 else if (TREE_CODE_CLASS (code) == 't'
8626 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8628 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8629 memcpy (buf, expr, tree_size (expr));
8631 TYPE_POINTER_TO (expr) = NULL;
8632 TYPE_REFERENCE_TO (expr) = NULL;
8634 md5_process_bytes (expr, tree_size (expr), ctx);
8635 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8636 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8637 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8638 len = TREE_CODE_LENGTH (code);
8639 switch (TREE_CODE_CLASS (code))
8645 md5_process_bytes (TREE_STRING_POINTER (expr),
8646 TREE_STRING_LENGTH (expr), ctx);
8649 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8650 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8653 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8663 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8664 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8667 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8668 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8677 case SAVE_EXPR: len = 2; break;
8678 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8679 case RTL_EXPR: len = 0; break;
8680 case WITH_CLEANUP_EXPR: len = 2; break;
8689 for (i = 0; i < len; ++i)
8690 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8693 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8694 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8695 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8696 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8697 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8698 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8699 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8700 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8701 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8702 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8703 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8706 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8707 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8708 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8709 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8710 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8711 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8712 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8713 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8714 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8715 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8724 /* Perform constant folding and related simplification of initializer
8725 expression EXPR. This behaves identically to "fold" but ignores
8726 potential run-time traps and exceptions that fold must preserve. */
8729 fold_initializer (tree expr)
8731 int saved_signaling_nans = flag_signaling_nans;
8732 int saved_trapping_math = flag_trapping_math;
8733 int saved_trapv = flag_trapv;
8736 flag_signaling_nans = 0;
8737 flag_trapping_math = 0;
8740 result = fold (expr);
8742 flag_signaling_nans = saved_signaling_nans;
8743 flag_trapping_math = saved_trapping_math;
8744 flag_trapv = saved_trapv;
8749 /* Determine if first argument is a multiple of second argument. Return 0 if
8750 it is not, or we cannot easily determined it to be.
8752 An example of the sort of thing we care about (at this point; this routine
8753 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8754 fold cases do now) is discovering that
8756 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8762 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8764 This code also handles discovering that
8766 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8768 is a multiple of 8 so we don't have to worry about dealing with a
8771 Note that we *look* inside a SAVE_EXPR only to determine how it was
8772 calculated; it is not safe for fold to do much of anything else with the
8773 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8774 at run time. For example, the latter example above *cannot* be implemented
8775 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8776 evaluation time of the original SAVE_EXPR is not necessarily the same at
8777 the time the new expression is evaluated. The only optimization of this
8778 sort that would be valid is changing
8780 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8784 SAVE_EXPR (I) * SAVE_EXPR (J)
8786 (where the same SAVE_EXPR (J) is used in the original and the
8787 transformed version). */
8790 multiple_of_p (tree type, tree top, tree bottom)
8792 if (operand_equal_p (top, bottom, 0))
8795 if (TREE_CODE (type) != INTEGER_TYPE)
8798 switch (TREE_CODE (top))
8801 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8802 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8806 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8807 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8810 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8814 op1 = TREE_OPERAND (top, 1);
8815 /* const_binop may not detect overflow correctly,
8816 so check for it explicitly here. */
8817 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8818 > TREE_INT_CST_LOW (op1)
8819 && TREE_INT_CST_HIGH (op1) == 0
8820 && 0 != (t1 = fold_convert (type,
8821 const_binop (LSHIFT_EXPR,
8824 && ! TREE_OVERFLOW (t1))
8825 return multiple_of_p (type, t1, bottom);
8830 /* Can't handle conversions from non-integral or wider integral type. */
8831 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8832 || (TYPE_PRECISION (type)
8833 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8836 /* .. fall through ... */
8839 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8842 if (TREE_CODE (bottom) != INTEGER_CST
8843 || (TYPE_UNSIGNED (type)
8844 && (tree_int_cst_sgn (top) < 0
8845 || tree_int_cst_sgn (bottom) < 0)))
8847 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8855 /* Return true if `t' is known to be non-negative. */
8858 tree_expr_nonnegative_p (tree t)
8860 switch (TREE_CODE (t))
8866 return tree_int_cst_sgn (t) >= 0;
8869 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8872 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8873 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8874 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8876 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8877 both unsigned and at least 2 bits shorter than the result. */
8878 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8879 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8880 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8882 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8883 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8884 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8885 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8887 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8888 TYPE_PRECISION (inner2)) + 1;
8889 return prec < TYPE_PRECISION (TREE_TYPE (t));
8895 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8897 /* x * x for floating point x is always non-negative. */
8898 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8900 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8901 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8904 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8905 both unsigned and their total bits is shorter than the result. */
8906 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8907 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8908 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8910 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8911 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8912 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8913 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8914 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8915 < TYPE_PRECISION (TREE_TYPE (t));
8919 case TRUNC_DIV_EXPR:
8921 case FLOOR_DIV_EXPR:
8922 case ROUND_DIV_EXPR:
8923 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8924 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8926 case TRUNC_MOD_EXPR:
8928 case FLOOR_MOD_EXPR:
8929 case ROUND_MOD_EXPR:
8930 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8933 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8934 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8937 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8938 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8941 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8942 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8946 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8947 tree outer_type = TREE_TYPE (t);
8949 if (TREE_CODE (outer_type) == REAL_TYPE)
8951 if (TREE_CODE (inner_type) == REAL_TYPE)
8952 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8953 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8955 if (TYPE_UNSIGNED (inner_type))
8957 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8960 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8962 if (TREE_CODE (inner_type) == REAL_TYPE)
8963 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
8964 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8965 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8966 && TYPE_UNSIGNED (inner_type);
8972 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8973 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8975 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8977 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8978 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8980 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8981 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8983 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8985 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8987 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8988 case NON_LVALUE_EXPR:
8989 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8991 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8993 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8997 tree fndecl = get_callee_fndecl (t);
8998 tree arglist = TREE_OPERAND (t, 1);
9000 && DECL_BUILT_IN (fndecl)
9001 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9002 switch (DECL_FUNCTION_CODE (fndecl))
9004 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9005 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9006 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9007 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9009 CASE_BUILTIN_F (BUILT_IN_ACOS)
9010 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9011 CASE_BUILTIN_F (BUILT_IN_CABS)
9012 CASE_BUILTIN_F (BUILT_IN_COSH)
9013 CASE_BUILTIN_F (BUILT_IN_ERFC)
9014 CASE_BUILTIN_F (BUILT_IN_EXP)
9015 CASE_BUILTIN_F (BUILT_IN_EXP10)
9016 CASE_BUILTIN_F (BUILT_IN_EXP2)
9017 CASE_BUILTIN_F (BUILT_IN_FABS)
9018 CASE_BUILTIN_F (BUILT_IN_FDIM)
9019 CASE_BUILTIN_F (BUILT_IN_FREXP)
9020 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9021 CASE_BUILTIN_F (BUILT_IN_POW10)
9022 CASE_BUILTIN_I (BUILT_IN_FFS)
9023 CASE_BUILTIN_I (BUILT_IN_PARITY)
9024 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9028 CASE_BUILTIN_F (BUILT_IN_SQRT)
9029 /* sqrt(-0.0) is -0.0. */
9030 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9032 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9034 CASE_BUILTIN_F (BUILT_IN_ASINH)
9035 CASE_BUILTIN_F (BUILT_IN_ATAN)
9036 CASE_BUILTIN_F (BUILT_IN_ATANH)
9037 CASE_BUILTIN_F (BUILT_IN_CBRT)
9038 CASE_BUILTIN_F (BUILT_IN_CEIL)
9039 CASE_BUILTIN_F (BUILT_IN_ERF)
9040 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9041 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9042 CASE_BUILTIN_F (BUILT_IN_FMOD)
9043 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9044 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9045 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9046 CASE_BUILTIN_F (BUILT_IN_LRINT)
9047 CASE_BUILTIN_F (BUILT_IN_LROUND)
9048 CASE_BUILTIN_F (BUILT_IN_MODF)
9049 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9050 CASE_BUILTIN_F (BUILT_IN_POW)
9051 CASE_BUILTIN_F (BUILT_IN_RINT)
9052 CASE_BUILTIN_F (BUILT_IN_ROUND)
9053 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9054 CASE_BUILTIN_F (BUILT_IN_SINH)
9055 CASE_BUILTIN_F (BUILT_IN_TANH)
9056 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9057 /* True if the 1st argument is nonnegative. */
9058 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9060 CASE_BUILTIN_F (BUILT_IN_FMAX)
9061 /* True if the 1st OR 2nd arguments are nonnegative. */
9062 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9063 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9065 CASE_BUILTIN_F (BUILT_IN_FMIN)
9066 /* True if the 1st AND 2nd arguments are nonnegative. */
9067 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9068 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9070 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9071 /* True if the 2nd argument is nonnegative. */
9072 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9076 #undef CASE_BUILTIN_F
9077 #undef CASE_BUILTIN_I
9081 /* ... fall through ... */
9084 if (truth_value_p (TREE_CODE (t)))
9085 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9089 /* We don't know sign of `t', so be conservative and return false. */
9093 /* Return true when T is an address and is known to be nonzero.
9094 For floating point we further ensure that T is not denormal.
9095 Similar logic is present in nonzero_address in rtlanal.h */
9098 tree_expr_nonzero_p (tree t)
9100 tree type = TREE_TYPE (t);
9102 /* Doing something useful for floating point would need more work. */
9103 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9106 switch (TREE_CODE (t))
9109 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9110 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9113 return !integer_zerop (t);
9116 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9118 /* With the presence of negative values it is hard
9119 to say something. */
9120 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9121 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9123 /* One of operands must be positive and the other non-negative. */
9124 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9125 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9130 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9132 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9133 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9139 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9140 tree outer_type = TREE_TYPE (t);
9142 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9143 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9148 /* Weak declarations may link to NULL. */
9149 if (DECL_P (TREE_OPERAND (t, 0)))
9150 return !DECL_WEAK (TREE_OPERAND (t, 0));
9151 /* Constants and all other cases are never weak. */
9155 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9156 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9159 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9160 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9163 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9165 /* When both operands are nonzero, then MAX must be too. */
9166 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9169 /* MAX where operand 0 is positive is positive. */
9170 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9172 /* MAX where operand 1 is positive is positive. */
9173 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9174 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9181 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9184 case NON_LVALUE_EXPR:
9185 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9188 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9189 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9197 /* Return true if `r' is known to be non-negative.
9198 Only handles constants at the moment. */
9201 rtl_expr_nonnegative_p (rtx r)
9203 switch (GET_CODE (r))
9206 return INTVAL (r) >= 0;
9209 if (GET_MODE (r) == VOIDmode)
9210 return CONST_DOUBLE_HIGH (r) >= 0;
9218 units = CONST_VECTOR_NUNITS (r);
9220 for (i = 0; i < units; ++i)
9222 elt = CONST_VECTOR_ELT (r, i);
9223 if (!rtl_expr_nonnegative_p (elt))
9232 /* These are always nonnegative. */
9241 /* See if we are applying CODE, a relational to the highest or lowest
9242 possible integer of TYPE. If so, then the result is a compile
9246 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9251 enum tree_code code = *code_p;
9252 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
9254 if (TREE_CODE (op1) == INTEGER_CST
9255 && ! TREE_CONSTANT_OVERFLOW (op1)
9256 && width <= HOST_BITS_PER_WIDE_INT
9257 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9258 || POINTER_TYPE_P (TREE_TYPE (op1))))
9260 unsigned HOST_WIDE_INT signed_max;
9261 unsigned HOST_WIDE_INT max, min;
9263 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9265 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9267 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9273 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9276 if (TREE_INT_CST_HIGH (op1) == 0
9277 && TREE_INT_CST_LOW (op1) == max)
9281 return omit_one_operand (type, integer_zero_node, op0);
9287 return omit_one_operand (type, integer_one_node, op0);
9293 /* The GE_EXPR and LT_EXPR cases above are not normally
9294 reached because of previous transformations. */
9299 else if (TREE_INT_CST_HIGH (op1) == 0
9300 && TREE_INT_CST_LOW (op1) == max - 1)
9305 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9309 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9314 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9315 && TREE_INT_CST_LOW (op1) == min)
9319 return omit_one_operand (type, integer_zero_node, op0);
9326 return omit_one_operand (type, integer_one_node, op0);
9335 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9336 && TREE_INT_CST_LOW (op1) == min + 1)
9341 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9345 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9351 else if (TREE_INT_CST_HIGH (op1) == 0
9352 && TREE_INT_CST_LOW (op1) == signed_max
9353 && TYPE_UNSIGNED (TREE_TYPE (op1))
9354 /* signed_type does not work on pointer types. */
9355 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9357 /* The following case also applies to X < signed_max+1
9358 and X >= signed_max+1 because previous transformations. */
9359 if (code == LE_EXPR || code == GT_EXPR)
9361 tree st0, st1, exp, retval;
9362 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9363 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
9365 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9367 fold_convert (st0, op0),
9368 fold_convert (st1, integer_zero_node));
9371 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9373 TREE_OPERAND (exp, 0),
9374 TREE_OPERAND (exp, 1));
9376 /* If we are in gimple form, then returning EXP would create
9377 non-gimple expressions. Clearing it is safe and insures
9378 we do not allow a non-gimple expression to escape. */
9382 return (retval ? retval : exp);
9391 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9392 attempt to fold the expression to a constant without modifying TYPE,
9395 If the expression could be simplified to a constant, then return
9396 the constant. If the expression would not be simplified to a
9397 constant, then return NULL_TREE.
9399 Note this is primarily designed to be called after gimplification
9400 of the tree structures and when at least one operand is a constant.
9401 As a result of those simplifying assumptions this routine is far
9402 simpler than the generic fold routine. */
9405 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9413 /* If this is a commutative operation, and ARG0 is a constant, move it
9414 to ARG1 to reduce the number of tests below. */
9415 if (commutative_tree_code (code)
9416 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9423 /* If either operand is a complex type, extract its real component. */
9424 if (TREE_CODE (op0) == COMPLEX_CST)
9425 subop0 = TREE_REALPART (op0);
9429 if (TREE_CODE (op1) == COMPLEX_CST)
9430 subop1 = TREE_REALPART (op1);
9434 /* Note if either argument is not a real or integer constant.
9435 With a few exceptions, simplification is limited to cases
9436 where both arguments are constants. */
9437 if ((TREE_CODE (subop0) != INTEGER_CST
9438 && TREE_CODE (subop0) != REAL_CST)
9439 || (TREE_CODE (subop1) != INTEGER_CST
9440 && TREE_CODE (subop1) != REAL_CST))
9446 /* (plus (address) (const_int)) is a constant. */
9447 if (TREE_CODE (op0) == PLUS_EXPR
9448 && TREE_CODE (op1) == INTEGER_CST
9449 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9450 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9451 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9453 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9455 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9456 const_binop (PLUS_EXPR, op1,
9457 TREE_OPERAND (op0, 1), 0));
9465 /* Both arguments are constants. Simplify. */
9466 tem = const_binop (code, op0, op1, 0);
9467 if (tem != NULL_TREE)
9469 /* The return value should always have the same type as
9470 the original expression. */
9471 if (TREE_TYPE (tem) != type)
9472 tem = fold_convert (type, tem);
9479 /* Fold &x - &x. This can happen from &x.foo - &x.
9480 This is unsafe for certain floats even in non-IEEE formats.
9481 In IEEE, it is unsafe because it does wrong for NaNs.
9482 Also note that operand_equal_p is always false if an
9483 operand is volatile. */
9484 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9485 return fold_convert (type, integer_zero_node);
9491 /* Special case multiplication or bitwise AND where one argument
9493 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9494 return omit_one_operand (type, op1, op0);
9496 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9497 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9498 && real_zerop (op1))
9499 return omit_one_operand (type, op1, op0);
9504 /* Special case when we know the result will be all ones. */
9505 if (integer_all_onesp (op1))
9506 return omit_one_operand (type, op1, op0);
9510 case TRUNC_DIV_EXPR:
9511 case ROUND_DIV_EXPR:
9512 case FLOOR_DIV_EXPR:
9514 case EXACT_DIV_EXPR:
9515 case TRUNC_MOD_EXPR:
9516 case ROUND_MOD_EXPR:
9517 case FLOOR_MOD_EXPR:
9520 /* Division by zero is undefined. */
9521 if (integer_zerop (op1))
9524 if (TREE_CODE (op1) == REAL_CST
9525 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9526 && real_zerop (op1))
9532 if (INTEGRAL_TYPE_P (type)
9533 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9534 return omit_one_operand (type, op1, op0);
9539 if (INTEGRAL_TYPE_P (type)
9540 && TYPE_MAX_VALUE (type)
9541 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9542 return omit_one_operand (type, op1, op0);
9547 /* Optimize -1 >> x for arithmetic right shifts. */
9548 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
9549 return omit_one_operand (type, op0, op1);
9550 /* ... fall through ... */
9553 if (integer_zerop (op0))
9554 return omit_one_operand (type, op0, op1);
9556 /* Since negative shift count is not well-defined, don't
9557 try to compute it in the compiler. */
9558 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
9565 /* -1 rotated either direction by any amount is still -1. */
9566 if (integer_all_onesp (op0))
9567 return omit_one_operand (type, op0, op1);
9569 /* 0 rotated either direction by any amount is still zero. */
9570 if (integer_zerop (op0))
9571 return omit_one_operand (type, op0, op1);
9577 return build_complex (type, op0, op1);
9586 /* If one arg is a real or integer constant, put it last. */
9587 if ((TREE_CODE (op0) == INTEGER_CST
9588 && TREE_CODE (op1) != INTEGER_CST)
9589 || (TREE_CODE (op0) == REAL_CST
9590 && TREE_CODE (op0) != REAL_CST))
9597 code = swap_tree_comparison (code);
9600 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9601 This transformation affects the cases which are handled in later
9602 optimizations involving comparisons with non-negative constants. */
9603 if (TREE_CODE (op1) == INTEGER_CST
9604 && TREE_CODE (op0) != INTEGER_CST
9605 && tree_int_cst_sgn (op1) > 0)
9611 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9616 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9624 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
9631 return fold_relational_const (code, type, op0, op1);
9634 /* This could probably be handled. */
9637 case TRUTH_AND_EXPR:
9638 /* If second arg is constant zero, result is zero, but first arg
9639 must be evaluated. */
9640 if (integer_zerop (op1))
9641 return omit_one_operand (type, op1, op0);
9642 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
9643 case will be handled here. */
9644 if (integer_zerop (op0))
9645 return omit_one_operand (type, op0, op1);
9646 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9647 return constant_boolean_node (true, type);
9651 /* If second arg is constant true, result is true, but we must
9652 evaluate first arg. */
9653 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
9654 return omit_one_operand (type, op1, op0);
9655 /* Likewise for first arg, but note this only occurs here for
9657 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
9658 return omit_one_operand (type, op0, op1);
9659 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9660 return constant_boolean_node (false, type);
9663 case TRUTH_XOR_EXPR:
9664 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9666 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
9667 return constant_boolean_node (x, type);
9676 /* Given the components of a unary expression CODE, TYPE and OP0,
9677 attempt to fold the expression to a constant without modifying
9680 If the expression could be simplified to a constant, then return
9681 the constant. If the expression would not be simplified to a
9682 constant, then return NULL_TREE.
9684 Note this is primarily designed to be called after gimplification
9685 of the tree structures and when op0 is a constant. As a result
9686 of those simplifying assumptions this routine is far simpler than
9687 the generic fold routine. */
9690 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
9693 /* Make sure we have a suitable constant argument. */
9694 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
9698 if (TREE_CODE (op0) == COMPLEX_CST)
9699 subop = TREE_REALPART (op0);
9703 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
9712 case FIX_TRUNC_EXPR:
9713 case FIX_FLOOR_EXPR:
9715 return fold_convert_const (code, type, op0);
9718 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
9719 return fold_negate_const (op0, type);
9724 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
9725 return fold_abs_const (op0, type);
9730 if (TREE_CODE (op0) == INTEGER_CST)
9731 return fold_not_const (op0, type);
9736 if (TREE_CODE (op0) == COMPLEX_CST)
9737 return TREE_REALPART (op0);
9742 if (TREE_CODE (op0) == COMPLEX_CST)
9743 return TREE_IMAGPART (op0);
9748 if (TREE_CODE (op0) == COMPLEX_CST
9749 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
9750 return build_complex (type, TREE_REALPART (op0),
9751 negate_expr (TREE_IMAGPART (op0)));
9759 /* If EXP represents referencing an element in a constant string
9760 (either via pointer arithmetic or array indexing), return the
9761 tree representing the value accessed, otherwise return NULL. */
9764 fold_read_from_constant_string (tree exp)
9766 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
9768 tree exp1 = TREE_OPERAND (exp, 0);
9772 if (TREE_CODE (exp) == INDIRECT_REF)
9774 string = string_constant (exp1, &index);
9778 tree domain = TYPE_DOMAIN (TREE_TYPE (exp1));
9779 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
9780 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
9782 /* Optimize the special-case of a zero lower bound.
9784 We convert the low_bound to sizetype to avoid some problems
9785 with constant folding. (E.g. suppose the lower bound is 1,
9786 and its mode is QI. Without the conversion,l (ARRAY
9787 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9788 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
9789 if (! integer_zerop (low_bound))
9790 index = size_diffop (index, fold_convert (sizetype, low_bound));
9796 && TREE_CODE (string) == STRING_CST
9797 && TREE_CODE (index) == INTEGER_CST
9798 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
9799 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
9801 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
9802 return fold_convert (TREE_TYPE (exp),
9803 build_int_2 ((TREE_STRING_POINTER (string)
9804 [TREE_INT_CST_LOW (index)]), 0));
9809 /* Return the tree for neg (ARG0) when ARG0 is known to be either
9810 an integer constant or real constant.
9812 TYPE is the type of the result. */
9815 fold_negate_const (tree arg0, tree type)
9819 if (TREE_CODE (arg0) == INTEGER_CST)
9821 unsigned HOST_WIDE_INT low;
9823 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
9824 TREE_INT_CST_HIGH (arg0),
9826 t = build_int_2 (low, high);
9827 TREE_TYPE (t) = type;
9829 = (TREE_OVERFLOW (arg0)
9830 | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
9831 TREE_CONSTANT_OVERFLOW (t)
9832 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
9834 else if (TREE_CODE (arg0) == REAL_CST)
9835 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
9836 #ifdef ENABLE_CHECKING
9844 /* Return the tree for abs (ARG0) when ARG0 is known to be either
9845 an integer constant or real constant.
9847 TYPE is the type of the result. */
9850 fold_abs_const (tree arg0, tree type)
9854 if (TREE_CODE (arg0) == INTEGER_CST)
9856 /* If the value is unsigned, then the absolute value is
9857 the same as the ordinary value. */
9858 if (TYPE_UNSIGNED (type))
9860 /* Similarly, if the value is non-negative. */
9861 else if (INT_CST_LT (integer_minus_one_node, arg0))
9863 /* If the value is negative, then the absolute value is
9867 unsigned HOST_WIDE_INT low;
9869 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
9870 TREE_INT_CST_HIGH (arg0),
9872 t = build_int_2 (low, high);
9873 TREE_TYPE (t) = type;
9875 = (TREE_OVERFLOW (arg0)
9876 | force_fit_type (t, overflow));
9877 TREE_CONSTANT_OVERFLOW (t)
9878 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
9882 else if (TREE_CODE (arg0) == REAL_CST)
9884 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
9885 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
9889 #ifdef ENABLE_CHECKING
9897 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
9898 constant. TYPE is the type of the result. */
9901 fold_not_const (tree arg0, tree type)
9905 if (TREE_CODE (arg0) == INTEGER_CST)
9907 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
9908 ~ TREE_INT_CST_HIGH (arg0));
9909 TREE_TYPE (t) = type;
9910 force_fit_type (t, 0);
9911 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
9912 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
9914 #ifdef ENABLE_CHECKING
9922 /* Given CODE, a relational operator, the target type, TYPE and two
9923 constant operands OP0 and OP1, return the result of the
9924 relational operation. If the result is not a compile time
9925 constant, then return NULL_TREE. */
9928 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
9933 /* From here on, the only cases we handle are when the result is
9934 known to be a constant.
9936 To compute GT, swap the arguments and do LT.
9937 To compute GE, do LT and invert the result.
9938 To compute LE, swap the arguments, do LT and invert the result.
9939 To compute NE, do EQ and invert the result.
9941 Therefore, the code below must handle only EQ and LT. */
9943 if (code == LE_EXPR || code == GT_EXPR)
9945 tem = op0, op0 = op1, op1 = tem;
9946 code = swap_tree_comparison (code);
9949 /* Note that it is safe to invert for real values here because we
9950 will check below in the one case that it matters. */
9954 if (code == NE_EXPR || code == GE_EXPR)
9957 code = invert_tree_comparison (code, false);
9960 /* Compute a result for LT or EQ if args permit;
9961 Otherwise return T. */
9962 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9964 if (code == EQ_EXPR)
9965 tem = build_int_2 (tree_int_cst_equal (op0, op1), 0);
9967 tem = build_int_2 ((TYPE_UNSIGNED (TREE_TYPE (op0))
9968 ? INT_CST_LT_UNSIGNED (op0, op1)
9969 : INT_CST_LT (op0, op1)),
9973 else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
9974 && integer_zerop (op1) && tree_expr_nonzero_p (op0))
9975 tem = build_int_2 (0, 0);
9977 /* Two real constants can be compared explicitly. */
9978 else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
9980 /* If either operand is a NaN, the result is false with two
9981 exceptions: First, an NE_EXPR is true on NaNs, but that case
9982 is already handled correctly since we will be inverting the
9983 result for NE_EXPR. Second, if we had inverted a LE_EXPR
9984 or a GE_EXPR into a LT_EXPR, we must return true so that it
9985 will be inverted into false. */
9987 if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
9988 || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
9989 tem = build_int_2 (invert && code == LT_EXPR, 0);
9991 else if (code == EQ_EXPR)
9992 tem = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
9993 TREE_REAL_CST (op1)),
9996 tem = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (op0),
9997 TREE_REAL_CST (op1)),
10001 if (tem == NULL_TREE)
10005 TREE_INT_CST_LOW (tem) ^= 1;
10007 TREE_TYPE (tem) = type;
10008 if (TREE_CODE (type) == BOOLEAN_TYPE)
10009 return lang_hooks.truthvalue_conversion (tem);
10013 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10014 avoid confusing the gimplify process. */
10017 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10019 if (TREE_CODE (t) == INDIRECT_REF)
10021 t = TREE_OPERAND (t, 0);
10022 if (TREE_TYPE (t) != ptrtype)
10023 t = build1 (NOP_EXPR, ptrtype, t);
10028 while (TREE_CODE (base) == COMPONENT_REF
10029 || TREE_CODE (base) == ARRAY_REF)
10030 base = TREE_OPERAND (base, 0);
10032 TREE_ADDRESSABLE (base) = 1;
10034 t = build1 (ADDR_EXPR, ptrtype, t);
10041 build_fold_addr_expr (tree t)
10043 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10046 /* Builds an expression for an indirection through T, simplifying some
10050 build_fold_indirect_ref (tree t)
10052 tree type = TREE_TYPE (TREE_TYPE (t));
10057 if (TREE_CODE (sub) == ADDR_EXPR)
10059 tree op = TREE_OPERAND (sub, 0);
10060 tree optype = TREE_TYPE (op);
10062 if (lang_hooks.types_compatible_p (type, optype))
10064 /* *(foo *)&fooarray => fooarray[0] */
10065 else if (TREE_CODE (optype) == ARRAY_TYPE
10066 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10067 return build2 (ARRAY_REF, type, op, size_zero_node);
10070 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10071 subtype = TREE_TYPE (sub);
10072 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10073 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10075 sub = build_fold_indirect_ref (sub);
10076 return build2 (ARRAY_REF, type, sub, size_zero_node);
10079 return build1 (INDIRECT_REF, type, t);
10082 #include "gt-fold-const.h"