/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */
#include "coretypes.h"
#include "langhooks.h"
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert_const (enum tree_code, tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);

static tree fold_negate_const (tree, tree);
static tree fold_abs_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
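
/* Illustrative note (added, not in the original source): the encoding
   packs "less", "equal" and "greater" into bits 0, 1 and 2, so combining
   two comparisons with bitwise AND/OR yields the compcode of the
   combined comparison.  A minimal sketch:  */
#if 0
/* (a < b) || (a == b) combines to (a <= b).  */
int ored = COMPCODE_LT | COMPCODE_EQ;    /* == COMPCODE_LE (3) */
/* (a <= b) && (a >= b) combines to (a == b).  */
int anded = COMPCODE_LE & COMPCODE_GE;   /* == COMPCODE_EQ (2) */
#endif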
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
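
/* Worked example (illustrative, not part of the original file): with
   8-bit two's complement values, 100 + 100 wraps to -56, and the macro
   flags the overflow because the operands agree in sign but the sum does
   not: ~(100 ^ 100) & (100 ^ -56) has its sign bit set.  Adding operands
   of opposite signs can never overflow, and then ~((a) ^ (b)) clears the
   sign bit, so the test yields 0.  */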
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
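
/* Worked example (illustrative, comment added): with
   HOST_BITS_PER_WIDE_INT == 32, BASE is 2^16, and for x == 0x12345678 we
   get LOWPART (x) == 0x5678 and HIGHPART (x) == 0x1234, so that
   x == LOWPART (x) + HIGHPART (x) * BASE.  */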
/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
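
/* A minimal round-trip sketch (illustrative only, assuming a 32-bit
   HOST_WIDE_INT):  */
#if 0
HOST_WIDE_INT w[4];
unsigned HOST_WIDE_INT lo;
HOST_WIDE_INT hi;

encode (w, 0xdeadbeef, 0x12345678);  /* w = {0xbeef, 0xdead, 0x5678, 0x1234} */
decode (w, &lo, &hi);                /* lo == 0xdeadbeef, hi == 0x12345678 */
#endif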
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TYPE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((unsigned HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
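
/* Usage sketch (illustrative fragment, not in the original source),
   assuming a 32-bit HOST_WIDE_INT:  */
#if 0
unsigned HOST_WIDE_INT lo;
HOST_WIDE_INT hi;

/* 2^31 + 2^31: the unsigned low words wrap to 0 and carry into the high
   word; the signed doubleword sum does not overflow, so the result is 0.  */
int ovf = add_double (0x80000000, 0, 0x80000000, 0, &lo, &hi);
/* lo == 0, hi == 1, ovf == 0.  */
#endif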
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);  /* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
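
/* Illustrative reading of the overflow check above (comment added, not
   in the original source): the full 8-word product is exact, and the
   doubleword result fits exactly when its upper half equals the sign
   extension of the lower half, i.e. all-one words for a negative *HV
   and all-zero words otherwise.  The two conditional
   neg_double/add_double corrections convert the unsigned top half into
   the signed one when an operand was negative.  */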
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
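
/* Shift example (illustrative fragment, not in the original source),
   assuming a 32-bit HOST_WIDE_INT and PREC == 64:  */
#if 0
unsigned HOST_WIDE_INT lo;
HOST_WIDE_INT hi;

/* Shifting the doubleword 1 left by 40 places lands the bit in the
   high word.  */
lshift_double (1, 0, 40, 64, &lo, &hi, 0);  /* lo == 0, hi == 1 << 8 */
#endif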
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];  /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
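
/* Rounding example (illustrative, not in the original source): dividing
   -7 by 2 under the different codes gives

     TRUNC_DIV_EXPR   quo = -3, rem = -1   (round toward zero)
     FLOOR_DIV_EXPR   quo = -4, rem =  1   (round toward -infinity)
     CEIL_DIV_EXPR    quo = -3, rem = -1   (round toward +infinity)
     ROUND_DIV_EXPR   quo = -4, rem =  1   (round to nearest; the .5 tie
                                            moves away from zero)

   In every case the true remainder satisfies num == quo * den + rem.  */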
/* Return true if built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            return fold_convert (type,
                                 fold (build (MINUS_EXPR, TREE_TYPE (t),
                                              negate_expr (TREE_OPERAND (t, 1)),
                                              TREE_OPERAND (t, 0))));
          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            return fold_convert (type,
                                 fold (build (MINUS_EXPR, TREE_TYPE (t),
                                              negate_expr (TREE_OPERAND (t, 0)),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold (build (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold (build (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
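
/* Decomposition example (illustrative, comment added): with
   CODE == PLUS_EXPR, splitting the tree "x + 3" stores the literal 3 in
   *LITP and returns x; splitting "x - 3" stores 3 in *MINUS_LITP
   instead; and splitting "x + c", where c is TREE_CONSTANT but not a
   literal (e.g. an ADDR_EXPR of a global), stores c in *CONP.  */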
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t2),
                          fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, fold_convert (type, t1),
                          fold_convert (type, TREE_OPERAND (t2, 0)));
        }

      return build (code, type, fold_convert (type, t1),
                    fold_convert (type, t2));
    }

  return fold (build (code, type, fold_convert (type, t1),
                      fold_convert (type, t2)));
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = -int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     its results.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow && ! is_sizetype))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
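
/* Usage sketch (illustrative fragment; the function is file-static):  */
#if 0
tree sum = int_const_binop (PLUS_EXPR, size_int (7), size_int (35), 0);
/* sum is the sizetype constant 42, computed via add_double; small
   sizetype results come back through the size_int_type_wide cache.  */
#endif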
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}
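
/* Formula note (comment added, not in the original source): the division
   above computes

     (r1 + i1*i) / (r2 + i2*i)
       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2)

   i.e. it multiplies by the conjugate of the divisor and divides each
   part by the squared magnitude, using TRUNC_DIV_EXPR for integral
   component types and RDIV_EXPR otherwise.  */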
/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST tree node.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}

/* Return an INTEGER_CST whose low-order HOST_BITS_PER_WIDE_INT bits are
   given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */
static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}
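
/* Usage sketch (illustrative fragment, not in the original source):
   size_binop folds constant size computations immediately.  */
#if 0
tree bytes = size_binop (MULT_EXPR, size_int (4), size_int (8));
/* bytes is the sizetype constant 32.  */
#endif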
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TYPE_UNSIGNED (type)
                                     < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;
          int overflow = 0;

          REAL_VALUE_TYPE r;
          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

          switch (code)
            {
            case FIX_TRUNC_EXPR:
              real_trunc (&r, VOIDmode, &x);
              break;

            case FIX_CEIL_EXPR:
              real_ceil (&r, VOIDmode, &x);
              break;

            case FIX_FLOOR_EXPR:
              real_floor (&r, VOIDmode, &x);
              break;

            default:
              abort ();
            }

          /* If R is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (r))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if R is less than the lower bound or greater than the
             upper bound.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              if (REAL_VALUES_LESS (r, l))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  if (REAL_VALUES_LESS (u, r))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, r);

          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
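  /* Saturation example (illustrative, comment added): converting the
     REAL_CST 1e30 to a 32-bit signed type yields INT_MAX (0x7fffffff)
     with TREE_OVERFLOW set, converting -1e30 yields INT_MIN, and a NaN
     converts to zero, matching the Java-style semantics described
     above.  */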
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return fold (build1 (NOP_EXPR, type, arg));

  if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
        return fold (build1 (FLOAT_EXPR, type, arg));
      if (TREE_CODE (orig) == REAL_TYPE)
        return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
                             type, arg));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          return fold_convert (type, tem);
        }
    }
  else if (TREE_CODE (type) == COMPLEX_TYPE)
    {
      if (INTEGRAL_TYPE_P (orig)
          || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == REAL_TYPE)
        return build (COMPLEX_EXPR, type,
                      fold_convert (TREE_TYPE (type), arg),
                      fold_convert (TREE_TYPE (type), integer_zero_node));
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tree rpart, ipart;

          if (TREE_CODE (arg) == COMPLEX_EXPR)
            {
              rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
              ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
              return fold (build (COMPLEX_EXPR, type, rpart, ipart));
            }

          arg = save_expr (arg);
          rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
          ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
          rpart = fold_convert (TREE_TYPE (type), rpart);
          ipart = fold_convert (TREE_TYPE (type), ipart);
          return fold (build (COMPLEX_EXPR, type, rpart, ipart));
        }
    }
  else if (TREE_CODE (type) == VECTOR_TYPE)
    {
      if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
      if (TREE_CODE (orig) == VECTOR_TYPE
          && GET_MODE_SIZE (TYPE_MODE (type))
             == GET_MODE_SIZE (TYPE_MODE (orig)))
        return fold (build1 (NOP_EXPR, type, arg));
    }
  else if (VOID_TYPE_P (type))
    return fold (build1 (CONVERT_EXPR, type, arg));
  abort ();
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  tree result;

  /* These things are certainly not lvalues.  */
  if (TREE_CODE (x) == NON_LVALUE_EXPR
      || TREE_CODE (x) == INTEGER_CST
      || TREE_CODE (x) == REAL_CST
      || TREE_CODE (x) == STRING_CST
      || TREE_CODE (x) == ADDR_EXPR)
    return x;

  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
  TREE_CONSTANT (result) = TREE_CONSTANT (x);
  return result;
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR.  */

static enum tree_code
invert_tree_comparison (enum tree_code code)

/* Similar, but return the comparison that results if the operands are
   swapped.  This is safe for floating-point.  */

static enum tree_code
swap_tree_comparison (enum tree_code code)

/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static int
comparison_to_compcode (enum tree_code code)

/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (int code)

/* Return nonzero if CODE is a tree code that represents a truth value.  */

static int
truth_value_p (enum tree_code code)
{
  return (TREE_CODE_CLASS (code) == '<'
          || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
          || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
          || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
}
2134 /* Return nonzero if two operands (typically of the same tree node)
2135 are necessarily equal. If either argument has side-effects this
2136 function returns zero.
2138 If ONLY_CONST is nonzero, only return nonzero for constants.
2139 This function tests whether the operands are indistinguishable;
2140 it does not test whether they are equal using C's == operation.
2141 The distinction is important for IEEE floating point, because
2142 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2143 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2145 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2146 even though it may hold multiple values during a function.
2147 This is because a GCC tree node guarantees that nothing else is
2148 executed between the evaluation of its "operands" (which may often
2149 be evaluated in arbitrary order). Hence if the operands themselves
2150 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2151 same value in each operand/subexpression. Hence a zero value for
2152 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2153 If comparing arbitrary expression trees, such as from different
2154 statements, ONLY_CONST must usually be nonzero. */
2157 operand_equal_p (tree arg0, tree arg1, int only_const)
2161 /* If either is ERROR_MARK, they aren't equal. */
2162 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2165 /* If both types don't have the same signedness, then we can't consider
2166 them equal. We must check this before the STRIP_NOPS calls
2167 because they may change the signedness of the arguments. */
2168 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2174 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2175 /* This is needed for conversions and for COMPONENT_REF.
2176 Might as well play it safe and always test this. */
2177 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2178 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2179 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2182 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2183 We don't care about side effects in that case because the SAVE_EXPR
2184 takes care of that for us. In all other cases, two expressions are
2185 equal if they have no side effects. If we have two identical
2186 expressions with side effects that should be treated the same due
2187 to the only side effects being identical SAVE_EXPR's, that will
2188 be detected in the recursive calls below. */
2189 if (arg0 == arg1 && ! only_const
2190 && (TREE_CODE (arg0) == SAVE_EXPR
2191 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2194 /* Next handle constant cases, those for which we can return 1 even
2195 if ONLY_CONST is set. */
2196 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2197 switch (TREE_CODE (arg0))
2200 return (! TREE_CONSTANT_OVERFLOW (arg0)
2201 && ! TREE_CONSTANT_OVERFLOW (arg1)
2202 && tree_int_cst_equal (arg0, arg1));
2205 return (! TREE_CONSTANT_OVERFLOW (arg0)
2206 && ! TREE_CONSTANT_OVERFLOW (arg1)
2207 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2208 TREE_REAL_CST (arg1)));
2214 if (TREE_CONSTANT_OVERFLOW (arg0)
2215 || TREE_CONSTANT_OVERFLOW (arg1))
2218 v1 = TREE_VECTOR_CST_ELTS (arg0);
2219 v2 = TREE_VECTOR_CST_ELTS (arg1);
2222 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2225 v1 = TREE_CHAIN (v1);
2226 v2 = TREE_CHAIN (v2);
2233 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2235 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2239 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2240 && ! memcmp (TREE_STRING_POINTER (arg0),
2241 TREE_STRING_POINTER (arg1),
2242 TREE_STRING_LENGTH (arg0)));
2245 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2254 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2257 /* Two conversions are equal only if signedness and modes match. */
2258 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2259 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2260 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2263 return operand_equal_p (TREE_OPERAND (arg0, 0),
2264 TREE_OPERAND (arg1, 0), 0);
2268 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2269 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2273 /* For commutative ops, allow the other order. */
2274 return (commutative_tree_code (TREE_CODE (arg0))
2275 && operand_equal_p (TREE_OPERAND (arg0, 0),
2276 TREE_OPERAND (arg1, 1), 0)
2277 && operand_equal_p (TREE_OPERAND (arg0, 1),
2278 TREE_OPERAND (arg1, 0), 0));
2281 /* If either of the pointer (or reference) expressions we are
2282 dereferencing contain a side effect, these cannot be equal. */
2283 if (TREE_SIDE_EFFECTS (arg0)
2284 || TREE_SIDE_EFFECTS (arg1))
2287 switch (TREE_CODE (arg0))
2290 return operand_equal_p (TREE_OPERAND (arg0, 0),
2291 TREE_OPERAND (arg1, 0), 0);
2295 case ARRAY_RANGE_REF:
2296 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2297 TREE_OPERAND (arg1, 0), 0)
2298 && operand_equal_p (TREE_OPERAND (arg0, 1),
2299 TREE_OPERAND (arg1, 1), 0));
2302 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2303 TREE_OPERAND (arg1, 0), 0)
2304 && operand_equal_p (TREE_OPERAND (arg0, 1),
2305 TREE_OPERAND (arg1, 1), 0)
2306 && operand_equal_p (TREE_OPERAND (arg0, 2),
2307 TREE_OPERAND (arg1, 2), 0));
2313 switch (TREE_CODE (arg0))
2316 case TRUTH_NOT_EXPR:
2317 return operand_equal_p (TREE_OPERAND (arg0, 0),
2318 TREE_OPERAND (arg1, 0), 0);
2321 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2324 /* If the CALL_EXPRs call different functions, then they
2325 clearly cannot be equal.
2326 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2327 TREE_OPERAND (arg1, 0), 0))
2330 /* Only consider const functions equivalent. */
2331 fndecl = get_callee_fndecl (arg0);
2332 if (fndecl == NULL_TREE
2333 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2336 /* Now see if all the arguments are the same. operand_equal_p
2337 does not handle TREE_LIST, so we walk the operands here
2338 feeding them to operand_equal_p. */
2339 arg0 = TREE_OPERAND (arg0, 1);
2340 arg1 = TREE_OPERAND (arg1, 1);
2341 while (arg0 && arg1)
2343 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2346 arg0 = TREE_CHAIN (arg0);
2347 arg1 = TREE_CHAIN (arg1);
2350 /* If we get here and both argument lists are exhausted
2351 then the CALL_EXPRs are equal. */
2352 return ! (arg0 || arg1);
2359 /* Consider __builtin_sqrt equal to sqrt. */
2360 return TREE_CODE (arg0) == FUNCTION_DECL
2361 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2362 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2363 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
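/* Illustrative sketch, guarded out of the build: a toy analogue of
   operand_equal_p over a hypothetical expression struct, mirroring the
   shape of the real routine -- constants compare by value, variables
   only when they are the same node, and operands recursively; anything
   that may have side effects is never considered equal.  None of these
   names exist in GCC.  */
#if 0
struct toy_expr
{
  enum { TOY_CONST, TOY_VAR, TOY_PLUS, TOY_CALL } code;
  long value;			/* For TOY_CONST.  */
  struct toy_expr *op0, *op1;	/* For TOY_PLUS.  */
};

static int
toy_equal_p (const struct toy_expr *a, const struct toy_expr *b)
{
  if (a->code != b->code)
    return 0;
  switch (a->code)
    {
    case TOY_CONST:
      return a->value == b->value;
    case TOY_VAR:
      /* Like VAR_DECLs above: equal only if the same node.  */
      return a == b;
    case TOY_PLUS:
      return toy_equal_p (a->op0, b->op0) && toy_equal_p (a->op1, b->op1);
    case TOY_CALL:
    default:
      /* Calls may have side effects; never consider them equal.  */
      return 0;
    }
}
#endif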
2370 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2371 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2373 When in doubt, return 0. */
2376 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2378 int unsignedp1, unsignedpo;
2379 tree primarg0, primarg1, primother;
2380 unsigned int correct_width;
2382 if (operand_equal_p (arg0, arg1, 0))
2385 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2386 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2389 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2390 and see if the inner values are the same. This removes any
2391 signedness comparison, which doesn't matter here. */
2392 primarg0 = arg0, primarg1 = arg1;
2393 STRIP_NOPS (primarg0);
2394 STRIP_NOPS (primarg1);
2395 if (operand_equal_p (primarg0, primarg1, 0))
2398 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2399 actual comparison operand, ARG0.
2401 First throw away any conversions to wider types
2402 already present in the operands. */
2404 primarg1 = get_narrower (arg1, &unsignedp1);
2405 primother = get_narrower (other, &unsignedpo);
2407 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2408 if (unsignedp1 == unsignedpo
2409 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2410 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2412 tree type = TREE_TYPE (arg0);
2414 /* Make sure the shorter operand is extended the right way
2415 to match the longer operand. */
2416 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2417 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2419 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2426 /* See if ARG is an expression that is either a comparison or is performing
2427 arithmetic on comparisons. The comparisons must only be comparing
2428 two different values, which will be stored in *CVAL1 and *CVAL2; if
2429 they are nonzero it means that some operands have already been found.
2430 No variables may be used anywhere else in the expression except in the
2431 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2432 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2434 If this is true, return 1. Otherwise, return zero. */
2437 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2439 enum tree_code code = TREE_CODE (arg);
2440 char class = TREE_CODE_CLASS (code);
2442 /* We can handle some of the 'e' cases here. */
2443 if (class == 'e' && code == TRUTH_NOT_EXPR)
2445 else if (class == 'e'
2446 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2447 || code == COMPOUND_EXPR))
2450 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2451 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2453 /* If we've already found a CVAL1 or CVAL2, this expression is
2454 too complex to handle. */
2455 if (*cval1 || *cval2)
2465 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2468 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2469 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2470 cval1, cval2, save_p));
2476 if (code == COND_EXPR)
2477 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2478 cval1, cval2, save_p)
2479 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2480 cval1, cval2, save_p)
2481 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2482 cval1, cval2, save_p));
2486 /* First see if we can handle the first operand, then the second. For
2487 the second operand, we know *CVAL1 can't be zero. It must be that
2488 one side of the comparison is each of the values; test for the
2489 case where this isn't true by failing if the two operands are the same. */
2492 if (operand_equal_p (TREE_OPERAND (arg, 0),
2493 TREE_OPERAND (arg, 1), 0))
2497 *cval1 = TREE_OPERAND (arg, 0);
2498 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2500 else if (*cval2 == 0)
2501 *cval2 = TREE_OPERAND (arg, 0);
2502 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2507 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2509 else if (*cval2 == 0)
2510 *cval2 = TREE_OPERAND (arg, 1);
2511 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2523 /* ARG is a tree that is known to contain just arithmetic operations and
2524 comparisons. Evaluate the operations in the tree substituting NEW0 for
2525 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
2529 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2531 tree type = TREE_TYPE (arg);
2532 enum tree_code code = TREE_CODE (arg);
2533 char class = TREE_CODE_CLASS (code);
2535 /* We can handle some of the 'e' cases here. */
2536 if (class == 'e' && code == TRUTH_NOT_EXPR)
2538 else if (class == 'e'
2539 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2545 return fold (build1 (code, type,
2546 eval_subst (TREE_OPERAND (arg, 0),
2547 old0, new0, old1, new1)));
2550 return fold (build (code, type,
2551 eval_subst (TREE_OPERAND (arg, 0),
2552 old0, new0, old1, new1),
2553 eval_subst (TREE_OPERAND (arg, 1),
2554 old0, new0, old1, new1)));
2560 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2563 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2566 return fold (build (code, type,
2567 eval_subst (TREE_OPERAND (arg, 0),
2568 old0, new0, old1, new1),
2569 eval_subst (TREE_OPERAND (arg, 1),
2570 old0, new0, old1, new1),
2571 eval_subst (TREE_OPERAND (arg, 2),
2572 old0, new0, old1, new1)));
2576 /* Fall through - ??? */
2580 tree arg0 = TREE_OPERAND (arg, 0);
2581 tree arg1 = TREE_OPERAND (arg, 1);
2583 /* We need to check both for exact equality and tree equality. The
2584 former will be true if the operand has a side-effect. In that
2585 case, we know the operand occurred exactly once. */
2587 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2589 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2592 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2594 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2597 return fold (build (code, type, arg0, arg1));
2605 /* Return a tree for the case when the result of an expression is RESULT
2606 converted to TYPE and OMITTED was previously an operand of the expression
2607 but is now not needed (e.g., we folded OMITTED * 0).
2609 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2610 the conversion of RESULT to TYPE. */
2613 omit_one_operand (tree type, tree result, tree omitted)
2615 tree t = fold_convert (type, result);
2617 if (TREE_SIDE_EFFECTS (omitted))
2618 return build (COMPOUND_EXPR, type, omitted, t);
2620 return non_lvalue (t);
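/* Illustrative sketch, guarded out of the build: in source terms,
   omit_one_operand lets fold turn "f () * 0" into "(f (), 0)" -- the
   call is still evaluated for its side effects, but the multiplication
   disappears.  The function f below is hypothetical.  */
#if 0
extern int f (void);

static int
before_folding (void)
{
  return f () * 0;
}

static int
after_folding (void)
{
  return (f (), 0);	/* The COMPOUND_EXPR keeps the side effects.  */
}
#endif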
2623 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2626 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2628 tree t = fold_convert (type, result);
2630 if (TREE_SIDE_EFFECTS (omitted))
2631 return build (COMPOUND_EXPR, type, omitted, t);
2633 return pedantic_non_lvalue (t);
2636 /* Return a simplified tree node for the truth-negation of ARG. This
2637 never alters ARG itself. We assume that ARG is an operation that
2638 returns a truth value (0 or 1). */
2641 invert_truthvalue (tree arg)
2643 tree type = TREE_TYPE (arg);
2644 enum tree_code code = TREE_CODE (arg);
2646 if (code == ERROR_MARK)
2649 /* If this is a comparison, we can simply invert it, except for
2650 floating-point non-equality comparisons, in which case we just
2651 enclose a TRUTH_NOT_EXPR around what we have. */
2653 if (TREE_CODE_CLASS (code) == '<')
2655 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2656 && !flag_unsafe_math_optimizations
2659 return build1 (TRUTH_NOT_EXPR, type, arg);
2660 else if (code == UNORDERED_EXPR
2661 || code == ORDERED_EXPR
2662 || code == UNEQ_EXPR
2663 || code == UNLT_EXPR
2664 || code == UNLE_EXPR
2665 || code == UNGT_EXPR
2666 || code == UNGE_EXPR)
2667 return build1 (TRUTH_NOT_EXPR, type, arg);
2669 return build (invert_tree_comparison (code), type,
2670 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2676 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2678 case TRUTH_AND_EXPR:
2679 return build (TRUTH_OR_EXPR, type,
2680 invert_truthvalue (TREE_OPERAND (arg, 0)),
2681 invert_truthvalue (TREE_OPERAND (arg, 1)));
2684 return build (TRUTH_AND_EXPR, type,
2685 invert_truthvalue (TREE_OPERAND (arg, 0)),
2686 invert_truthvalue (TREE_OPERAND (arg, 1)));
2688 case TRUTH_XOR_EXPR:
2689 /* Here we can invert either operand. We invert the first operand
2690 unless the second operand is a TRUTH_NOT_EXPR in which case our
2691 result is the XOR of the first operand with the inside of the
2692 negation of the second operand. */
2694 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2695 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2696 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2698 return build (TRUTH_XOR_EXPR, type,
2699 invert_truthvalue (TREE_OPERAND (arg, 0)),
2700 TREE_OPERAND (arg, 1));
2702 case TRUTH_ANDIF_EXPR:
2703 return build (TRUTH_ORIF_EXPR, type,
2704 invert_truthvalue (TREE_OPERAND (arg, 0)),
2705 invert_truthvalue (TREE_OPERAND (arg, 1)));
2707 case TRUTH_ORIF_EXPR:
2708 return build (TRUTH_ANDIF_EXPR, type,
2709 invert_truthvalue (TREE_OPERAND (arg, 0)),
2710 invert_truthvalue (TREE_OPERAND (arg, 1)));
2712 case TRUTH_NOT_EXPR:
2713 return TREE_OPERAND (arg, 0);
2716 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2717 invert_truthvalue (TREE_OPERAND (arg, 1)),
2718 invert_truthvalue (TREE_OPERAND (arg, 2)));
2721 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2722 invert_truthvalue (TREE_OPERAND (arg, 1)));
2724 case NON_LVALUE_EXPR:
2725 return invert_truthvalue (TREE_OPERAND (arg, 0));
2730 return build1 (TREE_CODE (arg), type,
2731 invert_truthvalue (TREE_OPERAND (arg, 0)));
2734 if (!integer_onep (TREE_OPERAND (arg, 1)))
2736 return build (EQ_EXPR, type, arg,
2737 fold_convert (type, integer_zero_node));
2740 return build1 (TRUTH_NOT_EXPR, type, arg);
2742 case CLEANUP_POINT_EXPR:
2743 return build1 (CLEANUP_POINT_EXPR, type,
2744 invert_truthvalue (TREE_OPERAND (arg, 0)));
2749 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2751 return build1 (TRUTH_NOT_EXPR, type, arg);
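/* Illustrative sketch, guarded out of the build: for integer operands
   the cases above amount to De Morgan's laws plus comparison inversion,
   so the two functions below agree for all inputs.  For floating point
   the comparison could not simply be inverted, since "a < b" and
   "a >= b" are both false when a NaN is involved.  */
#if 0
static int
original (int a, int b, int c, int d)
{
  return ! (a < b && c == d);
}

static int
inverted (int a, int b, int c, int d)
{
  return a >= b || c != d;
}
#endif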
2754 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2755 operands are another bit-wise operation with a common input. If so,
2756 distribute the bit operations to save an operation and possibly two if
2757 constants are involved. For example, convert
2758 (A | B) & (A | C) into A | (B & C)
2759 Further simplification will occur if B and C are constants.
2761 If this optimization cannot be done, 0 will be returned. */
2764 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2769 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2770 || TREE_CODE (arg0) == code
2771 || (TREE_CODE (arg0) != BIT_AND_EXPR
2772 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2775 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2777 common = TREE_OPERAND (arg0, 0);
2778 left = TREE_OPERAND (arg0, 1);
2779 right = TREE_OPERAND (arg1, 1);
2781 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2783 common = TREE_OPERAND (arg0, 0);
2784 left = TREE_OPERAND (arg0, 1);
2785 right = TREE_OPERAND (arg1, 0);
2787 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2789 common = TREE_OPERAND (arg0, 1);
2790 left = TREE_OPERAND (arg0, 0);
2791 right = TREE_OPERAND (arg1, 1);
2793 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2795 common = TREE_OPERAND (arg0, 1);
2796 left = TREE_OPERAND (arg0, 0);
2797 right = TREE_OPERAND (arg1, 0);
2802 return fold (build (TREE_CODE (arg0), type, common,
2803 fold (build (code, type, left, right))));
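/* Illustrative sketch, guarded out of the build: the transformation
   above is the distributive identity written at the source level; the
   second form needs two bitwise operations instead of three.  */
#if 0
static unsigned int
undistributed (unsigned int a, unsigned int b, unsigned int c)
{
  return (a | b) & (a | c);
}

static unsigned int
distributed (unsigned int a, unsigned int b, unsigned int c)
{
  return a | (b & c);	/* Equal to the above for all a, b, c.  */
}
#endif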
2806 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2807 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2810 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2813 tree result = build (BIT_FIELD_REF, type, inner,
2814 size_int (bitsize), bitsize_int (bitpos));
2816 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
2821 /* Optimize a bit-field compare.
2823 There are two cases: First is a compare against a constant and the
2824 second is a comparison of two items where the fields are at the same
2825 bit position relative to the start of a chunk (byte, halfword, word)
2826 large enough to contain it. In these cases we can avoid the shift
2827 implicit in bitfield extractions.
2829 For constants, we emit a compare of the shifted constant with the
2830 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2831 compared. For two fields at the same position, we do the ANDs with the
2832 similar mask and compare the result of the ANDs.
2834 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2835 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2836 are the left and right operands of the comparison, respectively.
2838 If the optimization described above can be done, we return the resulting
2839 tree. Otherwise we return zero. */
2842 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2845 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2846 tree type = TREE_TYPE (lhs);
2847 tree signed_type, unsigned_type;
2848 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2849 enum machine_mode lmode, rmode, nmode;
2850 int lunsignedp, runsignedp;
2851 int lvolatilep = 0, rvolatilep = 0;
2852 tree linner, rinner = NULL_TREE;
2856 /* Get all the information about the extractions being done. If the bit size
2857 is the same as the size of the underlying object, we aren't doing an
2858 extraction at all and so can do nothing. We also don't want to
2859 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2860 then will no longer be able to replace it. */
2861 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2862 &lunsignedp, &lvolatilep);
2863 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2864 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2869 /* If this is not a constant, we can only do something if bit positions,
2870 sizes, and signedness are the same. */
2871 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2872 &runsignedp, &rvolatilep);
2874 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2875 || lunsignedp != runsignedp || offset != 0
2876 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2880 /* See if we can find a mode to refer to this field. We should be able to,
2881 but fail if we can't. */
2882 nmode = get_best_mode (lbitsize, lbitpos,
2883 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2884 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2885 TYPE_ALIGN (TREE_TYPE (rinner))),
2886 word_mode, lvolatilep || rvolatilep);
2887 if (nmode == VOIDmode)
2890 /* Set signed and unsigned types of the precision of this mode for the shifts below. */
2892 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
2893 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
2895 /* Compute the bit position and size for the new reference and our offset
2896 within it. If the new reference is the same size as the original, we
2897 won't optimize anything, so return zero. */
2898 nbitsize = GET_MODE_BITSIZE (nmode);
2899 nbitpos = lbitpos & ~ (nbitsize - 1);
2901 if (nbitsize == lbitsize)
2904 if (BYTES_BIG_ENDIAN)
2905 lbitpos = nbitsize - lbitsize - lbitpos;
2907 /* Make the mask to be used against the extracted field. */
2908 mask = build_int_2 (~0, ~0);
2909 TREE_TYPE (mask) = unsigned_type;
2910 force_fit_type (mask, 0);
2911 mask = fold_convert (unsigned_type, mask);
2912 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2913 mask = const_binop (RSHIFT_EXPR, mask,
2914 size_int (nbitsize - lbitsize - lbitpos), 0);
2917 /* If not comparing with constant, just rework the comparison and return. */
2919 return build (code, compare_type,
2920 build (BIT_AND_EXPR, unsigned_type,
2921 make_bit_field_ref (linner, unsigned_type,
2922 nbitsize, nbitpos, 1),
2924 build (BIT_AND_EXPR, unsigned_type,
2925 make_bit_field_ref (rinner, unsigned_type,
2926 nbitsize, nbitpos, 1),
2929 /* Otherwise, we are handling the constant case. See if the constant is too
2930 big for the field. Warn and return a tree for 0 (false) if so. We do
2931 this not only for its own sake, but to avoid having to test for this
2932 error case below. If we didn't, we might generate wrong code.
2934 For unsigned fields, the constant shifted right by the field length should
2935 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
2940 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2941 fold_convert (unsigned_type, rhs),
2942 size_int (lbitsize), 0)))
2944 warning ("comparison is always %d due to width of bit-field",
2946 return fold_convert (compare_type,
2948 ? integer_one_node : integer_zero_node));
2953 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2954 size_int (lbitsize - 1), 0);
2955 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2957 warning ("comparison is always %d due to width of bit-field",
2959 return fold_convert (compare_type,
2961 ? integer_one_node : integer_zero_node));
2965 /* Single-bit compares should always be against zero. */
2966 if (lbitsize == 1 && ! integer_zerop (rhs))
2968 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2969 rhs = fold_convert (type, integer_zero_node);
2972 /* Make a new bitfield reference, shift the constant over the
2973 appropriate number of bits and mask it with the computed mask
2974 (in case this was a signed field). If we changed it, make a new one. */
2975 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2978 TREE_SIDE_EFFECTS (lhs) = 1;
2979 TREE_THIS_VOLATILE (lhs) = 1;
2982 rhs = fold (const_binop (BIT_AND_EXPR,
2983 const_binop (LSHIFT_EXPR,
2984 fold_convert (unsigned_type, rhs),
2985 size_int (lbitpos), 0),
2988 return build (code, compare_type,
2989 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
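/* Illustrative sketch, guarded out of the build: for a hypothetical
   layout where the 3-bit field F sits at bits 3..5 of a 32-bit word,
   the constant case above turns a bit-field test into one mask and one
   compare on the containing word, with no extract-and-shift sequence.
   The struct and the bit positions are assumptions, not GCC code.  */
#if 0
struct s { unsigned int pad : 3; unsigned int f : 3; };

static int
field_form (struct s x)
{
  return x.f == 3;
}

static int
masked_form (unsigned int word)		/* The word holding X.  */
{
  return (word & (7u << 3)) == (3u << 3);
}
#endif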
2993 /* Subroutine for fold_truthop: decode a field reference.
2995 If EXP is a comparison reference, we return the innermost reference.
2997 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2998 set to the starting bit number.
3000 If the innermost field can be completely contained in a mode-sized
3001 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3003 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3004 otherwise it is not changed.
3006 *PUNSIGNEDP is set to the signedness of the field.
3008 *PMASK is set to the mask used. This is either contained in a
3009 BIT_AND_EXPR or derived from the width of the field.
3011 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3013 Return 0 if this is not a component reference or is one that we can't
3014 do anything with. */
3017 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3018 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3019 int *punsignedp, int *pvolatilep,
3020 tree *pmask, tree *pand_mask)
3022 tree outer_type = 0;
3024 tree mask, inner, offset;
3026 unsigned int precision;
3028 /* All the optimizations using this function assume integer fields.
3029 There are problems with FP fields since the type_for_size call
3030 below can fail for, e.g., XFmode. */
3031 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3034 /* We are interested in the bare arrangement of bits, so strip everything
3035 that doesn't affect the machine mode. However, record the type of the
3036 outermost expression if it may matter below. */
3037 if (TREE_CODE (exp) == NOP_EXPR
3038 || TREE_CODE (exp) == CONVERT_EXPR
3039 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3040 outer_type = TREE_TYPE (exp);
3043 if (TREE_CODE (exp) == BIT_AND_EXPR)
3045 and_mask = TREE_OPERAND (exp, 1);
3046 exp = TREE_OPERAND (exp, 0);
3047 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3048 if (TREE_CODE (and_mask) != INTEGER_CST)
3052 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3053 punsignedp, pvolatilep);
3054 if ((inner == exp && and_mask == 0)
3055 || *pbitsize < 0 || offset != 0
3056 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3059 /* If the number of bits in the reference is the same as the bitsize of
3060 the outer type, then the outer type gives the signedness. Otherwise
3061 (in case of a small bitfield) the signedness is unchanged. */
3062 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3063 *punsignedp = TYPE_UNSIGNED (outer_type);
3065 /* Compute the mask to access the bitfield. */
3066 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3067 precision = TYPE_PRECISION (unsigned_type);
3069 mask = build_int_2 (~0, ~0);
3070 TREE_TYPE (mask) = unsigned_type;
3071 force_fit_type (mask, 0);
3072 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3073 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3075 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3077 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3078 fold_convert (unsigned_type, and_mask), mask));
3081 *pand_mask = and_mask;
3085 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order bit positions. */
3089 all_ones_mask_p (tree mask, int size)
3091 tree type = TREE_TYPE (mask);
3092 unsigned int precision = TYPE_PRECISION (type);
3095 tmask = build_int_2 (~0, ~0);
3096 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3097 force_fit_type (tmask, 0);
3099 tree_int_cst_equal (mask,
3100 const_binop (RSHIFT_EXPR,
3101 const_binop (LSHIFT_EXPR, tmask,
3102 size_int (precision - size),
3104 size_int (precision - size), 0));
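/* Illustrative sketch, guarded out of the build: the shift pair used
   above builds a mask of SIZE low-order ones without an out-of-range
   shift when SIZE equals the precision.  With 32-bit unsigned
   arithmetic:  */
#if 0
static unsigned int
low_order_mask (int size)	/* Assumes 1 <= size <= 32.  */
{
  unsigned int mask = ~0u;
  mask <<= 32 - size;		/* Clear the low 32 - SIZE bits...  */
  mask >>= 32 - size;		/* ...then move the ones back down.  */
  return mask;			/* E.g. size == 8 yields 0xff.  */
}
#endif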
3107 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3108 represents the sign bit of EXP's type. If EXP represents a sign
3109 or zero extension, also test VAL against the unextended type.
3110 The return value is the (sub)expression whose sign bit is VAL,
3111 or NULL_TREE otherwise. */
3114 sign_bit_p (tree exp, tree val)
3116 unsigned HOST_WIDE_INT mask_lo, lo;
3117 HOST_WIDE_INT mask_hi, hi;
3121 /* Tree EXP must have an integral type. */
3122 t = TREE_TYPE (exp);
3123 if (! INTEGRAL_TYPE_P (t))
3126 /* Tree VAL must be an integer constant. */
3127 if (TREE_CODE (val) != INTEGER_CST
3128 || TREE_CONSTANT_OVERFLOW (val))
3131 width = TYPE_PRECISION (t);
3132 if (width > HOST_BITS_PER_WIDE_INT)
3134 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3137 mask_hi = ((unsigned HOST_WIDE_INT) -1
3138 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3144 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3147 mask_lo = ((unsigned HOST_WIDE_INT) -1
3148 >> (HOST_BITS_PER_WIDE_INT - width));
3151 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3152 treat VAL as if it were unsigned. */
3153 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3154 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3157 /* Handle extension from a narrower type. */
3158 if (TREE_CODE (exp) == NOP_EXPR
3159 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3160 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3165 /* Subroutine for fold_truthop: determine if an operand is simple enough
3166 to be evaluated unconditionally. */
3169 simple_operand_p (tree exp)
3171 /* Strip any conversions that don't change the machine mode. */
3172 while ((TREE_CODE (exp) == NOP_EXPR
3173 || TREE_CODE (exp) == CONVERT_EXPR)
3174 && (TYPE_MODE (TREE_TYPE (exp))
3175 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3176 exp = TREE_OPERAND (exp, 0);
3178 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3180 && ! TREE_ADDRESSABLE (exp)
3181 && ! TREE_THIS_VOLATILE (exp)
3182 && ! DECL_NONLOCAL (exp)
3183 /* Don't regard global variables as simple. They may be
3184 allocated in ways unknown to the compiler (shared memory,
3185 #pragma weak, etc). */
3186 && ! TREE_PUBLIC (exp)
3187 && ! DECL_EXTERNAL (exp)
3188 /* Loading a static variable is unduly expensive, but global
3189 registers aren't expensive. */
3190 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3193 /* The following functions are subroutines to fold_range_test and allow it to
3194 try to change a logical combination of comparisons into a range test.
3197 X == 2 || X == 3 || X == 4 || X == 5
3201 (unsigned) (X - 2) <= 3
3203 We describe each set of comparisons as being either inside or outside
3204 a range, using a variable named like IN_P, and then describe the
3205 range with a lower and upper bound. If one of the bounds is omitted,
3206 it represents either the highest or lowest value of the type.
3208 In the comments below, we represent a range by two numbers in brackets
3209 preceded by a "+" to designate being inside that range, or a "-" to
3210 designate being outside that range, so the condition can be inverted by
3211 flipping the prefix. An omitted bound is represented by a "-". For
3212 example, "- [-, 10]" means being outside the range starting at the lowest
3213 possible value and ending at 10, in other words, being greater than 10.
3214 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3217 We set things up so that the missing bounds are handled in a
3218 consistent manner, so that neither a missing bound nor "true" and
3219 "false" needs to be handled as a special case. */
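/* Illustrative sketch, guarded out of the build: the introductory
   example above, written out in plain C.  The two functions agree for
   every int x; the second needs one subtraction and one unsigned
   comparison instead of a chain of tests.  */
#if 0
static int
chained (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}

static int
range_form (int x)
{
  return (unsigned int) x - 2u <= 3u;	/* + [2, 5]  */
}
#endif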
3221 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3222 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3223 and UPPER1_P are nonzero if the respective argument is an upper bound
3224 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3225 must be specified for a comparison. ARG1 will be converted to ARG0's
3226 type if both are specified. */
3229 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3230 tree arg1, int upper1_p)
3236 /* If neither arg represents infinity, do the normal operation.
3237 Else, if not a comparison, return infinity. Else handle the special
3238 comparison rules. Note that most of the cases below won't occur, but
3239 are handled for consistency. */
3241 if (arg0 != 0 && arg1 != 0)
3243 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3244 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3246 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3249 if (TREE_CODE_CLASS (code) != '<')
3252 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3253 for neither. In real maths, we cannot assume open ended ranges are
3254 the same. But, this is computer arithmetic, where numbers are finite.
3255 We can therefore treat any unbounded range as if it were bounded by
3256 a value Z greater than any representable number. This permits
3257 us to treat unbounded ranges as equal. */
3258 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3259 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3263 result = sgn0 == sgn1;
3266 result = sgn0 != sgn1;
3269 result = sgn0 < sgn1;
3272 result = sgn0 <= sgn1;
3275 result = sgn0 > sgn1;
3278 result = sgn0 >= sgn1;
3284 return fold_convert (type, result ? integer_one_node : integer_zero_node);
3287 /* Given EXP, a logical expression, set the range it is testing into
3288 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3289 actually being tested. *PLOW and *PHIGH will be made of the same type
3290 as the returned expression. If EXP is not a comparison, we will most
3291 likely not be returning a useful value and range. */
3294 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3296 enum tree_code code;
3297 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3298 tree orig_type = NULL_TREE;
3300 tree low, high, n_low, n_high;
3302 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3303 and see if we can refine the range. Some of the cases below may not
3304 happen, but it doesn't seem worth worrying about this. We "continue"
3305 the outer loop when we've changed something; otherwise we "break"
3306 the switch, which will "break" the while. */
3309 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3313 code = TREE_CODE (exp);
3315 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3317 if (first_rtl_op (code) > 0)
3318 arg0 = TREE_OPERAND (exp, 0);
3319 if (TREE_CODE_CLASS (code) == '<'
3320 || TREE_CODE_CLASS (code) == '1'
3321 || TREE_CODE_CLASS (code) == '2')
3322 type = TREE_TYPE (arg0);
3323 if (TREE_CODE_CLASS (code) == '2'
3324 || TREE_CODE_CLASS (code) == '<'
3325 || (TREE_CODE_CLASS (code) == 'e'
3326 && TREE_CODE_LENGTH (code) > 1))
3327 arg1 = TREE_OPERAND (exp, 1);
3330 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3331 lose a cast by accident. */
3332 if (type != NULL_TREE && orig_type == NULL_TREE)
3337 case TRUTH_NOT_EXPR:
3338 in_p = ! in_p, exp = arg0;
3341 case EQ_EXPR: case NE_EXPR:
3342 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3343 /* We can only do something if the range is testing for zero
3344 and if the second operand is an integer constant. Note that
3345 saying something is "in" the range we make is done by
3346 complementing IN_P, since IN_P is set for the initial case of
3347 being not equal to zero; "out" is leaving it alone. */
3348 if (low == 0 || high == 0
3349 || ! integer_zerop (low) || ! integer_zerop (high)
3350 || TREE_CODE (arg1) != INTEGER_CST)
3355 case NE_EXPR: /* - [c, c] */
3358 case EQ_EXPR: /* + [c, c] */
3359 in_p = ! in_p, low = high = arg1;
3361 case GT_EXPR: /* - [-, c] */
3362 low = 0, high = arg1;
3364 case GE_EXPR: /* + [c, -] */
3365 in_p = ! in_p, low = arg1, high = 0;
3367 case LT_EXPR: /* - [c, -] */
3368 low = arg1, high = 0;
3370 case LE_EXPR: /* + [-, c] */
3371 in_p = ! in_p, low = 0, high = arg1;
3379 /* If this is an unsigned comparison, we also know that EXP is
3380 greater than or equal to zero. We base the range tests we make
3381 on that fact, so we record it here so we can parse existing range tests. */
3383 if (TYPE_UNSIGNED (type) && (low == 0 || high == 0))
3385 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3386 1, fold_convert (type, integer_zero_node),
3390 in_p = n_in_p, low = n_low, high = n_high;
3392 /* If the high bound is missing, but we have a nonzero low
3393 bound, reverse the range so it goes from zero to the low bound minus 1. */
3395 if (high == 0 && low && ! integer_zerop (low))
3398 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3399 integer_one_node, 0);
3400 low = fold_convert (type, integer_zero_node);
3406 /* (-x) IN [a,b] -> x in [-b, -a] */
3407 n_low = range_binop (MINUS_EXPR, type,
3408 fold_convert (type, integer_zero_node),
3410 n_high = range_binop (MINUS_EXPR, type,
3411 fold_convert (type, integer_zero_node),
3413 low = n_low, high = n_high;
3419 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3420 fold_convert (type, integer_one_node));
3423 case PLUS_EXPR: case MINUS_EXPR:
3424 if (TREE_CODE (arg1) != INTEGER_CST)
3427 /* If EXP is signed, any overflow in the computation is undefined,
3428 so we don't worry about it so long as our computations on
3429 the bounds don't overflow. For unsigned, overflow is defined
3430 and this is exactly the right thing. */
3431 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3432 type, low, 0, arg1, 0);
3433 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3434 type, high, 1, arg1, 0);
3435 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3436 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3439 /* Check for an unsigned range which has wrapped around the maximum
3440 value thus making n_high < n_low, and normalize it. */
3441 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3443 low = range_binop (PLUS_EXPR, type, n_high, 0,
3444 integer_one_node, 0);
3445 high = range_binop (MINUS_EXPR, type, n_low, 0,
3446 integer_one_node, 0);
3448 /* If the range is of the form +/- [ x+1, x ], we won't
3449 be able to normalize it. But then, it represents the
3450 whole range or the empty set, so make it +/- [-, -]. */
3452 if (tree_int_cst_equal (n_low, low)
3453 && tree_int_cst_equal (n_high, high))
3459 low = n_low, high = n_high;
3464 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3465 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3468 if (! INTEGRAL_TYPE_P (type)
3469 || (low != 0 && ! int_fits_type_p (low, type))
3470 || (high != 0 && ! int_fits_type_p (high, type)))
3473 n_low = low, n_high = high;
3476 n_low = fold_convert (type, n_low);
3479 n_high = fold_convert (type, n_high);
3481 /* If we're converting from an unsigned to a signed type,
3482 we will be doing the comparison as unsigned. The tests above
3483 have already verified that LOW and HIGH are both positive.
3485 So we have to make sure that the original unsigned value will
3486 be interpreted as positive. */
3487 if (TYPE_UNSIGNED (type) && ! TYPE_UNSIGNED (TREE_TYPE (exp)))
3489 tree equiv_type = lang_hooks.types.type_for_mode
3490 (TYPE_MODE (type), 1);
3493 /* A range without an upper bound is, naturally, unbounded.
3494 Since convert would have cropped a very large value, use
3495 the max value for the destination type. */
3497 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3498 : TYPE_MAX_VALUE (type);
3500 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3501 high_positive = fold (build (RSHIFT_EXPR, type,
3505 integer_one_node)));
3507 /* If the low bound is specified, "and" the range with the
3508 range for which the original unsigned value will be
3512 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3513 1, n_low, n_high, 1,
3514 fold_convert (type, integer_zero_node),
3518 in_p = (n_in_p == in_p);
3522 /* Otherwise, "or" the range with the range of the input
3523 that will be interpreted as negative. */
3524 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3525 0, n_low, n_high, 1,
3526 fold_convert (type, integer_zero_node),
3530 in_p = (in_p != n_in_p);
3535 low = n_low, high = n_high;
3545 /* If EXP is a constant, we can evaluate whether this is true or false. */
3546 if (TREE_CODE (exp) == INTEGER_CST)
3548 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3550 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3556 *pin_p = in_p, *plow = low, *phigh = high;
3560 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3561 type, TYPE, return an expression to test if EXP is in (or out of, depending
3562 on IN_P) the range. */
3565 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3567 tree etype = TREE_TYPE (exp);
3571 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3572 return invert_truthvalue (value);
3574 if (low == 0 && high == 0)
3575 return fold_convert (type, integer_one_node);
3578 return fold (build (LE_EXPR, type, exp, high));
3581 return fold (build (GE_EXPR, type, exp, low));
3583 if (operand_equal_p (low, high, 0))
3584 return fold (build (EQ_EXPR, type, exp, low));
3586 if (integer_zerop (low))
3588 if (! TYPE_UNSIGNED (etype))
3590 etype = lang_hooks.types.unsigned_type (etype);
3591 high = fold_convert (etype, high);
3592 exp = fold_convert (etype, exp);
3594 return build_range_check (type, exp, 1, 0, high);
3597 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3598 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3600 unsigned HOST_WIDE_INT lo;
3604 prec = TYPE_PRECISION (etype);
3605 if (prec <= HOST_BITS_PER_WIDE_INT)
3608 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3612 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3613 lo = (unsigned HOST_WIDE_INT) -1;
3616 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3618 if (TYPE_UNSIGNED (etype))
3620 etype = lang_hooks.types.signed_type (etype);
3621 exp = fold_convert (etype, exp);
3623 return fold (build (GT_EXPR, type, exp,
3624 fold_convert (etype, integer_zero_node)));
3628 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3629 && ! TREE_OVERFLOW (value))
3630 return build_range_check (type,
3631 fold (build (MINUS_EXPR, etype, exp, low)),
3632 1, fold_convert (etype, integer_zero_node),
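/* Illustrative sketch, guarded out of the build: the "(c >= 1 &&
   c <= 127)" case above for an unsigned char operand.  On the usual
   two's-complement targets both forms agree for all 256 values.  */
#if 0
static int
two_compares (unsigned char c)
{
  return c >= 1 && c <= 127;
}

static int
one_compare (unsigned char c)
{
  return (signed char) c > 0;	/* 0 fails; 128..255 go negative.  */
}
#endif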
3638 /* Given two ranges, see if we can merge them into one. Return 1 if we
3639 can, 0 if we can't. Set the output range into the specified parameters. */
3642 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3643 tree high0, int in1_p, tree low1, tree high1)
3651 int lowequal = ((low0 == 0 && low1 == 0)
3652 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3653 low0, 0, low1, 0)));
3654 int highequal = ((high0 == 0 && high1 == 0)
3655 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3656 high0, 1, high1, 1)));
3658 /* Make range 0 be the range that starts first, or ends last if they
3659 start at the same value; swap them if that isn't already so. */
3660 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3663 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3664 high1, 1, high0, 1))))
3666 temp = in0_p, in0_p = in1_p, in1_p = temp;
3667 tem = low0, low0 = low1, low1 = tem;
3668 tem = high0, high0 = high1, high1 = tem;
3671 /* Now flag two cases, whether the ranges are disjoint or whether the
3672 second range is totally subsumed in the first. Note that the tests
3673 below are simplified by the ones above. */
3674 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3675 high0, 1, low1, 0));
3676 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3677 high1, 1, high0, 1));
3679 /* We now have four cases, depending on whether we are including or
3680 excluding the two ranges. */
3683 /* If they don't overlap, the result is false. If the second range
3684 is a subset it is the result. Otherwise, the range is from the start
3685 of the second to the end of the first. */
3687 in_p = 0, low = high = 0;
3689 in_p = 1, low = low1, high = high1;
3691 in_p = 1, low = low1, high = high0;
3694 else if (in0_p && ! in1_p)
3696 /* If they don't overlap, the result is the first range. If they are
3697 equal, the result is false. If the second range is a subset of the
3698 first, and the ranges begin at the same place, we go from just after
3699 the end of the first range to the end of the second. If the second
3700 range is not a subset of the first, or if it is a subset and both
3701 ranges end at the same place, the range starts at the start of the
3702 first range and ends just before the second range.
3703 Otherwise, we can't describe this as a single range. */
3705 in_p = 1, low = low0, high = high0;
3706 else if (lowequal && highequal)
3707 in_p = 0, low = high = 0;
3708 else if (subset && lowequal)
3710 in_p = 1, high = high0;
3711 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3712 integer_one_node, 0);
3714 else if (! subset || highequal)
3716 in_p = 1, low = low0;
3717 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3718 integer_one_node, 0);
3724 else if (! in0_p && in1_p)
3726 /* If they don't overlap, the result is the second range. If the second
3727 is a subset of the first, the result is false. Otherwise,
3728 the range starts just after the first range and ends at the
3729 end of the second. */
3731 in_p = 1, low = low1, high = high1;
3732 else if (subset || highequal)
3733 in_p = 0, low = high = 0;
3736 in_p = 1, high = high1;
3737 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3738 integer_one_node, 0);
3744 /* The case where we are excluding both ranges. Here the complex case
3745 is if they don't overlap. In that case, the only time we have a
3746 range is if they are adjacent. If the second is a subset of the
3747 first, the result is the first. Otherwise, the range to exclude
3748 starts at the beginning of the first range and ends at the end of the second. */
3752 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3753 range_binop (PLUS_EXPR, NULL_TREE,
3755 integer_one_node, 1),
3757 in_p = 0, low = low0, high = high1;
3762 in_p = 0, low = low0, high = high0;
3764 in_p = 0, low = low0, high = high1;
3767 *pin_p = in_p, *plow = low, *phigh = high;
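/* Illustrative sketch, guarded out of the build: merging two
   overlapping "inside" ranges, + [2, 5] and + [4, 9], gives + [2, 9],
   so the two predicates below agree for every int x.  */
#if 0
static int
unmerged (int x)
{
  return (x >= 2 && x <= 5) || (x >= 4 && x <= 9);
}

static int
merged (int x)
{
  return x >= 2 && x <= 9;
}
#endif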
3771 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3772 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3775 /* EXP is some logical combination of boolean tests. See if we can
3776 merge it into some range test. Return the new tree if so. */
3779 fold_range_test (tree exp)
3781 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3782 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3783 int in0_p, in1_p, in_p;
3784 tree low0, low1, low, high0, high1, high;
3785 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3786 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3789 /* If this is an OR operation, invert both sides; we will invert
3790 again at the end. */
3792 in0_p = ! in0_p, in1_p = ! in1_p;
3794 /* If both expressions are the same, if we can merge the ranges, and we
3795 can build the range test, return it or it inverted. If one of the
3796 ranges is always true or always false, consider it to be the same
3797 expression as the other. */
3798 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3799 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3801 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3803 : rhs != 0 ? rhs : integer_zero_node,
3805 return or_op ? invert_truthvalue (tem) : tem;
3807 /* On machines where the branch cost is expensive, if this is a
3808 short-circuited branch and the underlying object on both sides
3809 is the same, make a non-short-circuit operation. */
3810 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3811 && lhs != 0 && rhs != 0
3812 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3813 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3814 && operand_equal_p (lhs, rhs, 0))
3816 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3817 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3818 which cases we can't do this. */
3819 if (simple_operand_p (lhs))
3820 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3821 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3822 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3823 TREE_OPERAND (exp, 1));
3825 else if (lang_hooks.decls.global_bindings_p () == 0
3826 && ! CONTAINS_PLACEHOLDER_P (lhs))
3828 tree common = save_expr (lhs);
3830 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3831 or_op ? ! in0_p : in0_p,
3833 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3834 or_op ? ! in1_p : in1_p,
3836 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3837 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3838 TREE_TYPE (exp), lhs, rhs);
3845 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3846 bit value. Arrange things so the extra bits will be set to zero if and
3847 only if C is sign-extended to its full width. If MASK is nonzero,
3848 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3851 unextend (tree c, int p, int unsignedp, tree mask)
3853 tree type = TREE_TYPE (c);
3854 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3857 if (p == modesize || unsignedp)
3860 /* We work by getting just the sign bit into the low-order bit, then
3861 into the high-order bit, then sign-extend. We then XOR that value with C. */
3863 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3864 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3866 /* We must use a signed type in order to get an arithmetic right shift.
3867 However, we must also avoid introducing accidental overflows, so that
3868 a subsequent call to integer_zerop will work. Hence we must
3869 do the type conversion here. At this point, the constant is either
3870 zero or one, and the conversion to a signed type can never overflow.
3871 We could get an overflow if this conversion is done anywhere else. */
3872 if (TYPE_UNSIGNED (type))
3873 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
3875 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3876 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3878 temp = const_binop (BIT_AND_EXPR, temp,
3879 fold_convert (TREE_TYPE (c), mask), 0);
3880 /* If necessary, convert the type back to match the type of C. */
3881 if (TYPE_UNSIGNED (type))
3882 temp = fold_convert (type, temp);
3884 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
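/* Illustrative sketch, guarded out of the build: the shift/XOR dance
   above for P == 4 and an 8-bit mode, with no mask, assuming the usual
   two's-complement arithmetic right shift.  The result has its high
   four bits clear if and only if C is the sign extension of its low
   four bits: toy_unextend (0xfc) == 0x0c, but toy_unextend (0x0c) ==
   0xfc.  */
#if 0
static unsigned char
toy_unextend (unsigned char c)
{
  signed char temp = (c >> 3) & 1;	/* Sign bit to the low-order bit.  */
  temp = (signed char) (temp << 7);	/* ...then to the high-order bit...  */
  temp >>= 8 - 4 - 1;			/* ...then sign-extend.  */
  return c ^ (unsigned char) temp;	/* XOR with C.  */
}
#endif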
3887 /* Find ways of folding logical expressions of LHS and RHS:
3888 Try to merge two comparisons to the same innermost item.
3889 Look for range tests like "ch >= '0' && ch <= '9'".
3890 Look for combinations of simple terms on machines with expensive branches
3891 and evaluate the RHS unconditionally.
3893 For example, if we have p->a == 2 && p->b == 4 and we can make an
3894 object large enough to span both A and B, we can do this with a comparison
3895 against the object ANDed with a mask.
3897 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3898 operations to do this with one comparison.
3900 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3901 function and the one above.
3903 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3904 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3906 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
3909 We return the simplified tree or 0 if no optimization is possible. */
3912 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3914 /* If this is the "or" of two comparisons, we can do something if
3915 the comparisons are NE_EXPR. If this is the "and", we can do something
3916 if the comparisons are EQ_EXPR. I.e.,
3917 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3919 WANTED_CODE is this operation code. For single bit fields, we can
3920 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3921 comparison for one-bit fields. */
3923 enum tree_code wanted_code;
3924 enum tree_code lcode, rcode;
3925 tree ll_arg, lr_arg, rl_arg, rr_arg;
3926 tree ll_inner, lr_inner, rl_inner, rr_inner;
3927 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3928 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3929 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3930 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3931 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3932 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3933 enum machine_mode lnmode, rnmode;
3934 tree ll_mask, lr_mask, rl_mask, rr_mask;
3935 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3936 tree l_const, r_const;
3937 tree lntype, rntype, result;
3938 int first_bit, end_bit;
3941 /* Start by getting the comparison codes. Fail if anything is volatile.
3942 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3943 it were surrounded with a NE_EXPR. */
3945 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3948 lcode = TREE_CODE (lhs);
3949 rcode = TREE_CODE (rhs);
3951 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3952 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3954 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3955 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3957 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3960 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3961 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3963 ll_arg = TREE_OPERAND (lhs, 0);
3964 lr_arg = TREE_OPERAND (lhs, 1);
3965 rl_arg = TREE_OPERAND (rhs, 0);
3966 rr_arg = TREE_OPERAND (rhs, 1);
3968 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3969 if (simple_operand_p (ll_arg)
3970 && simple_operand_p (lr_arg)
3971 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3975 if (operand_equal_p (ll_arg, rl_arg, 0)
3976 && operand_equal_p (lr_arg, rr_arg, 0))
3978 int lcompcode, rcompcode;
3980 lcompcode = comparison_to_compcode (lcode);
3981 rcompcode = comparison_to_compcode (rcode);
3982 compcode = (code == TRUTH_AND_EXPR)
3983 ? lcompcode & rcompcode
3984 : lcompcode | rcompcode;
3986 else if (operand_equal_p (ll_arg, rr_arg, 0)
3987 && operand_equal_p (lr_arg, rl_arg, 0))
3989 int lcompcode, rcompcode;
3991 rcode = swap_tree_comparison (rcode);
3992 lcompcode = comparison_to_compcode (lcode);
3993 rcompcode = comparison_to_compcode (rcode);
3994 compcode = (code == TRUTH_AND_EXPR)
3995 ? lcompcode & rcompcode
3996 : lcompcode | rcompcode;
4001 if (compcode == COMPCODE_TRUE)
4002 return fold_convert (truth_type, integer_one_node);
4003 else if (compcode == COMPCODE_FALSE)
4004 return fold_convert (truth_type, integer_zero_node);
4005 else if (compcode != -1)
4006 return build (compcode_to_comparison (compcode),
4007 truth_type, ll_arg, lr_arg);
4010 /* If the RHS can be evaluated unconditionally and its operands are
4011 simple, it wins to evaluate the RHS unconditionally on machines
4012 with expensive branches. In this case, this isn't a comparison
4013 that can be merged. Avoid doing this if the RHS is a floating-point
4014 comparison since those can trap. */
4016 if (BRANCH_COST >= 2
4017 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4018 && simple_operand_p (rl_arg)
4019 && simple_operand_p (rr_arg))
4021 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4022 if (code == TRUTH_OR_EXPR
4023 && lcode == NE_EXPR && integer_zerop (lr_arg)
4024 && rcode == NE_EXPR && integer_zerop (rr_arg)
4025 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4026 return build (NE_EXPR, truth_type,
4027 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4031 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4032 if (code == TRUTH_AND_EXPR
4033 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4034 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4035 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4036 return build (EQ_EXPR, truth_type,
4037 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4041 return build (code, truth_type, lhs, rhs);
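      /* Illustrative sketch (hypothetical variables a and b): the two
	 conversions above rewrite

	   a != 0 || b != 0   as   (a | b) != 0
	   a == 0 && b == 0   as   (a | b) == 0

	 trading the second test and branch for one bitwise OR, a win
	 when BRANCH_COST says branches are expensive.  */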
4044 /* See if the comparisons can be merged. Then get all the parameters for each side. */
4047 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4048 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4052 ll_inner = decode_field_reference (ll_arg,
4053 &ll_bitsize, &ll_bitpos, &ll_mode,
4054 &ll_unsignedp, &volatilep, &ll_mask,
4056 lr_inner = decode_field_reference (lr_arg,
4057 &lr_bitsize, &lr_bitpos, &lr_mode,
4058 &lr_unsignedp, &volatilep, &lr_mask,
4060 rl_inner = decode_field_reference (rl_arg,
4061 &rl_bitsize, &rl_bitpos, &rl_mode,
4062 &rl_unsignedp, &volatilep, &rl_mask,
4064 rr_inner = decode_field_reference (rr_arg,
4065 &rr_bitsize, &rr_bitpos, &rr_mode,
4066 &rr_unsignedp, &volatilep, &rr_mask,
4069 /* The inner operation on the lhs of each comparison must be the same
4070 if we are to be able to do anything. Then see if we have constants.
4071 If not, the same must be true for the rhs's. */
4073 if (volatilep || ll_inner == 0 || rl_inner == 0
4074 || ! operand_equal_p (ll_inner, rl_inner, 0))
4077 if (TREE_CODE (lr_arg) == INTEGER_CST
4078 && TREE_CODE (rr_arg) == INTEGER_CST)
4079 l_const = lr_arg, r_const = rr_arg;
4080 else if (lr_inner == 0 || rr_inner == 0
4081 || ! operand_equal_p (lr_inner, rr_inner, 0))
4084 l_const = r_const = 0;
4086 /* If either comparison code is not correct for our logical operation,
4087 fail. However, we can convert a one-bit comparison against zero into
4088 the opposite comparison against that bit being set in the field. */
4090 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4091 if (lcode != wanted_code)
4093 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4095 /* Make the left operand unsigned, since we are only interested
4096 in the value of one bit. Otherwise we are doing the wrong thing below. */
4105 /* This is analogous to the code for l_const above. */
4106 if (rcode != wanted_code)
4108 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4117 /* After this point all optimizations will generate bit-field
4118 references, which we might not want. */
4119 if (! lang_hooks.can_use_bit_fields_p ())
4122 /* See if we can find a mode that contains both fields being compared on
4123 the left. If we can't, fail. Otherwise, update all constants and masks
4124 to be relative to a field of that size. */
4125 first_bit = MIN (ll_bitpos, rl_bitpos);
4126 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4127 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4128 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4130 if (lnmode == VOIDmode)
4133 lnbitsize = GET_MODE_BITSIZE (lnmode);
4134 lnbitpos = first_bit & ~ (lnbitsize - 1);
4135 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4136 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4138 if (BYTES_BIG_ENDIAN)
4140 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4141 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4144 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4145 size_int (xll_bitpos), 0);
4146 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4147 size_int (xrl_bitpos), 0);
4151 l_const = fold_convert (lntype, l_const);
4152 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4153 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4154 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4155 fold (build1 (BIT_NOT_EXPR,
4159 warning ("comparison is always %d", wanted_code == NE_EXPR);
4161 return fold_convert (truth_type,
4162 wanted_code == NE_EXPR
4163 ? integer_one_node : integer_zero_node);
4168 r_const = fold_convert (lntype, r_const);
4169 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4170 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4171 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4172 fold (build1 (BIT_NOT_EXPR,
4176 warning ("comparison is always %d", wanted_code == NE_EXPR);
4178 return fold_convert (truth_type,
4179 wanted_code == NE_EXPR
4180 ? integer_one_node : integer_zero_node);
4184 /* If the right sides are not constant, do the same for them. Also,
4185 disallow this optimization if a size or signedness mismatch occurs
4186 between the left and right sides. */
4189 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4190 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4191 /* Make sure the two fields on the right
4192 correspond to the left without being swapped. */
4193 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4196 first_bit = MIN (lr_bitpos, rr_bitpos);
4197 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4198 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4199 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4201 if (rnmode == VOIDmode)
4204 rnbitsize = GET_MODE_BITSIZE (rnmode);
4205 rnbitpos = first_bit & ~ (rnbitsize - 1);
4206 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4207 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4209 if (BYTES_BIG_ENDIAN)
4211 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4212 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4215 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4216 size_int (xlr_bitpos), 0);
4217 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4218 size_int (xrr_bitpos), 0);
4220 /* Make a mask that corresponds to both fields being compared.
4221 Do this for both items being compared. If the operands are the
4222 same size and the bits being compared are in the same position
4223 then we can do this by masking both and comparing the masked results. */
4225 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4226 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4227 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4229 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4230 ll_unsignedp || rl_unsignedp);
4231 if (! all_ones_mask_p (ll_mask, lnbitsize))
4232 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4234 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4235 lr_unsignedp || rr_unsignedp);
4236 if (! all_ones_mask_p (lr_mask, rnbitsize))
4237 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4239 return build (wanted_code, truth_type, lhs, rhs);
4242 /* There is still another way we can do something: If both pairs of
4243 fields being compared are adjacent, we may be able to make a wider
4244 field containing them both.
4246 Note that we still must mask the lhs/rhs expressions. Furthermore,
4247 the mask must be shifted to account for the shift done by
4248 make_bit_field_ref. */
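/* A hypothetical illustration (not from this file): given

     struct s { unsigned a : 4; unsigned b : 4; } x, y;

   the test "x.a == y.a && x.b == y.b" can become a single comparison
   of the containing 8-bit field, masked if the two fields do not
   fill it completely. */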
4249 if ((ll_bitsize + ll_bitpos == rl_bitpos
4250 && lr_bitsize + lr_bitpos == rr_bitpos)
4251 || (ll_bitpos == rl_bitpos + rl_bitsize
4252 && lr_bitpos == rr_bitpos + rr_bitsize))
4256 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4257 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4258 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4259 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4261 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4262 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4263 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4264 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4266 /* Convert to the smaller type before masking out unwanted bits. */
4268 if (lntype != rntype)
4270 if (lnbitsize > rnbitsize)
4272 lhs = fold_convert (rntype, lhs);
4273 ll_mask = fold_convert (rntype, ll_mask);
4276 else if (lnbitsize < rnbitsize)
4278 rhs = fold_convert (lntype, rhs);
4279 lr_mask = fold_convert (lntype, lr_mask);
4284 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4285 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4287 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4288 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4290 return build (wanted_code, truth_type, lhs, rhs);
4296 /* Handle the case of comparisons with constants. If there is something in
4297 common between the masks, those bits of the constants must be the same.
4298 If not, the condition is always false. Test for this to avoid generating
4299 incorrect code below. */
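/* For instance (illustrative): "(x & 3) == 3 && (x & 5) == 0" demands
   that bit 0 be both 1 and 0, so the AND is always false; for "||" of
   the corresponding not-equal tests the result is always true. */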
4300 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4301 if (! integer_zerop (result)
4302 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4303 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4305 if (wanted_code == NE_EXPR)
4307 warning ("`or' of unmatched not-equal tests is always 1");
4308 return fold_convert (truth_type, integer_one_node);
4312 warning ("`and' of mutually exclusive equal-tests is always 0");
4313 return fold_convert (truth_type, integer_zero_node);
4317 /* Construct the expression we will return. First get the component
4318 reference we will make. Unless the mask is all ones the width of
4319 that field, perform the mask operation. Then compare with the merged constant. */
4321 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4322 ll_unsignedp || rl_unsignedp);
4324 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4325 if (! all_ones_mask_p (ll_mask, lnbitsize))
4326 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4328 return build (wanted_code, truth_type, result,
4329 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4332 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant. */
4336 optimize_minmax_comparison (tree t)
4338 tree type = TREE_TYPE (t);
4339 tree arg0 = TREE_OPERAND (t, 0);
4340 enum tree_code op_code;
4341 tree comp_const = TREE_OPERAND (t, 1);
4343 int consts_equal, consts_lt;
4346 STRIP_SIGN_NOPS (arg0);
4348 op_code = TREE_CODE (arg0);
4349 minmax_const = TREE_OPERAND (arg0, 1);
4350 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4351 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4352 inner = TREE_OPERAND (arg0, 0);
4354 /* If something does not permit us to optimize, return the original tree. */
4355 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4356 || TREE_CODE (comp_const) != INTEGER_CST
4357 || TREE_CONSTANT_OVERFLOW (comp_const)
4358 || TREE_CODE (minmax_const) != INTEGER_CST
4359 || TREE_CONSTANT_OVERFLOW (minmax_const))
4362 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4363 and GT_EXPR, doing the rest with recursive calls using logical simplifications. */
4365 switch (TREE_CODE (t))
4367 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4369 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4373 fold (build (TRUTH_ORIF_EXPR, type,
4374 optimize_minmax_comparison
4375 (build (EQ_EXPR, type, arg0, comp_const)),
4376 optimize_minmax_comparison
4377 (build (GT_EXPR, type, arg0, comp_const))));
4380 if (op_code == MAX_EXPR && consts_equal)
4381 /* MAX (X, 0) == 0 -> X <= 0 */
4382 return fold (build (LE_EXPR, type, inner, comp_const));
4384 else if (op_code == MAX_EXPR && consts_lt)
4385 /* MAX (X, 0) == 5 -> X == 5 */
4386 return fold (build (EQ_EXPR, type, inner, comp_const));
4388 else if (op_code == MAX_EXPR)
4389 /* MAX (X, 0) == -1 -> false */
4390 return omit_one_operand (type, integer_zero_node, inner);
4392 else if (consts_equal)
4393 /* MIN (X, 0) == 0 -> X >= 0 */
4394 return fold (build (GE_EXPR, type, inner, comp_const));
4397 /* MIN (X, 0) == 5 -> false */
4398 return omit_one_operand (type, integer_zero_node, inner);
4401 /* MIN (X, 0) == -1 -> X == -1 */
4402 return fold (build (EQ_EXPR, type, inner, comp_const));
4405 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4406 /* MAX (X, 0) > 0 -> X > 0
4407 MAX (X, 0) > 5 -> X > 5 */
4408 return fold (build (GT_EXPR, type, inner, comp_const));
4410 else if (op_code == MAX_EXPR)
4411 /* MAX (X, 0) > -1 -> true */
4412 return omit_one_operand (type, integer_one_node, inner);
4414 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4415 /* MIN (X, 0) > 0 -> false
4416 MIN (X, 0) > 5 -> false */
4417 return omit_one_operand (type, integer_zero_node, inner);
4420 /* MIN (X, 0) > -1 -> X > -1 */
4421 return fold (build (GT_EXPR, type, inner, comp_const));
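/* A quick sanity check of the rewrites above (illustrative only; uses
   a plain macro rather than GCC trees):

     #include <assert.h>
     #define MAX(a, b) ((a) > (b) ? (a) : (b))
     int main (void)
     {
       int x;
       for (x = -10; x <= 10; x++)
         {
           assert ((MAX (x, 0) == 0) == (x <= 0));
           assert ((MAX (x, 0) > 5) == (x > 5));
         }
       return 0;
     }
*/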
4428 /* T is an integer expression that is being multiplied, divided, or taken a
4429 modulus (CODE says which and what kind of divide or modulus) by a
4430 constant C. See if we can eliminate that operation by folding it with
4431 other operations already in T. WIDE_TYPE, if non-null, is a type that
4432 should be used for the computation if wider than our type.
4434 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4435 (X * 2) + (Y * 4). We must, however, be assured that either the original
4436 expression would not overflow or that overflow is undefined for the type
4437 in the language in question.
4439 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4440 the machine has a multiply-accumulate insn or that this is part of an
4441 addressing calculation.
4443 If we return a non-null expression, it is an equivalent form of the
4444 original computation, but need not be in the original type. */
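/* A worked example of the comment above (illustrative; unsigned
   arithmetic, so overflow is well defined):

     #include <assert.h>
     int main (void)
     {
       unsigned x = 7, y = 9;
       assert ((x * 8 + y * 16) / 4 == x * 2 + y * 4);
       assert ((x + 7) * 4 == x * 4 + 28);
       return 0;
     }
*/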
4447 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4449 /* To avoid exponential search depth, refuse to allow recursion past
4450 three levels. Beyond that (1) it's highly unlikely that we'll find
4451 something interesting and (2) we've probably processed it before
4452 when we built the inner expression. */
4461 ret = extract_muldiv_1 (t, c, code, wide_type);
4468 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4470 tree type = TREE_TYPE (t);
4471 enum tree_code tcode = TREE_CODE (t);
4472 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4473 > GET_MODE_SIZE (TYPE_MODE (type)))
4474 ? wide_type : type);
4476 int same_p = tcode == code;
4477 tree op0 = NULL_TREE, op1 = NULL_TREE;
4479 /* Don't deal with constants of zero here; they confuse the code below. */
4480 if (integer_zerop (c))
4483 if (TREE_CODE_CLASS (tcode) == '1')
4484 op0 = TREE_OPERAND (t, 0);
4486 if (TREE_CODE_CLASS (tcode) == '2')
4487 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4489 /* Note that we need not handle conditional operations here since fold
4490 already handles those cases. So just do arithmetic here. */
4494 /* For a constant, we can always simplify if we are a multiply
4495 or (for divide and modulus) if it is a multiple of our constant. */
4496 if (code == MULT_EXPR
4497 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4498 return const_binop (code, fold_convert (ctype, t),
4499 fold_convert (ctype, c), 0);
4502 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4503 /* If op0 is an expression ... */
4504 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4505 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4506 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4507 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4508 /* ... and is unsigned, and its type is smaller than ctype,
4509 then we cannot pass through as widening. */
4510 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
4511 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4512 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4513 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4514 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4515 /* ... or its type is larger than ctype,
4516 then we cannot pass through this truncation. */
4517 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4518 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4519 /* ... or signedness changes for division or modulus,
4520 then we cannot pass through this conversion. */
4521 || (code != MULT_EXPR
4522 && (TYPE_UNSIGNED (ctype)
4523 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
4526 /* Pass the constant down and see if we can make a simplification. If
4527 we can, replace this expression with the inner simplification for
4528 possible later conversion to our or some other type. */
4529 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4530 && TREE_CODE (t2) == INTEGER_CST
4531 && ! TREE_CONSTANT_OVERFLOW (t2)
4532 && (0 != (t1 = extract_muldiv (op0, t2, code,
4534 ? ctype : NULL_TREE))))
4538 case NEGATE_EXPR: case ABS_EXPR:
4539 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4540 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4543 case MIN_EXPR: case MAX_EXPR:
4544 /* If widening the type changes the signedness, then we can't perform
4545 this optimization as that changes the result. */
4546 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
4549 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4550 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4551 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4553 if (tree_int_cst_sgn (c) < 0)
4554 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4556 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4557 fold_convert (ctype, t2)));
4561 case LSHIFT_EXPR: case RSHIFT_EXPR:
4562 /* If the second operand is constant, this is a multiplication
4563 or floor division by a power of two, so we can treat it that
4564 way unless the multiplier or divisor overflows. */
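/* E.g. (illustrative): for unsigned x, "x << 3" behaves as "x * 8"
   and "x >> 3" as "x / 8" (floor division), so the MULT_EXPR and
   FLOOR_DIV_EXPR handling below applies directly. */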
4565 if (TREE_CODE (op1) == INTEGER_CST
4566 /* const_binop may not detect overflow correctly,
4567 so check for it explicitly here. */
4568 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4569 && TREE_INT_CST_HIGH (op1) == 0
4570 && 0 != (t1 = fold_convert (ctype,
4571 const_binop (LSHIFT_EXPR,
4574 && ! TREE_OVERFLOW (t1))
4575 return extract_muldiv (build (tcode == LSHIFT_EXPR
4576 ? MULT_EXPR : FLOOR_DIV_EXPR,
4577 ctype, fold_convert (ctype, op0), t1),
4578 c, code, wide_type);
4581 case PLUS_EXPR: case MINUS_EXPR:
4582 /* See if we can eliminate the operation on both sides. If we can, we
4583 can return a new PLUS or MINUS. If we can't, the only remaining
4584 cases where we can do anything are if the second operand is a constant. */
4586 t1 = extract_muldiv (op0, c, code, wide_type);
4587 t2 = extract_muldiv (op1, c, code, wide_type);
4588 if (t1 != 0 && t2 != 0
4589 && (code == MULT_EXPR
4590 /* If not multiplication, we can only do this if both operands
4591 are divisible by c. */
4592 || (multiple_of_p (ctype, op0, c)
4593 && multiple_of_p (ctype, op1, c))))
4594 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4595 fold_convert (ctype, t2)));
4597 /* If this was a subtraction, negate OP1 and set it to be an addition.
4598 This simplifies the logic below. */
4599 if (tcode == MINUS_EXPR)
4600 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4602 if (TREE_CODE (op1) != INTEGER_CST)
4605 /* If either OP1 or C is negative, this optimization is not safe for
4606 some of the division and remainder types, while for others we need
4607 to change the code. */
4608 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4610 if (code == CEIL_DIV_EXPR)
4611 code = FLOOR_DIV_EXPR;
4612 else if (code == FLOOR_DIV_EXPR)
4613 code = CEIL_DIV_EXPR;
4614 else if (code != MULT_EXPR
4615 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4619 /* If it's a multiply or a division/modulus operation of a multiple
4620 of our constant, do the operation and verify it doesn't overflow. */
4621 if (code == MULT_EXPR
4622 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4624 op1 = const_binop (code, fold_convert (ctype, op1),
4625 fold_convert (ctype, c), 0);
4626 /* We allow the constant to overflow with wrapping semantics. */
4628 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4634 /* If we have an unsigned type that is not a sizetype, we cannot widen
4635 the operation since it will change the result if the original
4636 computation overflowed. */
4637 if (TYPE_UNSIGNED (ctype)
4638 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4642 /* If we were able to eliminate our operation from the first side,
4643 apply our operation to the second side and reform the PLUS. */
4644 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4645 return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4647 /* The last case is if we are a multiply. In that case, we can
4648 apply the distributive law to commute the multiply and addition
4649 if the multiplication of the constants doesn't overflow. */
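/* E.g. (illustrative): multiplying (x + 4) by 3 distributes to
   x * 3 + 12, provided the constant product 4 * 3 does not overflow. */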
4650 if (code == MULT_EXPR)
4651 return fold (build (tcode, ctype,
4652 fold (build (code, ctype,
4653 fold_convert (ctype, op0),
4654 fold_convert (ctype, c))),
4660 /* We have a special case here if we are doing something like
4661 (C * 8) % 4 since we know that's zero. */
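/* Illustrative: for any unsigned x, (x * 8) % 4 == 0, because 8 is
   itself a multiple of 4. */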
4662 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4663 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4664 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4665 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4666 return omit_one_operand (type, integer_zero_node, op0);
4668 /* ... fall through ... */
4670 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4671 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4672 /* If we can extract our operation from the LHS, do so and return a
4673 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4674 do something only if the second operand is a constant. */
4676 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4677 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4678 fold_convert (ctype, op1)));
4679 else if (tcode == MULT_EXPR && code == MULT_EXPR
4680 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4681 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4682 fold_convert (ctype, t1)));
4683 else if (TREE_CODE (op1) != INTEGER_CST)
4686 /* If these are the same operation types, we can associate them
4687 assuming no overflow. */
4689 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4690 fold_convert (ctype, c), 0))
4691 && ! TREE_OVERFLOW (t1))
4692 return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4694 /* If these operations "cancel" each other, we have the main
4695 optimizations of this pass, which occur when either constant is a
4696 multiple of the other, in which case we replace this with an
4697 operation in either CODE or TCODE.
4699 If we have an unsigned type that is not a sizetype, we cannot do
4700 this since it will change the result if the original computation
4702 if ((! TYPE_UNSIGNED (ctype)
4703 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4705 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4706 || (tcode == MULT_EXPR
4707 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4708 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4710 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4711 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4712 fold_convert (ctype,
4713 const_binop (TRUNC_DIV_EXPR,
4715 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4716 return fold (build (code, ctype, fold_convert (ctype, op0),
4717 fold_convert (ctype,
4718 const_binop (TRUNC_DIV_EXPR,
4730 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4731 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4732 that we may sometimes modify the tree. */
4735 strip_compound_expr (tree t, tree s)
4737 enum tree_code code = TREE_CODE (t);
4739 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4740 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4741 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4742 return TREE_OPERAND (t, 1);
4744 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4745 don't bother handling any other types. */
4746 else if (code == COND_EXPR)
4748 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4749 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4750 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4752 else if (TREE_CODE_CLASS (code) == '1')
4753 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4754 else if (TREE_CODE_CLASS (code) == '<'
4755 || TREE_CODE_CLASS (code) == '2')
4757 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4758 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4764 /* Return a node which has the indicated constant VALUE (either 0 or
4765 1), and is of the indicated TYPE. */
4768 constant_boolean_node (int value, tree type)
4770 if (type == integer_type_node)
4771 return value ? integer_one_node : integer_zero_node;
4772 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4773 return lang_hooks.truthvalue_conversion (value ? integer_one_node
4774 : integer_zero_node);
4777 tree t = build_int_2 (value, 0);
4779 TREE_TYPE (t) = type;
4784 /* Utility function for the following routine, to see how complex a nesting of
4785 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4786 we don't care (to avoid spending too much time on complex expressions). */
4789 count_cond (tree expr, int lim)
4793 if (TREE_CODE (expr) != COND_EXPR)
4798 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4799 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4800 return MIN (lim, 1 + ctrue + cfalse);
4803 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4804 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4805 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4806 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4807 COND is the first argument to CODE; otherwise (as in the example
4808 given here), it is the second argument. TYPE is the type of the
4809 original expression. Return NULL_TREE if no simplification is possible. */
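/* For example (illustrative): with int a, b, x, y,

     a + (b ? x : y)   becomes   b ? (a + x) : (a + y)

   and, when the conditional operand is a comparison,

     a + (x < y)       becomes   (x < y) ? (a + 1) : (a + 0). */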
4813 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4814 tree cond, tree arg, int cond_first_p)
4816 tree test, true_value, false_value;
4817 tree lhs = NULL_TREE;
4818 tree rhs = NULL_TREE;
4819 /* In the end, we'll produce a COND_EXPR. Both arms of the
4820 conditional expression will be binary operations. The left-hand
4821 side of the expression to be executed if the condition is true
4822 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4823 of the expression to be executed if the condition is true will be
4824 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4825 but apply to the expression to be executed if the conditional is
4831 /* These are the codes to use for the left-hand side and right-hand
4832 side of the COND_EXPR. Normally, they are the same as CODE. */
4833 enum tree_code lhs_code = code;
4834 enum tree_code rhs_code = code;
4835 /* And these are the types of the expressions. */
4836 tree lhs_type = type;
4837 tree rhs_type = type;
4840 if (TREE_CODE (cond) != COND_EXPR
4841 && TREE_CODE_CLASS (code) == '<')
4844 if (TREE_CODE (arg) == COND_EXPR
4845 && count_cond (cond, 25) + count_cond (arg, 25) > 25)
4848 if (TREE_SIDE_EFFECTS (arg)
4849 && (lang_hooks.decls.global_bindings_p () != 0
4850 || CONTAINS_PLACEHOLDER_P (arg)))
4855 true_rhs = false_rhs = &arg;
4856 true_lhs = &true_value;
4857 false_lhs = &false_value;
4861 true_lhs = false_lhs = &arg;
4862 true_rhs = &true_value;
4863 false_rhs = &false_value;
4866 if (TREE_CODE (cond) == COND_EXPR)
4868 test = TREE_OPERAND (cond, 0);
4869 true_value = TREE_OPERAND (cond, 1);
4870 false_value = TREE_OPERAND (cond, 2);
4871 /* If this operand is a throw expression, then it does not make
4872 sense to try to perform a logical or arithmetic operation
4873 involving it. Instead of building `a + throw 3' for example,
4874 we simply build `a, throw 3'. */
4875 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4879 lhs_code = COMPOUND_EXPR;
4880 lhs_type = void_type_node;
4885 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4889 rhs_code = COMPOUND_EXPR;
4890 rhs_type = void_type_node;
4898 tree testtype = TREE_TYPE (cond);
4900 true_value = fold_convert (testtype, integer_one_node);
4901 false_value = fold_convert (testtype, integer_zero_node);
4904 /* If ARG is complex we want to make sure we only evaluate it once. Though
4905 this is only required if it is volatile, it might be more efficient even
4906 if it is not. However, if we succeed in folding one part to a constant,
4907 we do not need to make this SAVE_EXPR. Since we do this optimization
4908 primarily to see if we end up with a constant, and this SAVE_EXPR
4909 interferes with later optimizations, suppressing it when we can is important.
4912 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4913 do so. Don't try to see if the result is a constant if an arm is a
4914 COND_EXPR since we get exponential behavior in that case. */
4916 if (saved_expr_p (arg))
4918 else if (lhs == 0 && rhs == 0
4919 && !TREE_CONSTANT (arg)
4920 && lang_hooks.decls.global_bindings_p () == 0
4921 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4922 || TREE_SIDE_EFFECTS (arg)))
4924 if (TREE_CODE (true_value) != COND_EXPR)
4925 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4927 if (TREE_CODE (false_value) != COND_EXPR)
4928 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4930 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4931 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4933 arg = save_expr (arg);
4935 save = saved_expr_p (arg);
4940 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4942 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4944 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4946 /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4947 ahead of the COND_EXPR we made. Otherwise we would have it only
4948 evaluated in one branch, with the other branch using the result
4949 but missing the evaluation code. Beware that the save_expr call
4950 above might not return a SAVE_EXPR, so testing the TREE_CODE
4951 of ARG is not enough to decide here. */
4953 return build (COMPOUND_EXPR, type,
4954 fold_convert (void_type_node, arg),
4955 strip_compound_expr (test, arg));
4957 return fold_convert (type, test);
4961 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4963 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4964 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4965 ADDEND is the same as X.
4967 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4968 and finite. The problematic cases are when X is zero, and its mode
4969 has signed zeros. In the case of rounding towards -infinity,
4970 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4971 modes, X + 0 is not the same as X because -0 + 0 is +0. */
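/* Worked cases (IEEE arithmetic, illustrative): under the default
   rounding mode, -0.0 + 0.0 yields +0.0, so folding "x + 0.0" to x
   would lose the sign of a negative zero; under rounding toward
   -infinity, 0.0 - 0.0 yields -0.0, so there "x - 0.0" is the unsafe
   form. */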
4974 fold_real_zero_addition_p (tree type, tree addend, int negate)
4976 if (!real_zerop (addend))
4979 /* Don't allow the fold with -fsignaling-nans. */
4980 if (HONOR_SNANS (TYPE_MODE (type)))
4983 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4984 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4987 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4988 if (TREE_CODE (addend) == REAL_CST
4989 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4992 /* The mode has signed zeros, and we have to honor their sign.
4993 In this situation, there is only one case we can return true for.
4994 X - 0 is the same as X unless rounding towards -infinity is
4996 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4999 /* Subroutine of fold() that checks comparisons of built-in math
5000 functions against real constants.
5002 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5003 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5004 is the type of the result and ARG0 and ARG1 are the operands of the
5005 comparison. ARG1 must be a TREE_REAL_CST.
5007 The function returns the constant folded tree if a simplification
5008 can be made, and NULL_TREE otherwise. */
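/* Illustrative examples of the sqrt foldings below (assuming no NaNs
   where noted):

     sqrt(x) > 2.0    becomes   x > 4.0
     sqrt(x) < -1.0   is always false (EQ/LT/LE against a negative)
     sqrt(x) > -1.0   becomes   x >= 0.0, i.e. the domain test. */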
5011 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5012 tree type, tree arg0, tree arg1)
5016 if (BUILTIN_SQRT_P (fcode))
5018 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5019 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5021 c = TREE_REAL_CST (arg1);
5022 if (REAL_VALUE_NEGATIVE (c))
5024 /* sqrt(x) < y is always false, if y is negative. */
5025 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5026 return omit_one_operand (type,
5027 fold_convert (type, integer_zero_node),
5030 /* sqrt(x) > y is always true, if y is negative and we
5031 don't care about NaNs, i.e. negative values of x. */
5032 if (code == NE_EXPR || !HONOR_NANS (mode))
5033 return omit_one_operand (type,
5034 fold_convert (type, integer_one_node),
5037 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5038 return fold (build (GE_EXPR, type, arg,
5039 build_real (TREE_TYPE (arg), dconst0)));
5041 else if (code == GT_EXPR || code == GE_EXPR)
5045 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5046 real_convert (&c2, mode, &c2);
5048 if (REAL_VALUE_ISINF (c2))
5050 /* sqrt(x) > y is x == +Inf, when y is very large. */
5051 if (HONOR_INFINITIES (mode))
5052 return fold (build (EQ_EXPR, type, arg,
5053 build_real (TREE_TYPE (arg), c2)));
5055 /* sqrt(x) > y is always false, when y is very large
5056 and we don't care about infinities. */
5057 return omit_one_operand (type,
5058 fold_convert (type, integer_zero_node),
5062 /* sqrt(x) > c is the same as x > c*c. */
5063 return fold (build (code, type, arg,
5064 build_real (TREE_TYPE (arg), c2)));
5066 else if (code == LT_EXPR || code == LE_EXPR)
5070 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5071 real_convert (&c2, mode, &c2);
5073 if (REAL_VALUE_ISINF (c2))
5075 /* sqrt(x) < y is always true, when y is a very large
5076 value and we don't care about NaNs or Infinities. */
5077 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5078 return omit_one_operand (type,
5079 fold_convert (type, integer_one_node),
5082 /* sqrt(x) < y is x != +Inf when y is very large and we
5083 don't care about NaNs. */
5084 if (! HONOR_NANS (mode))
5085 return fold (build (NE_EXPR, type, arg,
5086 build_real (TREE_TYPE (arg), c2)));
5088 /* sqrt(x) < y is x >= 0 when y is very large and we
5089 don't care about Infinities. */
5090 if (! HONOR_INFINITIES (mode))
5091 return fold (build (GE_EXPR, type, arg,
5092 build_real (TREE_TYPE (arg), dconst0)));
5094 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5095 if (lang_hooks.decls.global_bindings_p () != 0
5096 || CONTAINS_PLACEHOLDER_P (arg))
5099 arg = save_expr (arg);
5100 return fold (build (TRUTH_ANDIF_EXPR, type,
5101 fold (build (GE_EXPR, type, arg,
5102 build_real (TREE_TYPE (arg),
5104 fold (build (NE_EXPR, type, arg,
5105 build_real (TREE_TYPE (arg),
5109 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5110 if (! HONOR_NANS (mode))
5111 return fold (build (code, type, arg,
5112 build_real (TREE_TYPE (arg), c2)));
5114 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5115 if (lang_hooks.decls.global_bindings_p () == 0
5116 && ! CONTAINS_PLACEHOLDER_P (arg))
5118 arg = save_expr (arg);
5119 return fold (build (TRUTH_ANDIF_EXPR, type,
5120 fold (build (GE_EXPR, type, arg,
5121 build_real (TREE_TYPE (arg),
5123 fold (build (code, type, arg,
5124 build_real (TREE_TYPE (arg),
5133 /* Subroutine of fold() that optimizes comparisons against Infinities,
5134 either +Inf or -Inf.
5136 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5137 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5138 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5140 The function returns the constant folded tree if a simplification
5141 can be made, and NULL_TREE otherwise. */
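/* Illustrative (assuming no signaling NaNs): for double x,
   "x > +Inf" folds to false, "x < +Inf" to "x <= DBL_MAX", and
   "x == +Inf" to "x > DBL_MAX"; for -Inf the comparison sense is
   swapped first. */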
5144 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5146 enum machine_mode mode;
5147 REAL_VALUE_TYPE max;
5151 mode = TYPE_MODE (TREE_TYPE (arg0));
5153 /* For negative infinity swap the sense of the comparison. */
5154 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5156 code = swap_tree_comparison (code);
5161 /* x > +Inf is always false, if we ignore sNaNs. */
5162 if (HONOR_SNANS (mode))
5164 return omit_one_operand (type,
5165 fold_convert (type, integer_zero_node),
5169 /* x <= +Inf is always true, if we don't care about NaNs. */
5170 if (! HONOR_NANS (mode))
5171 return omit_one_operand (type,
5172 fold_convert (type, integer_one_node),
5175 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5176 if (lang_hooks.decls.global_bindings_p () == 0
5177 && ! CONTAINS_PLACEHOLDER_P (arg0))
5179 arg0 = save_expr (arg0);
5180 return fold (build (EQ_EXPR, type, arg0, arg0));
5186 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5187 real_maxval (&max, neg, mode);
5188 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5189 arg0, build_real (TREE_TYPE (arg0), max)));
5192 /* x < +Inf is always equal to x <= DBL_MAX. */
5193 real_maxval (&max, neg, mode);
5194 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5195 arg0, build_real (TREE_TYPE (arg0), max)));
5198 /* x != +Inf is always equal to !(x > DBL_MAX). */
5199 real_maxval (&max, neg, mode);
5200 if (! HONOR_NANS (mode))
5201 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5202 arg0, build_real (TREE_TYPE (arg0), max)));
5203 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5204 arg0, build_real (TREE_TYPE (arg0), max)));
5205 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5214 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5215 equality/inequality test, then return a simplified form of
5216 the test using shifts and logical operations. Otherwise return
5217 NULL. TYPE is the desired result type. */
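/* For instance (illustrative): with unsigned x,

     (x & 8) != 0   becomes   (x >> 3) & 1
     (x & 8) == 0   becomes   ((x >> 3) ^ 1) & 1

   while a test of the sign bit, such as (x & 0x80000000) != 0 for a
   32-bit x, becomes the signed comparison (int) x < 0. */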
5220 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5223 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside its operand. */
5225 if (code == TRUTH_NOT_EXPR)
5227 code = TREE_CODE (arg0);
5228 if (code != NE_EXPR && code != EQ_EXPR)
5231 /* Extract the arguments of the EQ/NE. */
5232 arg1 = TREE_OPERAND (arg0, 1);
5233 arg0 = TREE_OPERAND (arg0, 0);
5235 /* This requires us to invert the code. */
5236 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5239 /* If this is testing a single bit, we can optimize the test. */
5240 if ((code == NE_EXPR || code == EQ_EXPR)
5241 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5242 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5244 tree inner = TREE_OPERAND (arg0, 0);
5245 tree type = TREE_TYPE (arg0);
5246 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5247 enum machine_mode operand_mode = TYPE_MODE (type);
5249 tree signed_type, unsigned_type, intermediate_type;
5252 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5253 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5254 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5255 if (arg00 != NULL_TREE)
5257 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5258 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5259 fold_convert (stype, arg00),
5260 fold_convert (stype, integer_zero_node)));
5263 /* At this point, we know that arg0 is not testing the sign bit. */
5264 if (TYPE_PRECISION (type) - 1 == bitnum)
5267 /* Otherwise we have (A & C) != 0 where C is a single bit,
5268 convert that into ((A >> C2) & 1), where C2 = log2(C).
5269 Similarly for (A & C) == 0. */
5271 /* If INNER is a right shift of a constant and it plus BITNUM does
5272 not overflow, adjust BITNUM and INNER. */
5273 if (TREE_CODE (inner) == RSHIFT_EXPR
5274 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5275 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5276 && bitnum < TYPE_PRECISION (type)
5277 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5278 bitnum - TYPE_PRECISION (type)))
5280 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5281 inner = TREE_OPERAND (inner, 0);
5284 /* If we are going to be able to omit the AND below, we must do our
5285 operations as unsigned. If we must use the AND, we have a choice.
5286 Normally unsigned is faster, but for some machines signed is. */
5287 #ifdef LOAD_EXTEND_OP
5288 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5293 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5294 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5295 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5296 inner = fold_convert (intermediate_type, inner);
5299 inner = build (RSHIFT_EXPR, intermediate_type,
5300 inner, size_int (bitnum));
5302 if (code == EQ_EXPR)
5303 inner = build (BIT_XOR_EXPR, intermediate_type,
5304 inner, integer_one_node);
5306 /* Put the AND last so it can combine with more things. */
5307 inner = build (BIT_AND_EXPR, intermediate_type,
5308 inner, integer_one_node);
5310 /* Make sure to return the proper type. */
5311 inner = fold_convert (result_type, inner);
5318 /* Check whether we are allowed to reorder operands arg0 and arg1,
5319 such that the evaluation of arg1 occurs before arg0. */
5322 reorder_operands_p (tree arg0, tree arg1)
5324 if (! flag_evaluation_order)
5326 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5328 return ! TREE_SIDE_EFFECTS (arg0)
5329 && ! TREE_SIDE_EFFECTS (arg1);
5332 /* Test whether it is preferable to swap two operands, ARG0 and
5333 ARG1, for example because ARG0 is an integer constant and ARG1
5334 isn't. If REORDER is true, only recommend swapping if we can
5335 evaluate the operands in reverse order. */
5338 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5340 STRIP_SIGN_NOPS (arg0);
5341 STRIP_SIGN_NOPS (arg1);
5343 if (TREE_CODE (arg1) == INTEGER_CST)
5345 if (TREE_CODE (arg0) == INTEGER_CST)
5348 if (TREE_CODE (arg1) == REAL_CST)
5350 if (TREE_CODE (arg0) == REAL_CST)
5353 if (TREE_CODE (arg1) == COMPLEX_CST)
5355 if (TREE_CODE (arg0) == COMPLEX_CST)
5358 if (TREE_CONSTANT (arg1))
5360 if (TREE_CONSTANT (arg0))
5366 if (reorder && flag_evaluation_order
5367 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5378 /* Perform constant folding and related simplification of EXPR.
5379 The related simplifications include x*1 => x, x*0 => 0, etc.,
5380 and application of the associative law.
5381 NOP_EXPR conversions may be removed freely (as long as we
5382 are careful not to change the type of the overall expression).
5383 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5384 but we can constant-fold them if they have constant operands. */
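/* Illustrative, in source terms: fold rewrites "x * 1" to "x",
   "x * 0" to "0" (when x has no side effects), and an expression with
   constant operands such as "2 + 3" to "5". */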
5386 #ifdef ENABLE_FOLD_CHECKING
5387 # define fold(x) fold_1 (x)
5388 static tree fold_1 (tree);
5394 const tree t = expr;
5395 const tree type = TREE_TYPE (expr);
5396 tree t1 = NULL_TREE;
5398 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5399 enum tree_code code = TREE_CODE (t);
5400 int kind = TREE_CODE_CLASS (code);
5401 /* WINS will be nonzero when the switch is done
5402 if all operands are constant. */
5405 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5406 Likewise for a SAVE_EXPR that's already been evaluated. */
5407 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5410 /* Return right away if a constant. */
5414 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5418 /* Special case for conversion ops that can have fixed point args. */
5419 arg0 = TREE_OPERAND (t, 0);
5421 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5423 STRIP_SIGN_NOPS (arg0);
5425 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5426 subop = TREE_REALPART (arg0);
5430 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5431 && TREE_CODE (subop) != REAL_CST)
5432 /* Note that TREE_CONSTANT isn't enough:
5433 static var addresses are constant but we can't
5434 do arithmetic on them. */
5437 else if (IS_EXPR_CODE_CLASS (kind))
5439 int len = first_rtl_op (code);
5441 for (i = 0; i < len; i++)
5443 tree op = TREE_OPERAND (t, i);
5447 continue; /* Valid for CALL_EXPR, at least. */
5449 /* Strip any conversions that don't change the mode. This is
5450 safe for every expression, except for a comparison expression
5451 because its signedness is derived from its operands. So, in
5452 the latter case, only strip conversions that don't change the signedness.
5455 Note that this is done as an internal manipulation within the
5456 constant folder, in order to find the simplest representation
5457 of the arguments so that their form can be studied. In any
5458 case, the appropriate type conversions should be put back in
5459 the tree that will get out of the constant folder. */
5461 STRIP_SIGN_NOPS (op);
5465 if (TREE_CODE (op) == COMPLEX_CST)
5466 subop = TREE_REALPART (op);
5470 if (TREE_CODE (subop) != INTEGER_CST
5471 && TREE_CODE (subop) != REAL_CST)
5472 /* Note that TREE_CONSTANT isn't enough:
5473 static var addresses are constant but we can't
5474 do arithmetic on them. */
5484 /* If this is a commutative operation, and ARG0 is a constant, move it
5485 to ARG1 to reduce the number of tests below. */
5486 if (commutative_tree_code (code)
5487 && tree_swap_operands_p (arg0, arg1, true))
5488 return fold (build (code, type, TREE_OPERAND (t, 1),
5489 TREE_OPERAND (t, 0)));
5491 /* Now WINS is set as described above,
5492 ARG0 is the first operand of EXPR,
5493 and ARG1 is the second operand (if it has more than one operand).
5495 First check for cases where an arithmetic operation is applied to a
5496 compound, conditional, or comparison operation. Push the arithmetic
5497 operation inside the compound or conditional to see if any folding
5498 can then be done. Convert comparison to conditional for this purpose.
5499 This also optimizes non-constant cases that used to be done in expand_expr.
5502 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5503 one of the operands is a comparison and the other is a comparison, a
5504 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5505 code below would make the expression more complex. Change it to a
5506 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5507 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5509 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5510 || code == EQ_EXPR || code == NE_EXPR)
5511 && ((truth_value_p (TREE_CODE (arg0))
5512 && (truth_value_p (TREE_CODE (arg1))
5513 || (TREE_CODE (arg1) == BIT_AND_EXPR
5514 && integer_onep (TREE_OPERAND (arg1, 1)))))
5515 || (truth_value_p (TREE_CODE (arg1))
5516 && (truth_value_p (TREE_CODE (arg0))
5517 || (TREE_CODE (arg0) == BIT_AND_EXPR
5518 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5520 tem = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5521 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5525 if (code == EQ_EXPR)
5526 tem = invert_truthvalue (tem);
5531 if (TREE_CODE_CLASS (code) == '1')
5533 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5534 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5535 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5536 else if (TREE_CODE (arg0) == COND_EXPR)
5538 tree arg01 = TREE_OPERAND (arg0, 1);
5539 tree arg02 = TREE_OPERAND (arg0, 2);
5540 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5541 arg01 = fold (build1 (code, type, arg01));
5542 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5543 arg02 = fold (build1 (code, type, arg02));
5544 tem = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5547 /* If this was a conversion, and all we did was to move it
5548 inside the COND_EXPR, bring it back out. But leave it if
5549 it is a conversion from integer to integer and the
5550 result precision is no wider than a word since such a
5551 conversion is cheap and may be optimized away by combine,
5552 while it couldn't if it were outside the COND_EXPR. Then return
5553 so we don't get into an infinite recursion loop taking the
5554 conversion out and then back in. */
5556 if ((code == NOP_EXPR || code == CONVERT_EXPR
5557 || code == NON_LVALUE_EXPR)
5558 && TREE_CODE (tem) == COND_EXPR
5559 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
5560 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
5561 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
5562 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
5563 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
5564 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
5565 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
5567 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
5568 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
5569 tem = build1 (code, type,
5571 TREE_TYPE (TREE_OPERAND
5572 (TREE_OPERAND (tem, 1), 0)),
5573 TREE_OPERAND (tem, 0),
5574 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
5575 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
5578 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5579 return fold (build (COND_EXPR, type, arg0,
5580 fold (build1 (code, type, integer_one_node)),
5581 fold (build1 (code, type, integer_zero_node))));
5583 else if (TREE_CODE_CLASS (code) == '<'
5584 && TREE_CODE (arg0) == COMPOUND_EXPR)
5585 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5586 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5587 else if (TREE_CODE_CLASS (code) == '<'
5588 && TREE_CODE (arg1) == COMPOUND_EXPR)
5589 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5590 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5591 else if (TREE_CODE_CLASS (code) == '2'
5592 || TREE_CODE_CLASS (code) == '<')
5594 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5595 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5596 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5597 if (TREE_CODE (arg1) == COMPOUND_EXPR
5598 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
5599 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5600 fold (build (code, type,
5601 arg0, TREE_OPERAND (arg1, 1))));
5603 if (TREE_CODE (arg0) == COND_EXPR
5604 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5606 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5607 /*cond_first_p=*/1);
5608 if (tem != NULL_TREE)
5612 if (TREE_CODE (arg1) == COND_EXPR
5613 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
5615 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5616 /*cond_first_p=*/0);
5617 if (tem != NULL_TREE)
5625 return fold (DECL_INITIAL (t));
5630 case FIX_TRUNC_EXPR:
5632 case FIX_FLOOR_EXPR:
5633 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
5634 return TREE_OPERAND (t, 0);
5636 /* Handle cases of two conversions in a row. */
5637 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5638 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5640 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5641 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5642 int inside_int = INTEGRAL_TYPE_P (inside_type);
5643 int inside_ptr = POINTER_TYPE_P (inside_type);
5644 int inside_float = FLOAT_TYPE_P (inside_type);
5645 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5646 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
5647 int inter_int = INTEGRAL_TYPE_P (inter_type);
5648 int inter_ptr = POINTER_TYPE_P (inter_type);
5649 int inter_float = FLOAT_TYPE_P (inter_type);
5650 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5651 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
5652 int final_int = INTEGRAL_TYPE_P (type);
5653 int final_ptr = POINTER_TYPE_P (type);
5654 int final_float = FLOAT_TYPE_P (type);
5655 unsigned int final_prec = TYPE_PRECISION (type);
5656 int final_unsignedp = TYPE_UNSIGNED (type);
5658 /* In addition to the cases of two conversions in a row
5659 handled below, if we are converting something to its own
5660 type via an object of identical or wider precision, neither
5661 conversion is needed. */
5662 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
5663 && ((inter_int && final_int) || (inter_float && final_float))
5664 && inter_prec >= final_prec)
5665 return fold (build1 (code, type,
5666 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5668 /* Likewise, if the intermediate and final types are either both
5669 float or both integer, we don't need the middle conversion if
5670 it is wider than the final type and doesn't change the signedness
5671 (for integers). Avoid this if the final type is a pointer
5672 since then we sometimes need the inner conversion. Likewise if
5673 the outer has a precision not equal to the size of its mode. */
5674 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5675 || (inter_float && inside_float))
5676 && inter_prec >= inside_prec
5677 && (inter_float || inter_unsignedp == inside_unsignedp)
5678 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5679 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5681 return fold (build1 (code, type,
5682 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5684 /* If we have a sign-extension of a zero-extended value, we can
5685 replace that by a single zero-extension. */
5686 if (inside_int && inter_int && final_int
5687 && inside_prec < inter_prec && inter_prec < final_prec
5688 && inside_unsignedp && !inter_unsignedp)
5689 return fold (build1 (code, type,
5690 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5692 /* Two conversions in a row are not needed unless:
5693 - some conversion is floating-point (overstrict for now), or
5694 - the intermediate type is narrower than both initial and final types, or
5696 - the intermediate type and innermost type differ in signedness,
5697 and the outermost type is wider than the intermediate, or
5698 - the initial type is a pointer type and the precisions of the
5699 intermediate and final types differ, or
5700 - the final type is a pointer type and the precisions of the
5701 initial and intermediate types differ. */
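/* E.g. (illustrative, assuming 16-bit short, 32-bit int, 64-bit long):
   with int x, (short) (long) x folds to (short) x, since the widening
   to long is useless; with unsigned char c, (long) (int) c folds to a
   single zero extension. */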
5702 if (! inside_float && ! inter_float && ! final_float
5703 && (inter_prec > inside_prec || inter_prec > final_prec)
5704 && ! (inside_int && inter_int
5705 && inter_unsignedp != inside_unsignedp
5706 && inter_prec < final_prec)
5707 && ((inter_unsignedp && inter_prec > inside_prec)
5708 == (final_unsignedp && final_prec > inter_prec))
5709 && ! (inside_ptr && inter_prec != final_prec)
5710 && ! (final_ptr && inside_prec != inter_prec)
5711 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5712 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5714 return fold (build1 (code, type,
5715 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5718 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5719 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5720 /* Detect assigning a bitfield. */
5721 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5722 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5724 /* Don't leave an assignment inside a conversion
5725 unless assigning a bitfield. */
5726 tree prev = TREE_OPERAND (t, 0);
5727 tem = copy_node (t);
5728 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
5729 /* First do the assignment, then return converted constant. */
5730 tem = build (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
5731 TREE_NO_UNUSED_WARNING (tem) = 1;
5732 TREE_USED (tem) = 1;
5736 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5737 constant (if x has signed type, the sign bit cannot be set
5738 in c). This folds extension into the BIT_AND_EXPR. */
5739 if (INTEGRAL_TYPE_P (type)
5740 && TREE_CODE (type) != BOOLEAN_TYPE
5741 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5742 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5744 tree and = TREE_OPERAND (t, 0);
5745 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5748 if (TYPE_UNSIGNED (TREE_TYPE (and))
5749 || (TYPE_PRECISION (type)
5750 <= TYPE_PRECISION (TREE_TYPE (and))))
5752 else if (TYPE_PRECISION (TREE_TYPE (and1))
5753 <= HOST_BITS_PER_WIDE_INT
5754 && host_integerp (and1, 1))
5756 unsigned HOST_WIDE_INT cst;
5758 cst = tree_low_cst (and1, 1);
5759 cst &= (HOST_WIDE_INT) -1
5760 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5761 change = (cst == 0);
5762 #ifdef LOAD_EXTEND_OP
5764 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5767 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
5768 and0 = fold_convert (uns, and0);
5769 and1 = fold_convert (uns, and1);
5774 return fold (build (BIT_AND_EXPR, type,
5775 fold_convert (type, and0),
5776 fold_convert (type, and1)));
5779 tem = fold_convert_const (code, type, arg0);
5780 return tem ? tem : t;
5782 case VIEW_CONVERT_EXPR:
5783 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5784 return build1 (VIEW_CONVERT_EXPR, type,
5785 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5789 if (TREE_CODE (arg0) == CONSTRUCTOR
5790 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5792 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5794 return TREE_VALUE (m);
5799 if (TREE_CONSTANT (t) != wins)
5801 tem = copy_node (t);
5802 TREE_CONSTANT (tem) = wins;
5808 if (negate_expr_p (arg0))
5809 return fold_convert (type, negate_expr (arg0));
5814 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
5815 return fold_abs_const (arg0, type);
5816 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5817 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5818 /* Convert fabs((double)float) into (double)fabsf(float). */
5819 else if (TREE_CODE (arg0) == NOP_EXPR
5820 && TREE_CODE (type) == REAL_TYPE)
5822 tree targ0 = strip_float_extensions (arg0);
5824 return fold_convert (type, fold (build1 (ABS_EXPR,
5828 else if (tree_expr_nonnegative_p (arg0))
5833 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5834 return fold_convert (type, arg0);
5835 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5836 return build (COMPLEX_EXPR, type,
5837 TREE_OPERAND (arg0, 0),
5838 negate_expr (TREE_OPERAND (arg0, 1)));
5839 else if (TREE_CODE (arg0) == COMPLEX_CST)
5840 return build_complex (type, TREE_REALPART (arg0),
5841 negate_expr (TREE_IMAGPART (arg0)));
5842 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5843 return fold (build (TREE_CODE (arg0), type,
5844 fold (build1 (CONJ_EXPR, type,
5845 TREE_OPERAND (arg0, 0))),
5846 fold (build1 (CONJ_EXPR,
5847 type, TREE_OPERAND (arg0, 1)))));
5848 else if (TREE_CODE (arg0) == CONJ_EXPR)
5849 return TREE_OPERAND (arg0, 0);
5855 tem = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5856 ~ TREE_INT_CST_HIGH (arg0));
5857 TREE_TYPE (tem) = type;
5858 force_fit_type (tem, 0);
5859 TREE_OVERFLOW (tem) = TREE_OVERFLOW (arg0);
5860 TREE_CONSTANT_OVERFLOW (tem) = TREE_CONSTANT_OVERFLOW (arg0);
5863 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5864 return TREE_OPERAND (arg0, 0);
5868 /* A + (-B) -> A - B */
5869 if (TREE_CODE (arg1) == NEGATE_EXPR)
5870 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5871 /* (-A) + B -> B - A */
5872 if (TREE_CODE (arg0) == NEGATE_EXPR
5873 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
5874 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5875 if (! FLOAT_TYPE_P (type))
5877 if (integer_zerop (arg1))
5878 return non_lvalue (fold_convert (type, arg0));
5880 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5881 with a constant, and the two constants have no bits in common,
5882 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
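/* E.g. (illustrative): (x & 0xF0) + (y & 0x0F) can never carry, so it
   is the same as (x & 0xF0) | (y & 0x0F). */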
5884 if (TREE_CODE (arg0) == BIT_AND_EXPR
5885 && TREE_CODE (arg1) == BIT_AND_EXPR
5886 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5887 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5888 && integer_zerop (const_binop (BIT_AND_EXPR,
5889 TREE_OPERAND (arg0, 1),
5890 TREE_OPERAND (arg1, 1), 0)))
5892 code = BIT_IOR_EXPR;
5896 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5897 (plus (plus (mult) (mult)) (foo)) so that we can
5898 take advantage of the factoring cases below. */
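/* E.g. (illustrative): (a*b + c) + d*e is rewritten as
   (a*b + d*e) + c, grouping the two products together. */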
5899 if ((TREE_CODE (arg0) == PLUS_EXPR
5900 && TREE_CODE (arg1) == MULT_EXPR)
5901 || (TREE_CODE (arg1) == PLUS_EXPR
5902 && TREE_CODE (arg0) == MULT_EXPR))
5904 tree parg0, parg1, parg, marg;
5906 if (TREE_CODE (arg0) == PLUS_EXPR)
5907 parg = arg0, marg = arg1;
5909 parg = arg1, marg = arg0;
5910 parg0 = TREE_OPERAND (parg, 0);
5911 parg1 = TREE_OPERAND (parg, 1);
5915 if (TREE_CODE (parg0) == MULT_EXPR
5916 && TREE_CODE (parg1) != MULT_EXPR)
5917 return fold (build (PLUS_EXPR, type,
5918 fold (build (PLUS_EXPR, type,
5919 fold_convert (type, parg0),
5920 fold_convert (type, marg))),
5921 fold_convert (type, parg1)));
5922 if (TREE_CODE (parg0) != MULT_EXPR
5923 && TREE_CODE (parg1) == MULT_EXPR)
5924 return fold (build (PLUS_EXPR, type,
5925 fold (build (PLUS_EXPR, type,
5926 fold_convert (type, parg1),
5927 fold_convert (type, marg))),
5928 fold_convert (type, parg0)));
5931 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5933 tree arg00, arg01, arg10, arg11;
5934 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5936 /* (A * C) + (B * C) -> (A+B) * C.
5937 We are most concerned about the case where C is a constant,
5938 but other combinations show up during loop reduction. Since
5939 it is not difficult, try all four possibilities. */
5941 arg00 = TREE_OPERAND (arg0, 0);
5942 arg01 = TREE_OPERAND (arg0, 1);
5943 arg10 = TREE_OPERAND (arg1, 0);
5944 arg11 = TREE_OPERAND (arg1, 1);
5947 if (operand_equal_p (arg01, arg11, 0))
5948 same = arg01, alt0 = arg00, alt1 = arg10;
5949 else if (operand_equal_p (arg00, arg10, 0))
5950 same = arg00, alt0 = arg01, alt1 = arg11;
5951 else if (operand_equal_p (arg00, arg11, 0))
5952 same = arg00, alt0 = arg01, alt1 = arg10;
5953 else if (operand_equal_p (arg01, arg10, 0))
5954 same = arg01, alt0 = arg00, alt1 = arg11;
5956 /* No identical multiplicands; see if we can find a common
5957 power-of-two factor in non-power-of-two multiplies. This
5958 can help in multi-dimensional array access. */
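/* E.g. (illustrative): a*12 + b*4 has no common multiplicand, but 4
   divides 12, so it factors as (a*3 + b) * 4. */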
5959 else if (TREE_CODE (arg01) == INTEGER_CST
5960 && TREE_CODE (arg11) == INTEGER_CST
5961 && TREE_INT_CST_HIGH (arg01) == 0
5962 && TREE_INT_CST_HIGH (arg11) == 0)
5964 HOST_WIDE_INT int01, int11, tmp;
5965 int01 = TREE_INT_CST_LOW (arg01);
5966 int11 = TREE_INT_CST_LOW (arg11);
5968 /* Move min of absolute values to int11. */
5969 if ((int01 >= 0 ? int01 : -int01)
5970 < (int11 >= 0 ? int11 : -int11))
5972 tmp = int01, int01 = int11, int11 = tmp;
5973 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5974 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5977 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5979 alt0 = fold (build (MULT_EXPR, type, arg00,
5980 build_int_2 (int01 / int11, 0)));
5981 alt1 = arg10;
5982 same = arg11;
5986 if (same)
5987 return fold (build (MULT_EXPR, type,
5988 fold (build (PLUS_EXPR, type, alt0, alt1)),
5989 same));
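/* For example, i*12 + j*4 has no identical multiplicand, but 4 is
   a power of two dividing 12, so the sum folds to (i*3 + j)*4.  */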
5994 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5995 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5996 return non_lvalue (fold_convert (type, arg0));
5998 /* Likewise if the operands are reversed. */
5999 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6000 return non_lvalue (fold_convert (type, arg1));
6002 /* Convert x+x into x*2.0. */
6003 if (operand_equal_p (arg0, arg1, 0)
6004 && SCALAR_FLOAT_TYPE_P (type))
6005 return fold (build (MULT_EXPR, type, arg0,
6006 build_real (type, dconst2)));
6008 /* Convert x*c+x into x*(c+1). */
6009 if (flag_unsafe_math_optimizations
6010 && TREE_CODE (arg0) == MULT_EXPR
6011 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6012 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6013 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6015 REAL_VALUE_TYPE c;
6017 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6018 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6019 return fold (build (MULT_EXPR, type, arg1,
6020 build_real (type, c)));
6023 /* Convert x+x*c into x*(c+1). */
6024 if (flag_unsafe_math_optimizations
6025 && TREE_CODE (arg1) == MULT_EXPR
6026 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6027 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6028 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6030 REAL_VALUE_TYPE c;
6032 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6033 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6034 return fold (build (MULT_EXPR, type, arg0,
6035 build_real (type, c)));
6038 /* Convert x*c1+x*c2 into x*(c1+c2). */
6039 if (flag_unsafe_math_optimizations
6040 && TREE_CODE (arg0) == MULT_EXPR
6041 && TREE_CODE (arg1) == MULT_EXPR
6042 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6043 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6044 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6045 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6046 && operand_equal_p (TREE_OPERAND (arg0, 0),
6047 TREE_OPERAND (arg1, 0), 0))
6049 REAL_VALUE_TYPE c1, c2;
6051 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6052 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6053 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6054 return fold (build (MULT_EXPR, type,
6055 TREE_OPERAND (arg0, 0),
6056 build_real (type, c1)));
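/* With -funsafe-math-optimizations the three folds above rewrite,
   e.g., x*3.0 + x and x + x*3.0 as x*4.0, and x*2.0 + x*3.0 as
   x*5.0.  */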
6058 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6059 if (flag_unsafe_math_optimizations
6060 && TREE_CODE (arg1) == PLUS_EXPR
6061 && TREE_CODE (arg0) != MULT_EXPR)
6063 tree tree10 = TREE_OPERAND (arg1, 0);
6064 tree tree11 = TREE_OPERAND (arg1, 1);
6065 if (TREE_CODE (tree11) == MULT_EXPR
6066 && TREE_CODE (tree10) == MULT_EXPR)
6068 tree tree0;
6069 tree0 = fold (build (PLUS_EXPR, type, arg0, tree10));
6070 return fold (build (PLUS_EXPR, type, tree0, tree11));
6073 /* Convert (b*c + d*e) + a into b*c + (d*e + a) */
6074 if (flag_unsafe_math_optimizations
6075 && TREE_CODE (arg0) == PLUS_EXPR
6076 && TREE_CODE (arg1) != MULT_EXPR)
6078 tree tree00 = TREE_OPERAND (arg0, 0);
6079 tree tree01 = TREE_OPERAND (arg0, 1);
6080 if (TREE_CODE (tree01) == MULT_EXPR
6081 && TREE_CODE (tree00) == MULT_EXPR)
6083 tree tree0;
6084 tree0 = fold (build (PLUS_EXPR, type, tree01, arg1));
6085 return fold (build (PLUS_EXPR, type, tree00, tree0));
6089 bit_rotate:
6091 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6092 is a rotate of A by C1 bits. */
6093 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6094 is a rotate of A by B bits. */
6096 enum tree_code code0, code1;
6097 code0 = TREE_CODE (arg0);
6098 code1 = TREE_CODE (arg1);
6099 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6100 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6101 && operand_equal_p (TREE_OPERAND (arg0, 0),
6102 TREE_OPERAND (arg1, 0), 0)
6103 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6105 tree tree01, tree11;
6106 enum tree_code code01, code11;
6108 tree01 = TREE_OPERAND (arg0, 1);
6109 tree11 = TREE_OPERAND (arg1, 1);
6110 STRIP_NOPS (tree01);
6111 STRIP_NOPS (tree11);
6112 code01 = TREE_CODE (tree01);
6113 code11 = TREE_CODE (tree11);
6114 if (code01 == INTEGER_CST
6115 && code11 == INTEGER_CST
6116 && TREE_INT_CST_HIGH (tree01) == 0
6117 && TREE_INT_CST_HIGH (tree11) == 0
6118 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6119 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6120 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6121 code0 == LSHIFT_EXPR ? tree01 : tree11);
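/* For example, with a 32-bit unsigned A, (A << 3) + (A >> 29)
   becomes A rotated left by 3.  */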
6122 else if (code11 == MINUS_EXPR)
6124 tree tree110, tree111;
6125 tree110 = TREE_OPERAND (tree11, 0);
6126 tree111 = TREE_OPERAND (tree11, 1);
6127 STRIP_NOPS (tree110);
6128 STRIP_NOPS (tree111);
6129 if (TREE_CODE (tree110) == INTEGER_CST
6130 && 0 == compare_tree_int (tree110,
6131 TYPE_PRECISION
6132 (TREE_TYPE (TREE_OPERAND
6133 (arg0, 0))))
6134 && operand_equal_p (tree01, tree111, 0))
6135 return build ((code0 == LSHIFT_EXPR
6136 ? LROTATE_EXPR
6137 : RROTATE_EXPR),
6138 type, TREE_OPERAND (arg0, 0), tree01);
6140 else if (code01 == MINUS_EXPR)
6142 tree tree010, tree011;
6143 tree010 = TREE_OPERAND (tree01, 0);
6144 tree011 = TREE_OPERAND (tree01, 1);
6145 STRIP_NOPS (tree010);
6146 STRIP_NOPS (tree011);
6147 if (TREE_CODE (tree010) == INTEGER_CST
6148 && 0 == compare_tree_int (tree010,
6149 TYPE_PRECISION
6150 (TREE_TYPE (TREE_OPERAND
6151 (arg0, 0))))
6152 && operand_equal_p (tree11, tree011, 0))
6153 return build ((code0 != LSHIFT_EXPR
6154 ? LROTATE_EXPR
6155 : RROTATE_EXPR),
6156 type, TREE_OPERAND (arg0, 0), tree11);
6160 associate:
6162 /* In most languages, can't associate operations on floats through
6163 parentheses. Rather than remember where the parentheses were, we
6164 don't associate floats at all, unless the user has specified
6165 -funsafe-math-optimizations. */
6167 if (! wins
6168 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6170 tree var0, con0, lit0, minus_lit0;
6171 tree var1, con1, lit1, minus_lit1;
6173 /* Split both trees into variables, constants, and literals. Then
6174 associate each group together, the constants with literals,
6175 then the result with variables. This increases the chances of
6176 literals being recombined later and of generating relocatable
6177 expressions for the sum of a constant and literal. */
6178 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6179 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6180 code == MINUS_EXPR);
6182 /* Only do something if we found more than two objects. Otherwise,
6183 nothing has changed and we risk infinite recursion. */
6184 if (2 < ((var0 != 0) + (var1 != 0)
6185 + (con0 != 0) + (con1 != 0)
6186 + (lit0 != 0) + (lit1 != 0)
6187 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6189 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6190 if (code == MINUS_EXPR)
6191 code = PLUS_EXPR;
6193 var0 = associate_trees (var0, var1, code, type);
6194 con0 = associate_trees (con0, con1, code, type);
6195 lit0 = associate_trees (lit0, lit1, code, type);
6196 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6198 /* Preserve the MINUS_EXPR if the negative part of the literal is
6199 greater than the positive part. Otherwise, the multiplicative
6200 folding code (i.e. extract_muldiv) may be fooled in case
6201 unsigned constants are subtracted, like in the following
6202 example: ((X*2 + 4) - 8U)/2. */
6203 if (minus_lit0 && lit0)
6205 if (TREE_CODE (lit0) == INTEGER_CST
6206 && TREE_CODE (minus_lit0) == INTEGER_CST
6207 && tree_int_cst_lt (lit0, minus_lit0))
6209 minus_lit0 = associate_trees (minus_lit0, lit0,
6210 MINUS_EXPR, type);
6211 lit0 = 0;
6213 else
6215 lit0 = associate_trees (lit0, minus_lit0,
6216 MINUS_EXPR, type);
6217 minus_lit0 = 0;
6220 if (minus_lit0)
6222 if (con0 == 0)
6223 return fold_convert (type,
6224 associate_trees (var0, minus_lit0,
6225 MINUS_EXPR, type));
6226 else
6228 con0 = associate_trees (con0, minus_lit0,
6229 MINUS_EXPR, type);
6230 return fold_convert (type,
6231 associate_trees (var0, con0,
6232 PLUS_EXPR, type));
6236 con0 = associate_trees (con0, lit0, code, type);
6237 return fold_convert (type, associate_trees (var0, con0,
6238 code, type));
6242 binary:
6243 if (wins)
6244 t1 = const_binop (code, arg0, arg1, 0);
6245 if (t1 != NULL_TREE)
6247 /* The return value should always have
6248 the same type as the original expression. */
6249 if (TREE_TYPE (t1) != type)
6250 t1 = fold_convert (type, t1);
6252 return t1;
6254 return t;
6256 case MINUS_EXPR:
6257 /* A - (-B) -> A + B */
6258 if (TREE_CODE (arg1) == NEGATE_EXPR)
6259 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6260 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6261 if (TREE_CODE (arg0) == NEGATE_EXPR
6262 && (FLOAT_TYPE_P (type)
6263 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6264 && negate_expr_p (arg1)
6265 && reorder_operands_p (arg0, arg1))
6266 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6267 TREE_OPERAND (arg0, 0)));
6269 if (! FLOAT_TYPE_P (type))
6271 if (! wins && integer_zerop (arg0))
6272 return negate_expr (fold_convert (type, arg1));
6273 if (integer_zerop (arg1))
6274 return non_lvalue (fold_convert (type, arg0));
6276 /* Fold A - (A & B) into ~B & A. */
6277 if (!TREE_SIDE_EFFECTS (arg0)
6278 && TREE_CODE (arg1) == BIT_AND_EXPR)
6280 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6281 return fold (build (BIT_AND_EXPR, type,
6282 fold (build1 (BIT_NOT_EXPR, type,
6283 TREE_OPERAND (arg1, 0))),
6284 arg0));
6285 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6286 return fold (build (BIT_AND_EXPR, type,
6287 fold (build1 (BIT_NOT_EXPR, type,
6288 TREE_OPERAND (arg1, 1))),
6289 arg0));
6292 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6293 any power of 2 minus 1. */
6294 if (TREE_CODE (arg0) == BIT_AND_EXPR
6295 && TREE_CODE (arg1) == BIT_AND_EXPR
6296 && operand_equal_p (TREE_OPERAND (arg0, 0),
6297 TREE_OPERAND (arg1, 0), 0))
6299 tree mask0 = TREE_OPERAND (arg0, 1);
6300 tree mask1 = TREE_OPERAND (arg1, 1);
6301 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6303 if (operand_equal_p (tem, mask1, 0))
6305 tem = fold (build (BIT_XOR_EXPR, type,
6306 TREE_OPERAND (arg0, 0), mask1));
6307 return fold (build (MINUS_EXPR, type, tem, mask1));
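/* For example, with B == 7, a power of 2 minus 1,
   (A & ~7) - (A & 7) becomes (A ^ 7) - 7.  */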
6312 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6313 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6314 return non_lvalue (fold_convert (type, arg0));
6316 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6317 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6318 (-ARG1 + ARG0) reduces to -ARG1. */
6319 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6320 return negate_expr (fold_convert (type, arg1));
6322 /* Fold &x - &x. This can happen from &x.foo - &x.
6323 This is unsafe for certain floats even in non-IEEE formats.
6324 In IEEE, it is unsafe because it does wrong for NaNs.
6325 Also note that operand_equal_p is always false if an operand
6326 is volatile.  */
6328 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6329 && operand_equal_p (arg0, arg1, 0))
6330 return fold_convert (type, integer_zero_node);
6332 /* A - B -> A + (-B) if B is easily negatable. */
6333 if (!wins && negate_expr_p (arg1)
6334 && (FLOAT_TYPE_P (type)
6335 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6336 return fold (build (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6338 if (TREE_CODE (arg0) == MULT_EXPR
6339 && TREE_CODE (arg1) == MULT_EXPR
6340 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6342 /* (A * C) - (B * C) -> (A-B) * C. */
6343 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6344 TREE_OPERAND (arg1, 1), 0))
6345 return fold (build (MULT_EXPR, type,
6346 fold (build (MINUS_EXPR, type,
6347 TREE_OPERAND (arg0, 0),
6348 TREE_OPERAND (arg1, 0))),
6349 TREE_OPERAND (arg0, 1)));
6350 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6351 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6352 TREE_OPERAND (arg1, 0), 0))
6353 return fold (build (MULT_EXPR, type,
6354 TREE_OPERAND (arg0, 0),
6355 fold (build (MINUS_EXPR, type,
6356 TREE_OPERAND (arg0, 1),
6357 TREE_OPERAND (arg1, 1)))));
6360 goto associate;
6362 case MULT_EXPR:
6363 /* (-A) * (-B) -> A * B */
6364 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6365 return fold (build (MULT_EXPR, type,
6366 TREE_OPERAND (arg0, 0),
6367 negate_expr (arg1)));
6368 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6369 return fold (build (MULT_EXPR, type,
6370 negate_expr (arg0),
6371 TREE_OPERAND (arg1, 0)));
6373 if (! FLOAT_TYPE_P (type))
6375 if (integer_zerop (arg1))
6376 return omit_one_operand (type, arg1, arg0);
6377 if (integer_onep (arg1))
6378 return non_lvalue (fold_convert (type, arg0));
6380 /* (a * (1 << b)) is (a << b) */
6381 if (TREE_CODE (arg1) == LSHIFT_EXPR
6382 && integer_onep (TREE_OPERAND (arg1, 0)))
6383 return fold (build (LSHIFT_EXPR, type, arg0,
6384 TREE_OPERAND (arg1, 1)));
6385 if (TREE_CODE (arg0) == LSHIFT_EXPR
6386 && integer_onep (TREE_OPERAND (arg0, 0)))
6387 return fold (build (LSHIFT_EXPR, type, arg1,
6388 TREE_OPERAND (arg0, 1)));
6390 if (TREE_CODE (arg1) == INTEGER_CST
6391 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6392 fold_convert (type, arg1),
6393 code, NULL_TREE)))
6394 return fold_convert (type, tem);
6396 else
6399 /* Maybe fold x * 0 to 0. The expressions aren't the same
6400 when x is NaN, since x * 0 is also NaN. Nor are they the
6401 same in modes with signed zeros, since multiplying a
6402 negative value by 0 gives -0, not +0. */
6403 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6404 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6405 && real_zerop (arg1))
6406 return omit_one_operand (type, arg1, arg0);
6407 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6408 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6409 && real_onep (arg1))
6410 return non_lvalue (fold_convert (type, arg0));
6412 /* Transform x * -1.0 into -x. */
6413 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6414 && real_minus_onep (arg1))
6415 return fold_convert (type, negate_expr (arg0));
6417 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6418 if (flag_unsafe_math_optimizations
6419 && TREE_CODE (arg0) == RDIV_EXPR
6420 && TREE_CODE (arg1) == REAL_CST
6421 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6423 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6424 arg1, 0);
6425 if (tem)
6426 return fold (build (RDIV_EXPR, type, tem,
6427 TREE_OPERAND (arg0, 1)));
6430 if (flag_unsafe_math_optimizations)
6432 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6433 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6435 /* Optimizations of root(...)*root(...). */
6436 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
6438 tree rootfn, arg, arglist;
6439 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6440 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6442 /* Optimize sqrt(x)*sqrt(x) as x. */
6443 if (BUILTIN_SQRT_P (fcode0)
6444 && operand_equal_p (arg00, arg10, 0)
6445 && ! HONOR_SNANS (TYPE_MODE (type)))
6446 return arg00;
6448 /* Optimize root(x)*root(y) as root(x*y). */
6449 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6450 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6451 arglist = build_tree_list (NULL_TREE, arg);
6452 return build_function_call_expr (rootfn, arglist);
6455 /* Optimize expN(x)*expN(y) as expN(x+y). */
6456 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
6458 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6459 tree arg = build (PLUS_EXPR, type,
6460 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6461 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6462 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6463 return build_function_call_expr (expfn, arglist);
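/* For example, exp (x) * exp (y) becomes exp (x + y); one call
   and an addition replace two calls and a multiplication.  */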
6466 /* Optimizations of pow(...)*pow(...). */
6467 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6468 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6469 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6471 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6472 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6473 1)));
6474 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6475 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6476 1)));
6478 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6479 if (operand_equal_p (arg01, arg11, 0))
6481 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6482 tree arg = build (MULT_EXPR, type, arg00, arg10);
6483 tree arglist = tree_cons (NULL_TREE, fold (arg),
6484 build_tree_list (NULL_TREE,
6485 arg01));
6486 return build_function_call_expr (powfn, arglist);
6489 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6490 if (operand_equal_p (arg00, arg10, 0))
6492 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6493 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6494 tree arglist = tree_cons (NULL_TREE, arg00,
6495 build_tree_list (NULL_TREE,
6496 arg));
6497 return build_function_call_expr (powfn, arglist);
6501 /* Optimize tan(x)*cos(x) as sin(x). */
6502 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6503 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6504 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6505 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6506 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6507 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6508 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6509 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6511 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
6513 if (sinfn != NULL_TREE)
6514 return build_function_call_expr (sinfn,
6515 TREE_OPERAND (arg0, 1));
6518 /* Optimize x*pow(x,c) as pow(x,c+1). */
6519 if (fcode1 == BUILT_IN_POW
6520 || fcode1 == BUILT_IN_POWF
6521 || fcode1 == BUILT_IN_POWL)
6523 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6524 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6525 1)));
6526 if (TREE_CODE (arg11) == REAL_CST
6527 && ! TREE_CONSTANT_OVERFLOW (arg11)
6528 && operand_equal_p (arg0, arg10, 0))
6530 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6531 tree arg, arglist;
6532 REAL_VALUE_TYPE c;
6534 c = TREE_REAL_CST (arg11);
6535 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6536 arg = build_real (type, c);
6537 arglist = build_tree_list (NULL_TREE, arg);
6538 arglist = tree_cons (NULL_TREE, arg0, arglist);
6539 return build_function_call_expr (powfn, arglist);
6543 /* Optimize pow(x,c)*x as pow(x,c+1). */
6544 if (fcode0 == BUILT_IN_POW
6545 || fcode0 == BUILT_IN_POWF
6546 || fcode0 == BUILT_IN_POWL)
6548 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6549 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6550 1)));
6551 if (TREE_CODE (arg01) == REAL_CST
6552 && ! TREE_CONSTANT_OVERFLOW (arg01)
6553 && operand_equal_p (arg1, arg00, 0))
6555 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6556 tree arg, arglist;
6557 REAL_VALUE_TYPE c;
6559 c = TREE_REAL_CST (arg01);
6560 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6561 arg = build_real (type, c);
6562 arglist = build_tree_list (NULL_TREE, arg);
6563 arglist = tree_cons (NULL_TREE, arg1, arglist);
6564 return build_function_call_expr (powfn, arglist);
6568 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6569 if (! optimize_size
6570 && operand_equal_p (arg0, arg1, 0))
6572 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6574 if (powfn)
6576 tree arg = build_real (type, dconst2);
6577 tree arglist = build_tree_list (NULL_TREE, arg);
6578 arglist = tree_cons (NULL_TREE, arg0, arglist);
6579 return build_function_call_expr (powfn, arglist);
6582 goto associate;
6584 case BIT_IOR_EXPR:
6585 bit_ior:
6588 if (integer_all_onesp (arg1))
6589 return omit_one_operand (type, arg1, arg0);
6590 if (integer_zerop (arg1))
6591 return non_lvalue (fold_convert (type, arg0));
6592 if (operand_equal_p (arg0, arg1, 0))
6593 return non_lvalue (fold_convert (type, arg0));
6594 t1 = distribute_bit_expr (code, type, arg0, arg1);
6595 if (t1 != NULL_TREE)
6596 return t1;
6598 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6600 This results in more efficient code for machines without a NAND
6601 instruction. Combine will canonicalize to the first form
6602 which will allow use of NAND instructions provided by the
6603 backend if they exist. */
6604 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6605 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6607 return fold (build1 (BIT_NOT_EXPR, type,
6608 build (BIT_AND_EXPR, type,
6609 TREE_OPERAND (arg0, 0),
6610 TREE_OPERAND (arg1, 0))));
6613 /* See if this can be simplified into a rotate first. If that
6614 is unsuccessful continue in the association code.  */
6615 goto bit_rotate;
6617 case BIT_XOR_EXPR:
6618 if (integer_zerop (arg1))
6619 return non_lvalue (fold_convert (type, arg0));
6620 if (integer_all_onesp (arg1))
6621 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6622 if (operand_equal_p (arg0, arg1, 0))
6623 return omit_one_operand (type, integer_zero_node, arg0);
6625 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6626 with a constant, and the two constants have no bits in common,
6627 we should treat this as a BIT_IOR_EXPR since this may produce more
6628 simplifications.  */
6629 if (TREE_CODE (arg0) == BIT_AND_EXPR
6630 && TREE_CODE (arg1) == BIT_AND_EXPR
6631 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6632 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6633 && integer_zerop (const_binop (BIT_AND_EXPR,
6634 TREE_OPERAND (arg0, 1),
6635 TREE_OPERAND (arg1, 1), 0)))
6637 code = BIT_IOR_EXPR;
6638 goto bit_ior;
6641 /* See if this can be simplified into a rotate first. If that
6642 is unsuccessful continue in the association code.  */
6643 goto bit_rotate;
6645 case BIT_AND_EXPR:
6646 if (integer_all_onesp (arg1))
6647 return non_lvalue (fold_convert (type, arg0));
6648 if (integer_zerop (arg1))
6649 return omit_one_operand (type, arg1, arg0);
6650 if (operand_equal_p (arg0, arg1, 0))
6651 return non_lvalue (fold_convert (type, arg0));
6652 t1 = distribute_bit_expr (code, type, arg0, arg1);
6653 if (t1 != NULL_TREE)
6654 return t1;
6655 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6656 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6657 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6659 unsigned int prec
6660 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6662 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6663 && (~TREE_INT_CST_LOW (arg1)
6664 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6665 return fold_convert (type, TREE_OPERAND (arg0, 0));
6668 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6670 This results in more efficient code for machines without a NOR
6671 instruction. Combine will canonicalize to the first form
6672 which will allow use of NOR instructions provided by the
6673 backend if they exist. */
6674 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6675 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6677 return fold (build1 (BIT_NOT_EXPR, type,
6678 build (BIT_IOR_EXPR, type,
6679 TREE_OPERAND (arg0, 0),
6680 TREE_OPERAND (arg1, 0))));
6682 goto associate;
6684 case RDIV_EXPR:
6686 /* Don't touch a floating-point divide by zero unless the mode
6687 of the constant can represent infinity. */
6688 if (TREE_CODE (arg1) == REAL_CST
6689 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6690 && real_zerop (arg1))
6691 return t;
6693 /* (-A) / (-B) -> A / B */
6694 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6695 return fold (build (RDIV_EXPR, type,
6696 TREE_OPERAND (arg0, 0),
6697 negate_expr (arg1)));
6698 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6699 return fold (build (RDIV_EXPR, type,
6700 negate_expr (arg0),
6701 TREE_OPERAND (arg1, 0)));
6703 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6704 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6705 && real_onep (arg1))
6706 return non_lvalue (fold_convert (type, arg0));
6708 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6709 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6710 && real_minus_onep (arg1))
6711 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6713 /* If ARG1 is a constant, we can convert this to a multiply by the
6714 reciprocal. This does not have the same rounding properties,
6715 so only do this if -funsafe-math-optimizations. We can actually
6716 always safely do it if ARG1 is a power of two, but it's hard to
6717 tell if it is or not in a portable manner. */
6718 if (TREE_CODE (arg1) == REAL_CST)
6720 if (flag_unsafe_math_optimizations
6721 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6722 arg1, 0)))
6723 return fold (build (MULT_EXPR, type, arg0, tem));
6724 /* Find the reciprocal if optimizing and the result is exact. */
6725 if (optimize)
6727 REAL_VALUE_TYPE r;
6728 r = TREE_REAL_CST (arg1);
6729 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6731 tem = build_real (type, r);
6732 return fold (build (MULT_EXPR, type, arg0, tem));
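/* For example, x / 2.0 becomes x * 0.5 whenever optimizing, since
   0.5 is exact; x / 3.0 becomes x * (1.0/3.0) only with
   -funsafe-math-optimizations, because that reciprocal rounds.  */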
6736 /* Convert A/B/C to A/(B*C). */
6737 if (flag_unsafe_math_optimizations
6738 && TREE_CODE (arg0) == RDIV_EXPR)
6739 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6740 fold (build (MULT_EXPR, type,
6741 TREE_OPERAND (arg0, 1), arg1))));
6743 /* Convert A/(B/C) to (A/B)*C. */
6744 if (flag_unsafe_math_optimizations
6745 && TREE_CODE (arg1) == RDIV_EXPR)
6746 return fold (build (MULT_EXPR, type,
6747 fold (build (RDIV_EXPR, type, arg0,
6748 TREE_OPERAND (arg1, 0))),
6749 TREE_OPERAND (arg1, 1)));
6751 /* Convert C1/(X*C2) into (C1/C2)/X. */
6752 if (flag_unsafe_math_optimizations
6753 && TREE_CODE (arg1) == MULT_EXPR
6754 && TREE_CODE (arg0) == REAL_CST
6755 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6757 tree tem = const_binop (RDIV_EXPR, arg0,
6758 TREE_OPERAND (arg1, 1), 0);
6759 if (tem)
6760 return fold (build (RDIV_EXPR, type, tem,
6761 TREE_OPERAND (arg1, 0)));
6764 if (flag_unsafe_math_optimizations)
6766 enum built_in_function fcode = builtin_mathfn_code (arg1);
6767 /* Optimize x/expN(y) into x*expN(-y). */
6768 if (BUILTIN_EXPONENT_P (fcode))
6770 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6771 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
6772 tree arglist = build_tree_list (NULL_TREE,
6773 fold_convert (type, arg));
6774 arg1 = build_function_call_expr (expfn, arglist);
6775 return fold (build (MULT_EXPR, type, arg0, arg1));
6778 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6779 if (fcode == BUILT_IN_POW
6780 || fcode == BUILT_IN_POWF
6781 || fcode == BUILT_IN_POWL)
6783 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6784 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6785 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6786 tree neg11 = fold_convert (type, negate_expr (arg11));
6787 tree arglist = tree_cons (NULL_TREE, arg10,
6788 build_tree_list (NULL_TREE, neg11));
6789 arg1 = build_function_call_expr (powfn, arglist);
6790 return fold (build (MULT_EXPR, type, arg0, arg1));
6794 if (flag_unsafe_math_optimizations)
6796 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6797 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6799 /* Optimize sin(x)/cos(x) as tan(x). */
6800 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6801 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6802 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6803 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6804 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6806 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
6808 if (tanfn != NULL_TREE)
6809 return build_function_call_expr (tanfn,
6810 TREE_OPERAND (arg0, 1));
6813 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6814 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6815 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6816 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6817 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6818 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6820 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
6822 if (tanfn != NULL_TREE)
6824 tree tmp = TREE_OPERAND (arg0, 1);
6825 tmp = build_function_call_expr (tanfn, tmp);
6826 return fold (build (RDIV_EXPR, type,
6827 build_real (type, dconst1),
6828 tmp));
6832 /* Optimize pow(x,c)/x as pow(x,c-1). */
6833 if (fcode0 == BUILT_IN_POW
6834 || fcode0 == BUILT_IN_POWF
6835 || fcode0 == BUILT_IN_POWL)
6837 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6838 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6839 if (TREE_CODE (arg01) == REAL_CST
6840 && ! TREE_CONSTANT_OVERFLOW (arg01)
6841 && operand_equal_p (arg1, arg00, 0))
6843 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6844 tree arg, arglist;
6845 REAL_VALUE_TYPE c;
6847 c = TREE_REAL_CST (arg01);
6848 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6849 arg = build_real (type, c);
6850 arglist = build_tree_list (NULL_TREE, arg);
6851 arglist = tree_cons (NULL_TREE, arg1, arglist);
6852 return build_function_call_expr (powfn, arglist);
6856 goto binary;
6858 case TRUNC_DIV_EXPR:
6859 case ROUND_DIV_EXPR:
6860 case FLOOR_DIV_EXPR:
6861 case CEIL_DIV_EXPR:
6862 case EXACT_DIV_EXPR:
6863 if (integer_onep (arg1))
6864 return non_lvalue (fold_convert (type, arg0));
6865 if (integer_zerop (arg1))
6866 return t;
6867 /* X / -1 is -X.  */
6868 if (!TYPE_UNSIGNED (type)
6869 && TREE_CODE (arg1) == INTEGER_CST
6870 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
6871 && TREE_INT_CST_HIGH (arg1) == -1)
6872 return fold_convert (type, negate_expr (arg0));
6874 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6875 operation, EXACT_DIV_EXPR.
6877 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6878 At one time others generated faster code; it's not clear if they do
6879 after the last round of changes to the DIV code in expmed.c.  */
6880 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6881 && multiple_of_p (type, arg0, arg1))
6882 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6884 if (TREE_CODE (arg1) == INTEGER_CST
6885 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6886 code, NULL_TREE)))
6887 return fold_convert (type, tem);
6889 goto binary;
6891 case CEIL_MOD_EXPR:
6892 case FLOOR_MOD_EXPR:
6893 case ROUND_MOD_EXPR:
6894 case TRUNC_MOD_EXPR:
6895 if (integer_onep (arg1))
6896 return omit_one_operand (type, integer_zero_node, arg0);
6897 if (integer_zerop (arg1))
6898 return t;
6899 /* X % -1 is zero. */
6900 if (!TYPE_UNSIGNED (type)
6901 && TREE_CODE (arg1) == INTEGER_CST
6902 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
6903 && TREE_INT_CST_HIGH (arg1) == -1)
6904 return omit_one_operand (type, integer_zero_node, arg0);
6906 if (TREE_CODE (arg1) == INTEGER_CST
6907 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6908 code, NULL_TREE)))
6909 return fold_convert (type, tem);
6911 goto binary;
6913 case LROTATE_EXPR:
6914 case RROTATE_EXPR:
6915 if (integer_all_onesp (arg0))
6916 return omit_one_operand (type, arg0, arg1);
6917 goto shift;
6919 case RSHIFT_EXPR:
6920 /* Optimize -1 >> x for arithmetic right shifts. */
6921 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
6922 return omit_one_operand (type, arg0, arg1);
6923 /* ... fall through ... */
6925 case LSHIFT_EXPR:
6926 shift:
6927 if (integer_zerop (arg1))
6928 return non_lvalue (fold_convert (type, arg0));
6929 if (integer_zerop (arg0))
6930 return omit_one_operand (type, arg0, arg1);
6932 /* Since negative shift count is not well-defined,
6933 don't try to compute it in the compiler. */
6934 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6935 return t;
6936 /* Rewrite an LROTATE_EXPR by a constant into an
6937 RROTATE_EXPR by a new constant. */
6938 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6940 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6941 tem = fold_convert (TREE_TYPE (arg1), tem);
6942 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6943 return fold (build (RROTATE_EXPR, type, arg0, tem));
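/* For example, on a 32-bit type, x lrotate 5 becomes x rrotate 27.  */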
6946 /* If we have a rotate of a bit operation with the rotate count and
6947 the second operand of the bit operation both constant,
6948 permute the two operations. */
6949 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6950 && (TREE_CODE (arg0) == BIT_AND_EXPR
6951 || TREE_CODE (arg0) == BIT_IOR_EXPR
6952 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6953 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6954 return fold (build (TREE_CODE (arg0), type,
6955 fold (build (code, type,
6956 TREE_OPERAND (arg0, 0), arg1)),
6957 fold (build (code, type,
6958 TREE_OPERAND (arg0, 1), arg1))));
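/* For example, (x | 3) rrotate 1 on a 32-bit unsigned type becomes
   (x rrotate 1) | 0x80000001 once the constant rotate is folded.  */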
6960 /* Two consecutive rotates adding up to the width of the mode can
6961 be ignored.  */
6962 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6963 && TREE_CODE (arg0) == RROTATE_EXPR
6964 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6965 && TREE_INT_CST_HIGH (arg1) == 0
6966 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6967 && ((TREE_INT_CST_LOW (arg1)
6968 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6969 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6970 return TREE_OPERAND (arg0, 0);
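/* For example, on a 32-bit type, (x rrotate 10) rrotate 22 is
   simply x.  */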
6972 goto binary;
6974 case MIN_EXPR:
6975 if (operand_equal_p (arg0, arg1, 0))
6976 return omit_one_operand (type, arg0, arg1);
6977 if (INTEGRAL_TYPE_P (type)
6978 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6979 return omit_one_operand (type, arg1, arg0);
6980 goto associate;
6982 case MAX_EXPR:
6983 if (operand_equal_p (arg0, arg1, 0))
6984 return omit_one_operand (type, arg0, arg1);
6985 if (INTEGRAL_TYPE_P (type)
6986 && TYPE_MAX_VALUE (type)
6987 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6988 return omit_one_operand (type, arg1, arg0);
6989 goto associate;
6991 case TRUTH_NOT_EXPR:
6992 /* Note that the operand of this must be an int
6993 and its values must be 0 or 1.
6994 ("true" is a fixed value perhaps depending on the language,
6995 but we don't handle values other than 1 correctly yet.) */
6996 tem = invert_truthvalue (arg0);
6997 /* Avoid infinite recursion. */
6998 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7000 tem = fold_single_bit_test (code, arg0, arg1, type);
7001 if (tem)
7002 return tem;
7003 return t;
7005 return fold_convert (type, tem);
7007 case TRUTH_ANDIF_EXPR:
7008 /* Note that the operands of this must be ints
7009 and their values must be 0 or 1.
7010 ("true" is a fixed value perhaps depending on the language.) */
7011 /* If first arg is constant zero, return it. */
7012 if (integer_zerop (arg0))
7013 return fold_convert (type, arg0);
7014 case TRUTH_AND_EXPR:
7015 /* If either arg is constant true, drop it. */
7016 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7017 return non_lvalue (fold_convert (type, arg1));
7018 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7019 /* Preserve sequence points. */
7020 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7021 return non_lvalue (fold_convert (type, arg0));
7022 /* If second arg is constant zero, result is zero, but first arg
7023 must be evaluated. */
7024 if (integer_zerop (arg1))
7025 return omit_one_operand (type, arg1, arg0);
7026 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7027 case will be handled here. */
7028 if (integer_zerop (arg0))
7029 return omit_one_operand (type, arg0, arg1);
7031 truth_andor:
7032 /* We only do these simplifications if we are optimizing.  */
7033 if (!optimize)
7034 return t;
7036 /* Check for things like (A || B) && (A || C). We can convert this
7037 to A || (B && C). Note that either operator can be any of the four
7038 truth and/or operations and the transformation will still be
7039 valid. Also note that we only care about order for the
7040 ANDIF and ORIF operators. If B contains side effects, this
7041 might change the truth-value of A. */
7042 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7043 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7044 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7045 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7046 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7047 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7049 tree a00 = TREE_OPERAND (arg0, 0);
7050 tree a01 = TREE_OPERAND (arg0, 1);
7051 tree a10 = TREE_OPERAND (arg1, 0);
7052 tree a11 = TREE_OPERAND (arg1, 1);
7053 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7054 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7055 && (code == TRUTH_AND_EXPR
7056 || code == TRUTH_OR_EXPR));
7058 if (operand_equal_p (a00, a10, 0))
7059 return fold (build (TREE_CODE (arg0), type, a00,
7060 fold (build (code, type, a01, a11))));
7061 else if (commutative && operand_equal_p (a00, a11, 0))
7062 return fold (build (TREE_CODE (arg0), type, a00,
7063 fold (build (code, type, a01, a10))));
7064 else if (commutative && operand_equal_p (a01, a10, 0))
7065 return fold (build (TREE_CODE (arg0), type, a01,
7066 fold (build (code, type, a00, a11))));
7068 /* This case is tricky because we must either have commutative
7069 operators or else A10 must not have side-effects.  */
7071 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7072 && operand_equal_p (a01, a11, 0))
7073 return fold (build (TREE_CODE (arg0), type,
7074 fold (build (code, type, a00, a10)),
7075 a01));
7078 /* See if we can build a range comparison. */
7079 if (0 != (tem = fold_range_test (t)))
7080 return tem;
7082 /* Check for the possibility of merging component references. If our
7083 lhs is another similar operation, try to merge its rhs with our
7084 rhs. Then try to merge our lhs and rhs. */
7085 if (TREE_CODE (arg0) == code
7086 && 0 != (tem = fold_truthop (code, type,
7087 TREE_OPERAND (arg0, 1), arg1)))
7088 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7090 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7091 return tem;
7093 return t;
7095 case TRUTH_ORIF_EXPR:
7096 /* Note that the operands of this must be ints
7097 and their values must be 0 or true.
7098 ("true" is a fixed value perhaps depending on the language.) */
7099 /* If first arg is constant true, return it. */
7100 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7101 return fold_convert (type, arg0);
7102 case TRUTH_OR_EXPR:
7103 /* If either arg is constant zero, drop it.  */
7104 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7105 return non_lvalue (fold_convert (type, arg1));
7106 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7107 /* Preserve sequence points. */
7108 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7109 return non_lvalue (fold_convert (type, arg0));
7110 /* If second arg is constant true, result is true, but we must
7111 evaluate first arg. */
7112 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7113 return omit_one_operand (type, arg1, arg0);
7114 /* Likewise for first arg, but note this only occurs here for
7115 TRUTH_OR_EXPR.  */
7116 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7117 return omit_one_operand (type, arg0, arg1);
7118 goto truth_andor;
7120 case TRUTH_XOR_EXPR:
7121 /* If either arg is constant zero, drop it. */
7122 if (integer_zerop (arg0))
7123 return non_lvalue (fold_convert (type, arg1));
7124 if (integer_zerop (arg1))
7125 return non_lvalue (fold_convert (type, arg0));
7126 /* If either arg is constant true, this is a logical inversion. */
7127 if (integer_onep (arg0))
7128 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7129 if (integer_onep (arg1))
7130 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7131 return t;
7133 case EQ_EXPR:
7134 case NE_EXPR:
7135 case LT_EXPR:
7136 case GT_EXPR:
7137 case LE_EXPR:
7138 case GE_EXPR:
7139 /* If one arg is a real or integer constant, put it last.  */
7140 if (tree_swap_operands_p (arg0, arg1, true))
7141 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
7143 /* If this is an equality comparison of the address of a non-weak
7144 object against zero, then we know the result. */
7145 if ((code == EQ_EXPR || code == NE_EXPR)
7146 && TREE_CODE (arg0) == ADDR_EXPR
7147 && DECL_P (TREE_OPERAND (arg0, 0))
7148 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7149 && integer_zerop (arg1))
7151 if (code == EQ_EXPR)
7152 return fold_convert (type, integer_zero_node);
7154 return fold_convert (type, integer_one_node);
7157 /* If this is an equality comparison of the address of two non-weak,
7158 unaliased symbols neither of which are extern (since we do not
7159 have access to attributes for externs), then we know the result. */
7160 if ((code == EQ_EXPR || code == NE_EXPR)
7161 && TREE_CODE (arg0) == ADDR_EXPR
7162 && DECL_P (TREE_OPERAND (arg0, 0))
7163 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7164 && ! lookup_attribute ("alias",
7165 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7166 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7167 && TREE_CODE (arg1) == ADDR_EXPR
7168 && DECL_P (TREE_OPERAND (arg1, 0))
7169 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7170 && ! lookup_attribute ("alias",
7171 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7172 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7174 if (code == EQ_EXPR)
7175 return fold_convert (type, (operand_equal_p (arg0, arg1, 0)
7176 ? integer_one_node : integer_zero_node));
7178 return fold_convert (type, (operand_equal_p (arg0, arg1, 0)
7179 ? integer_zero_node : integer_one_node));
7182 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7184 tree targ0 = strip_float_extensions (arg0);
7185 tree targ1 = strip_float_extensions (arg1);
7186 tree newtype = TREE_TYPE (targ0);
7188 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7189 newtype = TREE_TYPE (targ1);
7191 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7192 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7193 return fold (build (code, type, fold_convert (newtype, targ0),
7194 fold_convert (newtype, targ1)));
7196 /* (-a) CMP (-b) -> b CMP a */
7197 if (TREE_CODE (arg0) == NEGATE_EXPR
7198 && TREE_CODE (arg1) == NEGATE_EXPR)
7199 return fold (build (code, type, TREE_OPERAND (arg1, 0),
7200 TREE_OPERAND (arg0, 0)));
7202 if (TREE_CODE (arg1) == REAL_CST)
7204 REAL_VALUE_TYPE cst;
7205 cst = TREE_REAL_CST (arg1);
7207 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7208 if (TREE_CODE (arg0) == NEGATE_EXPR)
7209 return
7210 fold (build (swap_tree_comparison (code), type,
7211 TREE_OPERAND (arg0, 0),
7212 build_real (TREE_TYPE (arg1),
7213 REAL_VALUE_NEGATE (cst))));
7215 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7216 /* a CMP (-0) -> a CMP 0 */
7217 if (REAL_VALUE_MINUS_ZERO (cst))
7218 return fold (build (code, type, arg0,
7219 build_real (TREE_TYPE (arg1), dconst0)));
7221 /* x != NaN is always true, other ops are always false. */
7222 if (REAL_VALUE_ISNAN (cst)
7223 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7225 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7226 return omit_one_operand (type, fold_convert (type, tem), arg0);
7229 /* Fold comparisons against infinity. */
7230 if (REAL_VALUE_ISINF (cst))
7232 tem = fold_inf_compare (code, type, arg0, arg1);
7233 if (tem != NULL_TREE)
7234 return tem;
7238 /* If this is a comparison of a real constant with a PLUS_EXPR
7239 or a MINUS_EXPR of a real constant, we can convert it into a
7240 comparison with a revised real constant as long as no overflow
7241 occurs when unsafe_math_optimizations are enabled. */
7242 if (flag_unsafe_math_optimizations
7243 && TREE_CODE (arg1) == REAL_CST
7244 && (TREE_CODE (arg0) == PLUS_EXPR
7245 || TREE_CODE (arg0) == MINUS_EXPR)
7246 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7247 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7248 ? MINUS_EXPR : PLUS_EXPR,
7249 arg1, TREE_OPERAND (arg0, 1), 0))
7250 && ! TREE_CONSTANT_OVERFLOW (tem))
7251 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7253 /* Likewise, we can simplify a comparison of a real constant with
7254 a MINUS_EXPR whose first operand is also a real constant, i.e.
7255 (c1 - x) < c2 becomes x > c1-c2. */
7256 if (flag_unsafe_math_optimizations
7257 && TREE_CODE (arg1) == REAL_CST
7258 && TREE_CODE (arg0) == MINUS_EXPR
7259 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7260 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7261 arg1, 0))
7262 && ! TREE_CONSTANT_OVERFLOW (tem))
7263 return fold (build (swap_tree_comparison (code), type,
7264 TREE_OPERAND (arg0, 1), tem));
7266 /* Fold comparisons against built-in math functions. */
7267 if (TREE_CODE (arg1) == REAL_CST
7268 && flag_unsafe_math_optimizations
7269 && ! flag_errno_math)
7271 enum built_in_function fcode = builtin_mathfn_code (arg0);
7273 if (fcode != END_BUILTINS)
7275 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7276 if (tem != NULL_TREE)
7277 return tem;
7282 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7283 if (TREE_CONSTANT (arg1)
7284 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7285 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7286 /* This optimization is invalid for ordered comparisons
7287 if CONST+INCR overflows or if foo+incr might overflow.
7288 This optimization is invalid for floating point due to rounding.
7289 For pointer types we assume overflow doesn't happen. */
7290 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7291 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7292 && (code == EQ_EXPR || code == NE_EXPR))))
7294 tree varop, newconst;
7296 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7298 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7299 arg1, TREE_OPERAND (arg0, 1)));
7300 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7301 TREE_OPERAND (arg0, 0),
7302 TREE_OPERAND (arg0, 1));
7304 else
7306 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7307 arg1, TREE_OPERAND (arg0, 1)));
7308 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7309 TREE_OPERAND (arg0, 0),
7310 TREE_OPERAND (arg0, 1));
7314 /* If VAROP is a reference to a bitfield, we must mask
7315 the constant by the width of the field. */
7316 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7317 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7319 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7320 int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7321 tree folded_compare, shift;
7323 /* First check whether the comparison would come out
7324 always the same. If we don't do that we would
7325 change the meaning with the masking. */
7326 folded_compare = fold (build2 (code, type,
7327 TREE_OPERAND (varop, 0),
7328 arg1));
7329 if (integer_zerop (folded_compare)
7330 || integer_onep (folded_compare))
7331 return omit_one_operand (type, folded_compare, varop);
7333 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
7334 0);
7335 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7336 newconst, shift));
7337 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7338 newconst, shift));
7341 return fold (build2 (code, type, varop, newconst));
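/* For example, x++ == 5 becomes ++x == 6.  */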
7344 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7345 This transformation affects the cases which are handled in later
7346 optimizations involving comparisons with non-negative constants. */
7347 if (TREE_CODE (arg1) == INTEGER_CST
7348 && TREE_CODE (arg0) != INTEGER_CST
7349 && tree_int_cst_sgn (arg1) > 0)
7351 switch (code)
7353 case GE_EXPR:
7354 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7355 return fold (build (GT_EXPR, type, arg0, arg1));
7357 case LT_EXPR:
7358 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7359 return fold (build (LE_EXPR, type, arg0, arg1));
7361 default:
7362 break;
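/* For example, x >= 5 becomes x > 4, and x < 5 becomes x <= 4.  */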
7366 /* Comparisons with the highest or lowest possible integer of
7367 the specified size will have known values. */
7369 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7371 if (TREE_CODE (arg1) == INTEGER_CST
7372 && ! TREE_CONSTANT_OVERFLOW (arg1)
7373 && width <= HOST_BITS_PER_WIDE_INT
7374 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7375 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7377 unsigned HOST_WIDE_INT signed_max;
7378 unsigned HOST_WIDE_INT max, min;
7380 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7382 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7384 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7385 min = 0;
7387 else
7389 max = signed_max;
7390 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7393 if (TREE_INT_CST_HIGH (arg1) == 0
7394 && TREE_INT_CST_LOW (arg1) == max)
7395 switch (code)
7397 case GT_EXPR:
7398 return omit_one_operand (type,
7399 fold_convert (type, integer_zero_node), arg0);
7402 case GE_EXPR:
7403 return fold (build (EQ_EXPR, type, arg0, arg1));
7405 case LE_EXPR:
7406 return omit_one_operand (type,
7407 fold_convert (type, integer_one_node), arg0);
7410 case LT_EXPR:
7411 return fold (build (NE_EXPR, type, arg0, arg1));
7413 /* The GE_EXPR and LT_EXPR cases above are not normally
7414 reached because of previous transformations. */
7419 else if (TREE_INT_CST_HIGH (arg1) == 0
7420 && TREE_INT_CST_LOW (arg1) == max - 1)
7421 switch (code)
7423 case GT_EXPR:
7424 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7425 return fold (build (EQ_EXPR, type, arg0, arg1));
7426 case LE_EXPR:
7427 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7428 return fold (build (NE_EXPR, type, arg0, arg1));
7432 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7433 && TREE_INT_CST_LOW (arg1) == min)
7434 switch (code)
7436 case LT_EXPR:
7437 return omit_one_operand (type,
7438 fold_convert (type, integer_zero_node), arg0);
7441 case LE_EXPR:
7442 return fold (build (EQ_EXPR, type, arg0, arg1));
7444 case GE_EXPR:
7445 return omit_one_operand (type,
7446 fold_convert (type, integer_one_node), arg0);
7449 case GT_EXPR:
7450 return fold (build (NE_EXPR, type, arg0, arg1));
7455 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7456 && TREE_INT_CST_LOW (arg1) == min + 1)
7457 switch (code)
7459 case GE_EXPR:
7460 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7461 return fold (build (NE_EXPR, type, arg0, arg1));
7462 case LT_EXPR:
7463 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7464 return fold (build (EQ_EXPR, type, arg0, arg1));
7469 else if (TREE_INT_CST_HIGH (arg1) == 0
7470 && TREE_INT_CST_LOW (arg1) == signed_max
7471 && TYPE_UNSIGNED (TREE_TYPE (arg1))
7472 /* signed_type does not work on pointer types. */
7473 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7475 /* The following case also applies to X < signed_max+1
7476 and X >= signed_max+1 because of previous transformations.  */
7477 if (code == LE_EXPR || code == GT_EXPR)
7479 tree st0, st1;
7480 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
7481 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
7482 return fold
7483 (build (code == LE_EXPR ? GE_EXPR : LT_EXPR,
7484 type, fold_convert (st0, arg0),
7485 fold_convert (st1, integer_zero_node)));
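/* For example, with 32-bit unsigned x, x > 0xffffffff folds to 0,
   x >= 0xffffffff becomes x == 0xffffffff, and x <= 0x7fffffff
   becomes (int) x >= 0.  */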
7491 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7492 a MINUS_EXPR of a constant, we can convert it into a comparison with
7493 a revised constant as long as no overflow occurs. */
7494 if ((code == EQ_EXPR || code == NE_EXPR)
7495 && TREE_CODE (arg1) == INTEGER_CST
7496 && (TREE_CODE (arg0) == PLUS_EXPR
7497 || TREE_CODE (arg0) == MINUS_EXPR)
7498 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7499 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7500 ? MINUS_EXPR : PLUS_EXPR,
7501 arg1, TREE_OPERAND (arg0, 1), 0))
7502 && ! TREE_CONSTANT_OVERFLOW (tem))
7503 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7505 /* Similarly for a NEGATE_EXPR. */
7506 else if ((code == EQ_EXPR || code == NE_EXPR)
7507 && TREE_CODE (arg0) == NEGATE_EXPR
7508 && TREE_CODE (arg1) == INTEGER_CST
7509 && 0 != (tem = negate_expr (arg1))
7510 && TREE_CODE (tem) == INTEGER_CST
7511 && ! TREE_CONSTANT_OVERFLOW (tem))
7512 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7514 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7515 for !=. Don't do this for ordered comparisons due to overflow. */
7516 else if ((code == NE_EXPR || code == EQ_EXPR)
7517 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7518 return fold (build (code, type,
7519 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7521 /* If we are widening one operand of an integer comparison,
7522 see if the other operand is similarly being widened. Perhaps we
7523 can do the comparison in the narrower type. */
7524 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7525 && TREE_CODE (arg0) == NOP_EXPR
7526 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7527 && (code == EQ_EXPR || code == NE_EXPR
7528 || TYPE_UNSIGNED (TREE_TYPE (arg0))
7529 == TYPE_UNSIGNED (TREE_TYPE (tem)))
7530 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7531 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7532 || (TREE_CODE (t1) == INTEGER_CST
7533 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7534 return fold (build (code, type, tem,
7535 fold_convert (TREE_TYPE (tem), t1)));
7537 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7538 constant, we can simplify it. */
7539 else if (TREE_CODE (arg1) == INTEGER_CST
7540 && (TREE_CODE (arg0) == MIN_EXPR
7541 || TREE_CODE (arg0) == MAX_EXPR)
7542 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7543 return optimize_minmax_comparison (t);
7545 /* If we are comparing an ABS_EXPR with a constant, we can
7546 convert all the cases into explicit comparisons, but they may
7547 well not be faster than doing the ABS and one comparison.
7548 But ABS (X) <= C is a range comparison, which becomes a subtraction
7549 and a comparison, and is probably faster. */
7550 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7551 && TREE_CODE (arg0) == ABS_EXPR
7552 && ! TREE_SIDE_EFFECTS (arg0)
7553 && (0 != (tem = negate_expr (arg1)))
7554 && TREE_CODE (tem) == INTEGER_CST
7555 && ! TREE_CONSTANT_OVERFLOW (tem))
7556 return fold (build (TRUTH_ANDIF_EXPR, type,
7557 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7558 build (LE_EXPR, type,
7559 TREE_OPERAND (arg0, 0), arg1)));
7561 /* If this is an EQ or NE comparison with zero and ARG0 is
7562 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7563 two operations, but the latter can be done in one less insn
7564 on machines that have only two-operand insns or on which a
7565 constant cannot be the first operand. */
7566 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7567 && TREE_CODE (arg0) == BIT_AND_EXPR)
7569 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7570 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7571 return
7572 fold (build (code, type,
7573 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7574 build (RSHIFT_EXPR,
7575 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7576 TREE_OPERAND (arg0, 1),
7577 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7578 fold_convert (TREE_TYPE (arg0),
7579 integer_one_node)),
7580 arg1));
7581 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7582 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7583 return
7584 fold (build (code, type,
7585 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7586 build (RSHIFT_EXPR,
7587 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7588 TREE_OPERAND (arg0, 0),
7589 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7590 fold_convert (TREE_TYPE (arg0),
7591 integer_one_node)),
7592 arg1));
7595 /* If this is an NE or EQ comparison of zero against the result of a
7596 signed MOD operation whose second operand is a power of 2, make
7597 the MOD operation unsigned since it is simpler and equivalent. */
7598 if ((code == NE_EXPR || code == EQ_EXPR)
7599 && integer_zerop (arg1)
7600 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
7601 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7602 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7603 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7604 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7605 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7607 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
7608 tree newmod = build (TREE_CODE (arg0), newtype,
7609 fold_convert (newtype,
7610 TREE_OPERAND (arg0, 0)),
7611 fold_convert (newtype,
7612 TREE_OPERAND (arg0, 1)));
7614 return build (code, type, newmod, fold_convert (newtype, arg1));
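/* For example, x % 4 == 0 with signed x becomes
   (unsigned) x % 4 == 0, which is a simple mask test.  */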
7617 /* If this is an NE comparison of zero with an AND of one, remove the
7618 comparison since the AND will give the correct value. */
7619 if (code == NE_EXPR && integer_zerop (arg1)
7620 && TREE_CODE (arg0) == BIT_AND_EXPR
7621 && integer_onep (TREE_OPERAND (arg0, 1)))
7622 return fold_convert (type, arg0);
7624 /* If we have (A & C) == C where C is a power of 2, convert this into
7625 (A & C) != 0. Similarly for NE_EXPR. */
7626 if ((code == EQ_EXPR || code == NE_EXPR)
7627 && TREE_CODE (arg0) == BIT_AND_EXPR
7628 && integer_pow2p (TREE_OPERAND (arg0, 1))
7629 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7630 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7631 arg0, integer_zero_node));
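/* For example, (x & 4) == 4 becomes (x & 4) != 0.  */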
7633 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7634 2, then fold the expression into shifts and logical operations. */
7635 tem = fold_single_bit_test (code, arg0, arg1, type);
7636 if (tem)
7637 return tem;
7639 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7640 Similarly for NE_EXPR. */
7641 if ((code == EQ_EXPR || code == NE_EXPR)
7642 && TREE_CODE (arg0) == BIT_AND_EXPR
7643 && TREE_CODE (arg1) == INTEGER_CST
7644 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7646 tree dandnotc
7647 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7648 arg1, build1 (BIT_NOT_EXPR,
7649 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7650 TREE_OPERAND (arg0, 1))));
7651 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7652 if (integer_nonzerop (dandnotc))
7653 return omit_one_operand (type, rslt, arg0);
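/* For example, (x & 12) == 3 is always false, since 3 has bits
   set outside the mask 12.  */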
7656 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7657 Similarly for NE_EXPR. */
7658 if ((code == EQ_EXPR || code == NE_EXPR)
7659 && TREE_CODE (arg0) == BIT_IOR_EXPR
7660 && TREE_CODE (arg1) == INTEGER_CST
7661 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7663 tree candnotd
7664 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7665 TREE_OPERAND (arg0, 1),
7666 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7667 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7668 if (integer_nonzerop (candnotd))
7669 return omit_one_operand (type, rslt, arg0);
7672 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7673 and similarly for >= into !=. */
7674 if ((code == LT_EXPR || code == GE_EXPR)
7675 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7676 && TREE_CODE (arg1) == LSHIFT_EXPR
7677 && integer_onep (TREE_OPERAND (arg1, 0)))
7678 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7679 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7680 TREE_OPERAND (arg1, 1)),
7681 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7683 else if ((code == LT_EXPR || code == GE_EXPR)
7684 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7685 && (TREE_CODE (arg1) == NOP_EXPR
7686 || TREE_CODE (arg1) == CONVERT_EXPR)
7687 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7688 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7689 return
7690 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7691 fold_convert (TREE_TYPE (arg0),
7692 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7693 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7694 1)),
7695 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7697 /* Simplify comparison of something with itself. (For IEEE
7698 floating-point, we can only do some of these simplifications.) */
7699 if (operand_equal_p (arg0, arg1, 0))
7701 switch (code)
7703 case EQ_EXPR:
7704 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7705 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7706 return constant_boolean_node (1, type);
7707 break;
7709 case GE_EXPR:
7710 case LE_EXPR:
7711 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7712 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7713 return constant_boolean_node (1, type);
7714 return fold (build (EQ_EXPR, type, arg0, arg1));
7717 	  /* For NE, we can only do this simplification if the operands
7718 	     are integral or we don't honor IEEE floating-point NaNs.  */
7719 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7720 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7722 /* ... fall through ... */
7725 return constant_boolean_node (0, type);
7731 /* If we are comparing an expression that just has comparisons
7732 of two integer values, arithmetic expressions of those comparisons,
7733 and constants, we can simplify it. There are only three cases
7734 to check: the two values can either be equal, the first can be
7735 greater, or the second can be greater. Fold the expression for
7736 those three values. Since each value must be 0 or 1, we have
7737 eight possibilities, each of which corresponds to the constant 0
7738 or 1 or one of the six possible comparisons.
7740 This handles common cases like (a > b) == 0 but also handles
7741 expressions like ((x > y) - (y > x)) > 0, which supposedly
7742 occur in macroized code. */
7744 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7746 tree cval1 = 0, cval2 = 0;
7749 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7750 /* Don't handle degenerate cases here; they should already
7751 have been handled anyway. */
7752 && cval1 != 0 && cval2 != 0
7753 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7754 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7755 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7756 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7757 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7758 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7759 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7761 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7762 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7764 /* We can't just pass T to eval_subst in case cval1 or cval2
7765 was the same as ARG1. */
7768 = fold (build (code, type,
7769 eval_subst (arg0, cval1, maxval, cval2, minval),
7772 = fold (build (code, type,
7773 eval_subst (arg0, cval1, maxval, cval2, maxval),
7776 = fold (build (code, type,
7777 eval_subst (arg0, cval1, minval, cval2, maxval),
7780 /* All three of these results should be 0 or 1. Confirm they
7781 		 are.  Then use those values to select the proper code to return.  */
7784 if ((integer_zerop (high_result)
7785 || integer_onep (high_result))
7786 && (integer_zerop (equal_result)
7787 || integer_onep (equal_result))
7788 && (integer_zerop (low_result)
7789 || integer_onep (low_result)))
7791 /* Make a 3-bit mask with the high-order bit being the
7792 		 value for `>', the next for `=', and the low for `<'.  */
7793 switch ((integer_onep (high_result) * 4)
7794 + (integer_onep (equal_result) * 2)
7795 + integer_onep (low_result))
7799 return omit_one_operand (type, integer_zero_node, arg0);
7820 return omit_one_operand (type, integer_one_node, arg0);
7823 tem = build (code, type, cval1, cval2);
7825 return save_expr (tem);
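	      /* Illustrative example: for `(a > b) == 0', substituting
		 (max,min), (max,max) and (min,max) for (a,b) yields
		 high_result = 0, equal_result = 1 and low_result = 1,
		 i.e. mask 3, which selects LE_EXPR: the whole
		 expression folds to `a <= b'.  */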
7832 /* If this is a comparison of a field, we may be able to simplify it. */
7833 if (((TREE_CODE (arg0) == COMPONENT_REF
7834 && lang_hooks.can_use_bit_fields_p ())
7835 || TREE_CODE (arg0) == BIT_FIELD_REF)
7836 && (code == EQ_EXPR || code == NE_EXPR)
7837 /* Handle the constant case even without -O
7838 to make sure the warnings are given. */
7839 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7841 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7846 /* If this is a comparison of complex values and either or both sides
7847 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7848 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7849 This may prevent needless evaluations. */
7850 if ((code == EQ_EXPR || code == NE_EXPR)
7851 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7852 && (TREE_CODE (arg0) == COMPLEX_EXPR
7853 || TREE_CODE (arg1) == COMPLEX_EXPR
7854 || TREE_CODE (arg0) == COMPLEX_CST
7855 || TREE_CODE (arg1) == COMPLEX_CST))
7857 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7858 tree real0, imag0, real1, imag1;
7860 arg0 = save_expr (arg0);
7861 arg1 = save_expr (arg1);
7862 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7863 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7864 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7865 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7867 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7870 fold (build (code, type, real0, real1)),
7871 fold (build (code, type, imag0, imag1))));
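      /* Illustrative example: `x == y' on complex operands becomes
	 `realpart (x) == realpart (y) && imagpart (x) == imagpart (y)',
	 and `x != y' the corresponding `||' form, so a mismatch in the
	 real parts can skip evaluating the imaginary comparison.  */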
7874 /* Optimize comparisons of strlen vs zero to a compare of the
7875 first character of the string vs zero. To wit,
7876 strlen(ptr) == 0 => *ptr == 0
7877 strlen(ptr) != 0 => *ptr != 0
7878 Other cases should reduce to one of these two (or a constant)
7879 due to the return value of strlen being unsigned. */
7880 if ((code == EQ_EXPR || code == NE_EXPR)
7881 && integer_zerop (arg1)
7882 && TREE_CODE (arg0) == CALL_EXPR)
7884 tree fndecl = get_callee_fndecl (arg0);
7888 && DECL_BUILT_IN (fndecl)
7889 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7890 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7891 && (arglist = TREE_OPERAND (arg0, 1))
7892 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7893 && ! TREE_CHAIN (arglist))
7894 return fold (build (code, type,
7895 build1 (INDIRECT_REF, char_type_node,
7896 				    TREE_VALUE (arglist)),
7897 integer_zero_node));
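	  /* Illustrative example: `strlen (p) == 0' folds to `*p == 0';
	     strlen returns zero exactly when the first character is the
	     terminating NUL, so only one character needs to be read.  */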
7900 /* Both ARG0 and ARG1 are known to be constants at this point. */
7901 t1 = fold_relational_const (code, type, arg0, arg1);
7902 return (t1 == NULL_TREE ? t : t1);
7905 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7906 so all simple results must be passed through pedantic_non_lvalue. */
7907 if (TREE_CODE (arg0) == INTEGER_CST)
7909 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
7910 /* Only optimize constant conditions when the selected branch
7911 has the same type as the COND_EXPR. This avoids optimizing
7912 away "c ? x : throw", where the throw has a void type. */
7913 if (! VOID_TYPE_P (TREE_TYPE (tem))
7914 || VOID_TYPE_P (type))
7915 return pedantic_non_lvalue (tem);
7918 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
7919 return pedantic_omit_one_operand (type, arg1, arg0);
7921 /* If we have A op B ? A : C, we may be able to convert this to a
7922 simpler expression, depending on the operation and the values
7923 of B and C. Signed zeros prevent all of these transformations,
7924 for reasons given above each one. */
7926 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7927 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7928 arg1, TREE_OPERAND (arg0, 1))
7929 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7931 tree arg2 = TREE_OPERAND (t, 2);
7932 enum tree_code comp_code = TREE_CODE (arg0);
7936 /* If we have A op 0 ? A : -A, consider applying the following
7939 A == 0? A : -A same as -A
7940 A != 0? A : -A same as A
7941 A >= 0? A : -A same as abs (A)
7942 A > 0? A : -A same as abs (A)
7943 A <= 0? A : -A same as -abs (A)
7944 A < 0? A : -A same as -abs (A)
7946 None of these transformations work for modes with signed
7947 zeros. If A is +/-0, the first two transformations will
7948 change the sign of the result (from +0 to -0, or vice
7949 versa). The last four will fix the sign of the result,
7950 even though the original expressions could be positive or
7951 negative, depending on the sign of A.
7953 Note that all these transformations are correct if A is
7954 NaN, since the two alternatives (A and -A) are also NaNs. */
7955 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7956 ? real_zerop (TREE_OPERAND (arg0, 1))
7957 : integer_zerop (TREE_OPERAND (arg0, 1)))
7958 && TREE_CODE (arg2) == NEGATE_EXPR
7959 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7963 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
7964 tem = fold_convert (type, negate_expr (tem));
7965 return pedantic_non_lvalue (tem);
7967 return pedantic_non_lvalue (fold_convert (type, arg1));
7970 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7971 arg1 = fold_convert (lang_hooks.types.signed_type
7972 (TREE_TYPE (arg1)), arg1);
7973 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
7974 return pedantic_non_lvalue (fold_convert (type, arg1));
7977 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7978 arg1 = fold_convert (lang_hooks.types.signed_type
7979 (TREE_TYPE (arg1)), arg1);
7980 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
7981 arg1 = negate_expr (fold_convert (type, arg1));
7982 return pedantic_non_lvalue (arg1);
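	  /* Illustrative example: `x > 0 ? x : -x' folds to abs (x) and
	     `x <= 0 ? x : -x' to -abs (x); both remain correct for -0.0
	     because abs forces the sign either way.  */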
7987 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7988 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7989 both transformations are correct when A is NaN: A != 0
7990 is then true, and A == 0 is false. */
7992 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7994 if (comp_code == NE_EXPR)
7995 return pedantic_non_lvalue (fold_convert (type, arg1));
7996 else if (comp_code == EQ_EXPR)
7997 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8000 /* Try some transformations of A op B ? A : B.
8002 A == B? A : B same as B
8003 A != B? A : B same as A
8004 A >= B? A : B same as max (A, B)
8005 A > B? A : B same as max (B, A)
8006 A <= B? A : B same as min (A, B)
8007 A < B? A : B same as min (B, A)
8009 As above, these transformations don't work in the presence
8010 of signed zeros. For example, if A and B are zeros of
8011 opposite sign, the first two transformations will change
8012 the sign of the result. In the last four, the original
8013 expressions give different results for (A=+0, B=-0) and
8014 (A=-0, B=+0), but the transformed expressions do not.
8016 The first two transformations are correct if either A or B
8017 is a NaN. In the first transformation, the condition will
8018 be false, and B will indeed be chosen. In the case of the
8019 second transformation, the condition A != B will be true,
8020 and A will be chosen.
8022 The conversions to max() and min() are not correct if B is
8023 a number and A is not. The conditions in the original
8024 expressions will be false, so all four give B. The min()
8025 and max() versions would give a NaN instead. */
8026 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8027 arg2, TREE_OPERAND (arg0, 0)))
8029 tree comp_op0 = TREE_OPERAND (arg0, 0);
8030 tree comp_op1 = TREE_OPERAND (arg0, 1);
8031 tree comp_type = TREE_TYPE (comp_op0);
8033 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8034 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8044 return pedantic_non_lvalue (fold_convert (type, arg2));
8046 return pedantic_non_lvalue (fold_convert (type, arg1));
8049 /* In C++ a ?: expression can be an lvalue, so put the
8050 operand which will be used if they are equal first
8051 so that we can convert this back to the
8052 corresponding COND_EXPR. */
8053 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8054 return pedantic_non_lvalue (fold_convert
8055 (type, fold (build (MIN_EXPR, comp_type,
8056 (comp_code == LE_EXPR
8057 ? comp_op0 : comp_op1),
8058 (comp_code == LE_EXPR
8059 ? comp_op1 : comp_op0)))));
8063 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8064 return pedantic_non_lvalue (fold_convert
8065 (type, fold (build (MAX_EXPR, comp_type,
8066 (comp_code == GE_EXPR
8067 ? comp_op0 : comp_op1),
8068 (comp_code == GE_EXPR
8069 ? comp_op1 : comp_op0)))));
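	      /* Illustrative example: when NaNs need not be honored,
		 `a < b ? a : b' folds to min (a, b) and `a >= b ? a : b'
		 to max (a, b); the operand order chosen above preserves
		 which value is returned when a == b.  */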
8076 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8077 we might still be able to simplify this. For example,
8078 if C1 is one less or one more than C2, this might have started
8079 out as a MIN or MAX and been transformed by this function.
8080 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8082 if (INTEGRAL_TYPE_P (type)
8083 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8084 && TREE_CODE (arg2) == INTEGER_CST)
8088 /* We can replace A with C1 in this case. */
8089 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8090 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8091 TREE_OPERAND (t, 2)));
8094 /* If C1 is C2 + 1, this is min(A, C2). */
8095 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8096 && operand_equal_p (TREE_OPERAND (arg0, 1),
8097 const_binop (PLUS_EXPR, arg2,
8098 integer_one_node, 0), 1))
8099 return pedantic_non_lvalue
8100 (fold (build (MIN_EXPR, type, arg1, arg2)));
8104 /* If C1 is C2 - 1, this is min(A, C2). */
8105 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8106 && operand_equal_p (TREE_OPERAND (arg0, 1),
8107 const_binop (MINUS_EXPR, arg2,
8108 integer_one_node, 0), 1))
8109 return pedantic_non_lvalue
8110 (fold (build (MIN_EXPR, type, arg1, arg2)));
8114 /* If C1 is C2 - 1, this is max(A, C2). */
8115 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8116 && operand_equal_p (TREE_OPERAND (arg0, 1),
8117 const_binop (MINUS_EXPR, arg2,
8118 integer_one_node, 0), 1))
8119 return pedantic_non_lvalue
8120 (fold (build (MAX_EXPR, type, arg1, arg2)));
8124 /* If C1 is C2 + 1, this is max(A, C2). */
8125 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8126 && operand_equal_p (TREE_OPERAND (arg0, 1),
8127 const_binop (PLUS_EXPR, arg2,
8128 integer_one_node, 0), 1))
8129 return pedantic_non_lvalue
8130 (fold (build (MAX_EXPR, type, arg1, arg2)));
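	      /* Illustrative example: `x < 3 ? x : 2' has C1 == C2 + 1,
		 so for integers it is exactly min (x, 2) and is rebuilt
		 as such; the remaining three cases above are
		 symmetric.  */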
8139 /* If the second operand is simpler than the third, swap them
8140 since that produces better jump optimization results. */
8141 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8142 TREE_OPERAND (t, 2), false))
8144 /* See if this can be inverted. If it can't, possibly because
8145 	     it was a floating-point inequality comparison, don't do anything.  */
8147 tem = invert_truthvalue (arg0);
8149 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8150 return fold (build (code, type, tem,
8151 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8154 /* Convert A ? 1 : 0 to simply A. */
8155 if (integer_onep (TREE_OPERAND (t, 1))
8156 && integer_zerop (TREE_OPERAND (t, 2))
8157 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8158 call to fold will try to move the conversion inside
8159 a COND, which will recurse. In that case, the COND_EXPR
8160 is probably the best choice, so leave it alone. */
8161 && type == TREE_TYPE (arg0))
8162 return pedantic_non_lvalue (arg0);
8164 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8165 over COND_EXPR in cases such as floating point comparisons. */
8166 if (integer_zerop (TREE_OPERAND (t, 1))
8167 && integer_onep (TREE_OPERAND (t, 2))
8168 && truth_value_p (TREE_CODE (arg0)))
8169 return pedantic_non_lvalue (fold_convert (type,
8170 invert_truthvalue (arg0)));
8172 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8173 operation is simply A & 2. */
8175 if (integer_zerop (TREE_OPERAND (t, 2))
8176 && TREE_CODE (arg0) == NE_EXPR
8177 && integer_zerop (TREE_OPERAND (arg0, 1))
8178 && integer_pow2p (arg1)
8179 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8180 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8182 return pedantic_non_lvalue (fold_convert (type,
8183 TREE_OPERAND (arg0, 0)));
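      /* Illustrative example: `(x & 2) != 0 ? 2 : 0' is simply `x & 2',
	 because the AND result is already either 0 or the power-of-two
	 constant being selected.  */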
8185 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8186 if (integer_zerop (TREE_OPERAND (t, 2))
8187 && truth_value_p (TREE_CODE (arg0))
8188 && truth_value_p (TREE_CODE (arg1)))
8189 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8192 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8193 if (integer_onep (TREE_OPERAND (t, 2))
8194 && truth_value_p (TREE_CODE (arg0))
8195 && truth_value_p (TREE_CODE (arg1)))
8197 	  /* Only perform the transformation if ARG0 is easily inverted.  */
8198 tem = invert_truthvalue (arg0);
8199 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8200 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8207 /* When pedantic, a compound expression can be neither an lvalue
8208 nor an integer constant expression. */
8209 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8211 	  /* Don't let (0, 0) be a null pointer constant.  */
8212 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8213 : fold_convert (type, arg1);
8214 return pedantic_non_lvalue (tem);
8218 return build_complex (type, arg0, arg1);
8222 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8224 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8225 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8226 TREE_OPERAND (arg0, 1));
8227 else if (TREE_CODE (arg0) == COMPLEX_CST)
8228 return TREE_REALPART (arg0);
8229 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8230 return fold (build (TREE_CODE (arg0), type,
8231 fold (build1 (REALPART_EXPR, type,
8232 TREE_OPERAND (arg0, 0))),
8233 fold (build1 (REALPART_EXPR,
8234 type, TREE_OPERAND (arg0, 1)))));
8238 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8239 return fold_convert (type, integer_zero_node);
8240 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8241 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8242 TREE_OPERAND (arg0, 0));
8243 else if (TREE_CODE (arg0) == COMPLEX_CST)
8244 return TREE_IMAGPART (arg0);
8245 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8246 return fold (build (TREE_CODE (arg0), type,
8247 fold (build1 (IMAGPART_EXPR, type,
8248 TREE_OPERAND (arg0, 0))),
8249 fold (build1 (IMAGPART_EXPR, type,
8250 TREE_OPERAND (arg0, 1)))));
8253     /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where appropriate.  */
8255 case CLEANUP_POINT_EXPR:
8256 if (! has_cleanups (arg0))
8257 return TREE_OPERAND (t, 0);
8260 enum tree_code code0 = TREE_CODE (arg0);
8261 int kind0 = TREE_CODE_CLASS (code0);
8262 tree arg00 = TREE_OPERAND (arg0, 0);
8265 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8266 return fold (build1 (code0, type,
8267 fold (build1 (CLEANUP_POINT_EXPR,
8268 TREE_TYPE (arg00), arg00))));
8270 if (kind0 == '<' || kind0 == '2'
8271 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8272 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8273 || code0 == TRUTH_XOR_EXPR)
8275 arg01 = TREE_OPERAND (arg0, 1);
8277 if (TREE_CONSTANT (arg00)
8278 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8279 && ! has_cleanups (arg00)))
8280 return fold (build (code0, type, arg00,
8281 fold (build1 (CLEANUP_POINT_EXPR,
8282 TREE_TYPE (arg01), arg01))));
8284 if (TREE_CONSTANT (arg01))
8285 return fold (build (code0, type,
8286 fold (build1 (CLEANUP_POINT_EXPR,
8287 TREE_TYPE (arg00), arg00)),
8295 /* Check for a built-in function. */
8296 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8297 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8299 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8301 tree tmp = fold_builtin (t);
8309 } /* switch (code) */
8312 #ifdef ENABLE_FOLD_CHECKING
8315 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8316 static void fold_check_failed (tree, tree);
8317 void print_fold_checksum (tree);
8319 /* When --enable-checking=fold, compute a digest of expr before
8320    and after the actual fold call, to detect whether fold
8321    accidentally changed the original expr.  */
8328 unsigned char checksum_before[16], checksum_after[16];
8331 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8332 md5_init_ctx (&ctx);
8333 fold_checksum_tree (expr, &ctx, ht);
8334 md5_finish_ctx (&ctx, checksum_before);
8337 ret = fold_1 (expr);
8339 md5_init_ctx (&ctx);
8340 fold_checksum_tree (expr, &ctx, ht);
8341 md5_finish_ctx (&ctx, checksum_after);
8344 if (memcmp (checksum_before, checksum_after, 16))
8345 fold_check_failed (expr, ret);
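/* Usage note (illustrative): with --enable-checking=fold, every call
   digests EXPR with MD5 before and after folding; if the two digests
   differ, fold mutated its argument in place and fold_check_failed
   reports an internal error.  */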
8351 print_fold_checksum (tree expr)
8354 unsigned char checksum[16], cnt;
8357 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8358 md5_init_ctx (&ctx);
8359 fold_checksum_tree (expr, &ctx, ht);
8360 md5_finish_ctx (&ctx, checksum);
8362 for (cnt = 0; cnt < 16; ++cnt)
8363 fprintf (stderr, "%02x", checksum[cnt]);
8364 putc ('\n', stderr);
8368 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8370 internal_error ("fold check: original tree changed by fold");
8374 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8377 enum tree_code code;
8378 char buf[sizeof (struct tree_decl)];
8381 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8382 > sizeof (struct tree_decl)
8383 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8387 slot = htab_find_slot (ht, expr, INSERT);
8391 code = TREE_CODE (expr);
8392 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8394 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8395 memcpy (buf, expr, tree_size (expr));
8397 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8399 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8401 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8402 memcpy (buf, expr, tree_size (expr));
8404 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8406 else if (TREE_CODE_CLASS (code) == 't'
8407 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8409 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8410 memcpy (buf, expr, tree_size (expr));
8412 TYPE_POINTER_TO (expr) = NULL;
8413 TYPE_REFERENCE_TO (expr) = NULL;
8415 md5_process_bytes (expr, tree_size (expr), ctx);
8416 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8417 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8418 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8419 len = TREE_CODE_LENGTH (code);
8420 switch (TREE_CODE_CLASS (code))
8426 md5_process_bytes (TREE_STRING_POINTER (expr),
8427 TREE_STRING_LENGTH (expr), ctx);
8430 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8431 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8434 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8444 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8445 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8448 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8449 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8458 case SAVE_EXPR: len = 2; break;
8459 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8460 case RTL_EXPR: len = 0; break;
8461 case WITH_CLEANUP_EXPR: len = 2; break;
8470 for (i = 0; i < len; ++i)
8471 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8474 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8475 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8476 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8477 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8478 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8479 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8480 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8481 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8482 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8483 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8484 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8487 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8488 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8489 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8490 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8491 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8492 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8493 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8494 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8495 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8496 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8505 /* Perform constant folding and related simplification of initializer
8506 expression EXPR. This behaves identically to "fold" but ignores
8507 potential run-time traps and exceptions that fold must preserve. */
8510 fold_initializer (tree expr)
8512 int saved_signaling_nans = flag_signaling_nans;
8513 int saved_trapping_math = flag_trapping_math;
8514 int saved_trapv = flag_trapv;
8517 flag_signaling_nans = 0;
8518 flag_trapping_math = 0;
8521 result = fold (expr);
8523 flag_signaling_nans = saved_signaling_nans;
8524 flag_trapping_math = saved_trapping_math;
8525 flag_trapv = saved_trapv;
8530 /* Determine if first argument is a multiple of second argument. Return 0 if
8531    it is not, or if we cannot easily determine it to be.
8533 An example of the sort of thing we care about (at this point; this routine
8534 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8535 fold cases do now) is discovering that
8537 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8543 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8545 This code also handles discovering that
8547 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8549    is a multiple of 8 so we don't have to worry about dealing with a possible remainder.
8552 Note that we *look* inside a SAVE_EXPR only to determine how it was
8553 calculated; it is not safe for fold to do much of anything else with the
8554 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8555 at run time. For example, the latter example above *cannot* be implemented
8556 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8557 evaluation time of the original SAVE_EXPR is not necessarily the same at
8558 the time the new expression is evaluated. The only optimization of this
8559 sort that would be valid is changing
8561 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8565 SAVE_EXPR (I) * SAVE_EXPR (J)
8567 (where the same SAVE_EXPR (J) is used in the original and the
8568 transformed version). */
8571 multiple_of_p (tree type, tree top, tree bottom)
8573 if (operand_equal_p (top, bottom, 0))
8576 if (TREE_CODE (type) != INTEGER_TYPE)
8579 switch (TREE_CODE (top))
8582 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8583 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8587 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8588 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8591 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8595 op1 = TREE_OPERAND (top, 1);
8596 /* const_binop may not detect overflow correctly,
8597 so check for it explicitly here. */
8598 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8599 > TREE_INT_CST_LOW (op1)
8600 && TREE_INT_CST_HIGH (op1) == 0
8601 && 0 != (t1 = fold_convert (type,
8602 const_binop (LSHIFT_EXPR,
8605 && ! TREE_OVERFLOW (t1))
8606 return multiple_of_p (type, t1, bottom);
8611 /* Can't handle conversions from non-integral or wider integral type. */
8612 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8613 || (TYPE_PRECISION (type)
8614 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8617       /* ... fall through ...  */
8620 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8623 if (TREE_CODE (bottom) != INTEGER_CST
8624 || (TYPE_UNSIGNED (type)
8625 && (tree_int_cst_sgn (top) < 0
8626 || tree_int_cst_sgn (bottom) < 0)))
8628       return integer_zerop (const_binop (TRUNC_MOD_EXPR, top, bottom, 0));
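/* Illustrative example: multiple_of_p (type, `j * 8 + 16', 8) returns
   1, recursing through the PLUS_EXPR and MULT_EXPR cases down to the
   constant test 16 % 8 == 0.  */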
8636 /* Return true if `t' is known to be non-negative. */
8639 tree_expr_nonnegative_p (tree t)
8641 switch (TREE_CODE (t))
8647 return tree_int_cst_sgn (t) >= 0;
8650 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8653 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8654 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8655 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8657 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8658 both unsigned and at least 2 bits shorter than the result. */
8659 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8660 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8661 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8663 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8664 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8665 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8666 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8668 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8669 TYPE_PRECISION (inner2)) + 1;
8670 return prec < TYPE_PRECISION (TREE_TYPE (t));
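	    /* Illustrative example (assuming 8-bit chars and 32-bit
	       ints): two zero-extended unsigned chars sum to at most
	       255 + 255 = 510, which fits in 9 bits; 9 < 32, so the
	       PLUS_EXPR is known non-negative.  */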
8676 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8678 /* x * x for floating point x is always non-negative. */
8679 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8681 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8682 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8685 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8686 	 both unsigned and the sum of their precisions is less than that of the result.  */
8687 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8688 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8689 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8691 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8692 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8693 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8694 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8695 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8696 < TYPE_PRECISION (TREE_TYPE (t));
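	    /* Illustrative example (assuming 8-bit chars and 32-bit
	       ints): two zero-extended unsigned chars multiply to at
	       most 255 * 255, which fits in 8 + 8 = 16 bits; 16 < 32,
	       so the product cannot wrap negative.  */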
8700 case TRUNC_DIV_EXPR:
8702 case FLOOR_DIV_EXPR:
8703 case ROUND_DIV_EXPR:
8704 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8705 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8707 case TRUNC_MOD_EXPR:
8709 case FLOOR_MOD_EXPR:
8710 case ROUND_MOD_EXPR:
8711 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8714 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8715 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8718 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8719 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8722 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8723 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8727 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8728 tree outer_type = TREE_TYPE (t);
8730 if (TREE_CODE (outer_type) == REAL_TYPE)
8732 if (TREE_CODE (inner_type) == REAL_TYPE)
8733 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8734 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8736 if (TYPE_UNSIGNED (inner_type))
8738 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8741 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8743 if (TREE_CODE (inner_type) == REAL_TYPE)
8744 	    return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8745 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8746 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8747 && TYPE_UNSIGNED (inner_type);
8753 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8754 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8756 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8758 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8759 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8761 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8762 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8764 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8766 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8768 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8769 case NON_LVALUE_EXPR:
8770 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8772 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8774 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8778 tree fndecl = get_callee_fndecl (t);
8779 tree arglist = TREE_OPERAND (t, 1);
8781 && DECL_BUILT_IN (fndecl)
8782 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8783 switch (DECL_FUNCTION_CODE (fndecl))
8785 #define CASE_BUILTIN_F(BUILT_IN_FN) \
8786 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
8787 #define CASE_BUILTIN_I(BUILT_IN_FN) \
8788 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
8790 CASE_BUILTIN_F (BUILT_IN_ACOS)
8791 CASE_BUILTIN_F (BUILT_IN_ACOSH)
8792 CASE_BUILTIN_F (BUILT_IN_CABS)
8793 CASE_BUILTIN_F (BUILT_IN_COSH)
8794 CASE_BUILTIN_F (BUILT_IN_ERFC)
8795 CASE_BUILTIN_F (BUILT_IN_EXP)
8796 CASE_BUILTIN_F (BUILT_IN_EXP10)
8797 CASE_BUILTIN_F (BUILT_IN_EXP2)
8798 CASE_BUILTIN_F (BUILT_IN_FABS)
8799 CASE_BUILTIN_F (BUILT_IN_FDIM)
8800 CASE_BUILTIN_F (BUILT_IN_FREXP)
8801 CASE_BUILTIN_F (BUILT_IN_HYPOT)
8802 CASE_BUILTIN_F (BUILT_IN_POW10)
8803 CASE_BUILTIN_F (BUILT_IN_SQRT)
8804 CASE_BUILTIN_I (BUILT_IN_FFS)
8805 CASE_BUILTIN_I (BUILT_IN_PARITY)
8806 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
8810 CASE_BUILTIN_F (BUILT_IN_ASINH)
8811 CASE_BUILTIN_F (BUILT_IN_ATAN)
8812 CASE_BUILTIN_F (BUILT_IN_ATANH)
8813 CASE_BUILTIN_F (BUILT_IN_CBRT)
8814 CASE_BUILTIN_F (BUILT_IN_CEIL)
8815 CASE_BUILTIN_F (BUILT_IN_ERF)
8816 CASE_BUILTIN_F (BUILT_IN_EXPM1)
8817 CASE_BUILTIN_F (BUILT_IN_FLOOR)
8818 CASE_BUILTIN_F (BUILT_IN_FMOD)
8819 CASE_BUILTIN_F (BUILT_IN_LDEXP)
8820 CASE_BUILTIN_F (BUILT_IN_LLRINT)
8821 CASE_BUILTIN_F (BUILT_IN_LLROUND)
8822 CASE_BUILTIN_F (BUILT_IN_LRINT)
8823 CASE_BUILTIN_F (BUILT_IN_LROUND)
8824 CASE_BUILTIN_F (BUILT_IN_MODF)
8825 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
8826 CASE_BUILTIN_F (BUILT_IN_POW)
8827 CASE_BUILTIN_F (BUILT_IN_RINT)
8828 CASE_BUILTIN_F (BUILT_IN_ROUND)
8829 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
8830 CASE_BUILTIN_F (BUILT_IN_SINH)
8831 CASE_BUILTIN_F (BUILT_IN_TANH)
8832 CASE_BUILTIN_F (BUILT_IN_TRUNC)
8833 /* True if the 1st argument is nonnegative. */
8834 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8836 	    CASE_BUILTIN_F (BUILT_IN_FMAX)
8837 /* True if the 1st OR 2nd arguments are nonnegative. */
8838 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
8839 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
8841 	    CASE_BUILTIN_F (BUILT_IN_FMIN)
8842 /* True if the 1st AND 2nd arguments are nonnegative. */
8843 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
8844 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
8846 	    CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
8847 /* True if the 2nd argument is nonnegative. */
8848 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
8852 #undef CASE_BUILTIN_F
8853 #undef CASE_BUILTIN_I
8857 /* ... fall through ... */
8860 if (truth_value_p (TREE_CODE (t)))
8861 /* Truth values evaluate to 0 or 1, which is nonnegative. */
8865   /* We don't know the sign of `t', so be conservative and return false.  */
8869 /* Return true when T is an address and is known to be nonzero.
8870 For floating point we further ensure that T is not denormal.
8871    Similar logic is present in nonzero_address_p in rtlanal.c.  */
8874 tree_expr_nonzero_p (tree t)
8876 tree type = TREE_TYPE (t);
8878 /* Doing something useful for floating point would need more work. */
8879 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
8882 switch (TREE_CODE (t))
8885 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
8886 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
8889 return !integer_zerop (t);
8892 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
8894 	  /* In the presence of negative values it is hard
8895 	     to say anything definite.  */
8896 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8897 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
8899 	  /* One of the operands must be positive and the other non-negative.  */
8900 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
8901 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
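	  /* Illustrative example: if x is known nonzero and both x and
	     y are known non-negative, then x + y >= x > 0; signed
	     overflow is excluded by the !flag_wrapv test above, so the
	     sum is nonzero.  */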
8906 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
8908 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
8909 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
8915 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8916 tree outer_type = TREE_TYPE (t);
8918 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
8919 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
8924 /* Weak declarations may link to NULL. */
8925 if (DECL_P (TREE_OPERAND (t, 0)))
8926 return !DECL_WEAK (TREE_OPERAND (t, 0));
8927 /* Constants and all other cases are never weak. */
8931 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
8932 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
8935 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
8936 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
8939 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
8941 	  /* When both operands are nonzero, MAX must be too.  */
8942 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
8945 /* MAX where operand 0 is positive is positive. */
8946 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8948 /* MAX where operand 1 is positive is positive. */
8949 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
8950 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
8957 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
8960 case NON_LVALUE_EXPR:
8961 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
8964 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
8965 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
8973 /* Return true if `r' is known to be non-negative.
8974 Only handles constants at the moment. */
8977 rtl_expr_nonnegative_p (rtx r)
8979 switch (GET_CODE (r))
8982 return INTVAL (r) >= 0;
8985 if (GET_MODE (r) == VOIDmode)
8986 return CONST_DOUBLE_HIGH (r) >= 0;
8994 units = CONST_VECTOR_NUNITS (r);
8996 for (i = 0; i < units; ++i)
8998 elt = CONST_VECTOR_ELT (r, i);
8999 if (!rtl_expr_nonnegative_p (elt))
9008 /* These are always nonnegative. */
9016 /* Return the tree for neg (ARG0) when ARG0 is known to be either
9017 an integer constant or real constant.
9019 TYPE is the type of the result. */
9022 fold_negate_const (tree arg0, tree type)
9026 if (TREE_CODE (arg0) == INTEGER_CST)
9028 unsigned HOST_WIDE_INT low;
9030 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
9031 TREE_INT_CST_HIGH (arg0),
9033 t = build_int_2 (low, high);
9034 TREE_TYPE (t) = type;
9036 = (TREE_OVERFLOW (arg0)
9037 | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
9038 TREE_CONSTANT_OVERFLOW (t)
9039 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
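      /* Illustrative example: negating the most negative value of a
	 signed type (e.g. INT_MIN) wraps back to itself; neg_double
	 reports this as overflow, and force_fit_type then records it
	 in TREE_OVERFLOW.  */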
9041 else if (TREE_CODE (arg0) == REAL_CST)
9042 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
9043 #ifdef ENABLE_CHECKING
9051 /* Return the tree for abs (ARG0) when ARG0 is known to be either
9052 an integer constant or real constant.
9054 TYPE is the type of the result. */
9057 fold_abs_const (tree arg0, tree type)
9061 if (TREE_CODE (arg0) == INTEGER_CST)
9063 /* If the value is unsigned, then the absolute value is
9064 the same as the ordinary value. */
9065 if (TYPE_UNSIGNED (type))
9067 /* Similarly, if the value is non-negative. */
9068 else if (INT_CST_LT (integer_minus_one_node, arg0))
9070       /* If the value is negative, then the absolute value is its negation.  */
9074 unsigned HOST_WIDE_INT low;
9076 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
9077 TREE_INT_CST_HIGH (arg0),
9079 t = build_int_2 (low, high);
9080 TREE_TYPE (t) = type;
9082 = (TREE_OVERFLOW (arg0)
9083 | force_fit_type (t, overflow));
9084 TREE_CONSTANT_OVERFLOW (t)
9085 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
9089 else if (TREE_CODE (arg0) == REAL_CST)
9091 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
9092 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
9096 #ifdef ENABLE_CHECKING
9104 /* Given CODE, a relational operator, the target type TYPE, and two
9105 constant operands OP0 and OP1, return the result of the
9106 relational operation. If the result is not a compile time
9107 constant, then return NULL_TREE. */
9110 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
9115 /* From here on, the only cases we handle are when the result is
9116 known to be a constant.
9118 To compute GT, swap the arguments and do LT.
9119 To compute GE, do LT and invert the result.
9120 To compute LE, swap the arguments, do LT and invert the result.
9121 To compute NE, do EQ and invert the result.
9123 Therefore, the code below must handle only EQ and LT. */
9125 if (code == LE_EXPR || code == GT_EXPR)
9127 tem = op0, op0 = op1, op1 = tem;
9128 code = swap_tree_comparison (code);
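  /* Illustrative example: `4 > 3' is canonicalized by swapping the
     operands to `3 < 4'; GE, LE and NE additionally invert the LT/EQ
     result computed below.  */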
9131 /* Note that it is safe to invert for real values here because we
9132 will check below in the one case that it matters. */
9136 if (code == NE_EXPR || code == GE_EXPR)
9139 code = invert_tree_comparison (code);
9142 /* Compute a result for LT or EQ if args permit;
9143      otherwise return T.  */
9144 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9146 if (code == EQ_EXPR)
9147 tem = build_int_2 (tree_int_cst_equal (op0, op1), 0);
9149 tem = build_int_2 ((TYPE_UNSIGNED (TREE_TYPE (op0))
9150 ? INT_CST_LT_UNSIGNED (op0, op1)
9151 : INT_CST_LT (op0, op1)),
9155 else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
9156 && integer_zerop (op1) && tree_expr_nonzero_p (op0))
9157 tem = build_int_2 (0, 0);
9159 /* Two real constants can be compared explicitly. */
9160 else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
9162 /* If either operand is a NaN, the result is false with two
9163 exceptions: First, an NE_EXPR is true on NaNs, but that case
9164 is already handled correctly since we will be inverting the
9165 result for NE_EXPR. Second, if we had inverted a LE_EXPR
9166 or a GE_EXPR into a LT_EXPR, we must return true so that it
9167 will be inverted into false. */
9169 if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
9170 || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
9171 tem = build_int_2 (invert && code == LT_EXPR, 0);
9173 else if (code == EQ_EXPR)
9174 tem = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
9175 TREE_REAL_CST (op1)),
9178 tem = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (op0),
9179 TREE_REAL_CST (op1)),
9183 if (tem == NULL_TREE)
9187 TREE_INT_CST_LOW (tem) ^= 1;
9189 TREE_TYPE (tem) = type;
9190 if (TREE_CODE (type) == BOOLEAN_TYPE)
9191 return (*lang_hooks.truthvalue_conversion) (tem);
9195 #include "gt-fold-const.h"