1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
47 #include "coretypes.h"
58 #include "langhooks.h"
61 /* The following constants represent a bit based encoding of GCC's
62 comparison operators. This encoding simplifies transformations
63 on relational comparison operators, such as AND and OR. */
/* Bit-encoded comparison codes; the enumerator list is not visible in this
   excerpt (original numbering jumps from 64 to 83) -- see upstream source.  */
64 enum comparison_code {
/* Forward declarations for the static helpers defined later in this file.
   Grouped roughly as: doubleword encode/decode, negation, tree splitting,
   constant folding, comparison-code manipulation, bit-field and range
   optimizations, and constant-folding utilities.  */
83 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
84 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
85 static bool negate_mathfn_p (enum built_in_function);
86 static bool negate_expr_p (tree);
87 static tree negate_expr (tree);
88 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
89 static tree associate_trees (tree, tree, enum tree_code, tree);
90 static tree const_binop (enum tree_code, tree, tree, int);
91 static hashval_t size_htab_hash (const void *);
92 static int size_htab_eq (const void *, const void *);
93 static tree fold_convert_const (enum tree_code, tree, tree);
94 static enum tree_code invert_tree_comparison (enum tree_code, bool);
95 static enum comparison_code comparison_to_compcode (enum tree_code);
96 static enum tree_code compcode_to_comparison (enum comparison_code);
97 static tree combine_comparisons (enum tree_code, enum tree_code,
98 enum tree_code, tree, tree, tree);
99 static int truth_value_p (enum tree_code);
100 static int operand_equal_for_comparison_p (tree, tree, tree);
101 static int twoval_comparison_p (tree, tree *, tree *, int *);
102 static tree eval_subst (tree, tree, tree, tree, tree);
103 static tree pedantic_omit_one_operand (tree, tree, tree);
104 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
105 static tree make_bit_field_ref (tree, tree, int, int, int);
106 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
107 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
108 enum machine_mode *, int *, int *,
110 static int all_ones_mask_p (tree, int);
111 static tree sign_bit_p (tree, tree);
112 static int simple_operand_p (tree);
113 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
114 static tree make_range (tree, int *, tree *, tree *);
115 static tree build_range_check (tree, tree, int, tree, tree);
116 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
118 static tree fold_range_test (tree);
119 static tree fold_cond_expr_with_comparison (tree, tree, tree);
120 static tree unextend (tree, int, int, tree);
121 static tree fold_truthop (enum tree_code, tree, tree, tree);
122 static tree optimize_minmax_comparison (tree);
123 static tree extract_muldiv (tree, tree, enum tree_code, tree);
124 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
125 static int multiple_of_p (tree, tree, tree);
126 static tree constant_boolean_node (int, tree);
127 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
129 static bool fold_real_zero_addition_p (tree, tree, int);
130 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
132 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
133 static tree fold_div_compare (enum tree_code, tree, tree, tree);
134 static bool reorder_operands_p (tree, tree);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_relational_hi_lo (enum tree_code *, const tree,
141 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
142 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
143 and SUM1. Then this yields nonzero if overflow occurred during the
146 Overflow occurs if A and B have the same sign, but A and SUM differ in
147 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
/* Signed-addition overflow test: true iff A and B agree in sign while A and
   SUM disagree (the classic two's-complement overflow condition).  */
149 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
151 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
152 We do that by representing the two-word integer in 4 words, with only
153 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
154 number. The value of the word is LOWPART + HIGHPART * BASE. */
/* NOTE(review): the `#define LOWPART(x) \' line is missing from this excerpt
   (original numbering skips line 156); the line below is its expansion.  */
157 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
158 #define HIGHPART(x) \
159 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
/* BASE is 2^(HOST_BITS_PER_WIDE_INT/2), the radix of the half-word digits.  */
160 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
162 /* Unpack a two-word integer into 4 words.
163 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
164 WORDS points to the array of HOST_WIDE_INTs. */
/* Splits each HOST_WIDE_INT into two half-word "digits" (base BASE),
   least significant digit first.  NOTE(review): the `static void' line and
   braces are missing from this excerpt.  */
167 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
169 words[0] = LOWPART (low);
170 words[1] = HIGHPART (low);
171 words[2] = LOWPART (hi);
172 words[3] = HIGHPART (hi);
175 /* Pack an array of 4 words into a two-word integer.
176 WORDS points to the array of words.
177 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
/* Inverse of encode: each output word is digit0 + digit1 * BASE.  */
180 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
183 *low = words[0] + words[1] * BASE;
184 *hi = words[2] + words[3] * BASE;
187 /* Make the integer constant T valid for its type by setting to 0 or 1 all
188 the bits in the constant that don't belong in the type.
190 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
191 nonzero, a signed overflow has already occurred in calculating T, so
/* Truncates T to its type's precision, sign- or zero-extending the excess
   bits, and reports whether that changed the value (i.e. overflow).
   NOTE(review): several lines are missing from this excerpt (return type,
   early returns, braces) -- code below left byte-identical.  */
195 force_fit_type (tree t, int overflow)
197 unsigned HOST_WIDE_INT low;
201 if (TREE_CODE (t) == REAL_CST)
203 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
204 Consider doing it via real_convert now. */
208 else if (TREE_CODE (t) != INTEGER_CST)
/* Remember the incoming value so we can detect a change at the end.  */
211 low = TREE_INT_CST_LOW (t);
212 high = TREE_INT_CST_HIGH (t);
214 if (POINTER_TYPE_P (TREE_TYPE (t))
215 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
218 prec = TYPE_PRECISION (TREE_TYPE (t));
220 /* First clear all bits that are beyond the type's precision. */
222 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
224 else if (prec > HOST_BITS_PER_WIDE_INT)
225 TREE_INT_CST_HIGH (t)
226 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
229 TREE_INT_CST_HIGH (t) = 0;
230 if (prec < HOST_BITS_PER_WIDE_INT)
231 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
234 /* Unsigned types do not suffer sign extension or overflow unless they
236 if (TYPE_UNSIGNED (TREE_TYPE (t))
237 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
238 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
241 /* If the value's sign bit is set, extend the sign. */
242 if (prec != 2 * HOST_BITS_PER_WIDE_INT
243 && (prec > HOST_BITS_PER_WIDE_INT
244 ? 0 != (TREE_INT_CST_HIGH (t)
246 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
247 : 0 != (TREE_INT_CST_LOW (t)
248 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
250 /* Value is negative:
251 set to 1 all the bits that are outside this type's precision. */
252 if (prec > HOST_BITS_PER_WIDE_INT)
253 TREE_INT_CST_HIGH (t)
254 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
257 TREE_INT_CST_HIGH (t) = -1;
258 if (prec < HOST_BITS_PER_WIDE_INT)
259 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
263 /* Return nonzero if signed overflow occurred. */
/* Overflow iff the caller already flagged one, or either word changed.  */
265 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
269 /* Add two doubleword integers with doubleword result.
270 Each argument is given as two `HOST_WIDE_INT' pieces.
271 One argument is L1 and H1; the other, L2 and H2.
272 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
275 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
276 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
277 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
279 unsigned HOST_WIDE_INT l;
/* (l < l1) detects carry out of the low word -- unsigned wrap implies
   the sum is smaller than either addend.  */
283 h = h1 + h2 + (l < l1);
/* Returns nonzero on signed overflow (see OVERFLOW_SUM_SIGN).  */
287 return OVERFLOW_SUM_SIGN (h1, h2, h);
290 /* Negate a doubleword integer with doubleword result.
291 Return nonzero if the operation overflows, assuming it's signed.
292 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
293 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
296 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
297 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
/* Overflow occurs only when negating the most negative value, in which
   case both the result's and the input's sign bits are set.  */
303 return (*hv & h1) < 0;
313 /* Multiply two doubleword integers with doubleword result.
314 Return nonzero if the operation overflows, assuming it's signed.
315 Each argument is given as two `HOST_WIDE_INT' pieces.
316 One argument is L1 and H1; the other, L2 and H2.
317 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
320 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
321 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
322 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
324 HOST_WIDE_INT arg1[4];
325 HOST_WIDE_INT arg2[4];
/* Full 8-digit product; the upper 4 digits are used only for the
   overflow check below.  */
326 HOST_WIDE_INT prod[4 * 2];
327 unsigned HOST_WIDE_INT carry;
329 unsigned HOST_WIDE_INT toplow, neglow;
330 HOST_WIDE_INT tophigh, neghigh;
332 encode (arg1, l1, h1);
333 encode (arg2, l2, h2);
335 memset (prod, 0, sizeof prod);
/* Schoolbook multiplication on half-word digits.  */
337 for (i = 0; i < 4; i++)
340 for (j = 0; j < 4; j++)
343 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
344 carry += arg1[i] * arg2[j];
345 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
347 prod[k] = LOWPART (carry);
348 carry = HIGHPART (carry);
353 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
355 /* Check for overflow by calculating the top half of the answer in full;
356 it should agree with the low half's sign bit. */
357 decode (prod + 4, &toplow, &tophigh);
/* Correct the unsigned top half for each negative (signed) operand.  */
360 neg_double (l2, h2, &neglow, &neghigh);
361 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
365 neg_double (l1, h1, &neglow, &neghigh);
366 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
/* No overflow iff the top half is all zeros (nonnegative result) or all
   ones (negative result).  */
368 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
371 /* Shift the doubleword integer in L1, H1 left by COUNT places
372 keeping only PREC bits of result.
373 Shift right if COUNT is negative.
374 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
375 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
378 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
379 HOST_WIDE_INT count, unsigned int prec,
380 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
382 unsigned HOST_WIDE_INT signmask;
/* Negative count means shift right instead.  */
386 rshift_double (l1, h1, -count, prec, lv, hv, arith);
390 if (SHIFT_COUNT_TRUNCATED)
393 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
395 /* Shifting by the host word size is undefined according to the
396 ANSI standard, so we must handle this as a special case. */
400 else if (count >= HOST_BITS_PER_WIDE_INT)
402 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
/* The double shift below avoids the undefined behavior of shifting by
   exactly HOST_BITS_PER_WIDE_INT when count == 0.  */
407 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
408 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
412 /* Sign extend all bits that are beyond the precision. */
/* signmask is all-ones if the result's sign bit (bit prec-1) is set,
   else zero.  */
414 signmask = -((prec > HOST_BITS_PER_WIDE_INT
415 ? ((unsigned HOST_WIDE_INT) *hv
416 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
417 : (*lv >> (prec - 1))) & 1);
419 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
421 else if (prec >= HOST_BITS_PER_WIDE_INT)
423 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
424 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
429 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
430 *lv |= signmask << prec;
434 /* Shift the doubleword integer in L1, H1 right by COUNT places
435 keeping only PREC bits of result. COUNT must be positive.
436 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
437 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
440 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
441 HOST_WIDE_INT count, unsigned int prec,
442 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
445 unsigned HOST_WIDE_INT signmask;
/* For arithmetic shifts, signmask is all-ones when H1 is negative.  */
448 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
451 if (SHIFT_COUNT_TRUNCATED)
454 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
456 /* Shifting by the host word size is undefined according to the
457 ANSI standard, so we must handle this as a special case. */
461 else if (count >= HOST_BITS_PER_WIDE_INT)
464 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
468 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
/* Double shift sidesteps undefined shift-by-width when count == 0.  */
470 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
473 /* Zero / sign extend all bits that are beyond the precision. */
475 if (count >= (HOST_WIDE_INT)prec)
480 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
482 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
484 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
485 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
490 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
491 *lv |= signmask << (prec - count);
495 /* Rotate the doubleword integer in L1, H1 left by COUNT places
496 keeping only PREC bits of result.
497 Rotate right if COUNT is negative.
498 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
501 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
502 HOST_WIDE_INT count, unsigned int prec,
503 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
505 unsigned HOST_WIDE_INT s1l, s2l;
506 HOST_WIDE_INT s1h, s2h;
/* Rotate = (x << count) | (x >> (prec - count)), both shifts logical.  */
512 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
513 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
518 /* Rotate the doubleword integer in L1, H1 left by COUNT places
519 keeping only PREC bits of result. COUNT must be positive.
520 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): despite the comment above (copied from lrotate_double
   upstream), this is the RIGHT-rotate variant, as the shifts below show.  */
523 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
524 HOST_WIDE_INT count, unsigned int prec,
525 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
527 unsigned HOST_WIDE_INT s1l, s2l;
528 HOST_WIDE_INT s1h, s2h;
/* Rotate right = (x >> count) | (x << (prec - count)).  */
534 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
535 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
540 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
541 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
542 CODE is a tree code for a kind of division, one of
543 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
545 It controls how the quotient is rounded to an integer.
546 Return nonzero if the operation overflows.
547 UNS nonzero says do unsigned division. */
/* NOTE(review): this excerpt has many missing lines (original numbering
   skips); the body below is left byte-identical.  */
550 div_and_round_double (enum tree_code code, int uns,
551 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
552 HOST_WIDE_INT hnum_orig,
553 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
554 HOST_WIDE_INT hden_orig,
555 unsigned HOST_WIDE_INT *lquo,
556 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
560 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
561 HOST_WIDE_INT den[4], quo[4];
563 unsigned HOST_WIDE_INT work;
564 unsigned HOST_WIDE_INT carry = 0;
565 unsigned HOST_WIDE_INT lnum = lnum_orig;
566 HOST_WIDE_INT hnum = hnum_orig;
567 unsigned HOST_WIDE_INT lden = lden_orig;
568 HOST_WIDE_INT hden = hden_orig;
/* Division by zero: flag overflow and substitute 1 to avoid crashing.  */
571 if (hden == 0 && lden == 0)
572 overflow = 1, lden = 1;
574 /* Calculate quotient sign and convert operands to unsigned. */
580 /* (minimum integer) / (-1) is the only overflow case. */
581 if (neg_double (lnum, hnum, &lnum, &hnum)
582 && ((HOST_WIDE_INT) lden & hden) == -1)
588 neg_double (lden, hden, &lden, &hden);
592 if (hnum == 0 && hden == 0)
593 { /* single precision */
595 /* This unsigned division rounds toward zero. */
601 { /* trivial case: dividend < divisor */
602 /* hden != 0 already checked. */
609 memset (quo, 0, sizeof quo);
611 memset (num, 0, sizeof num); /* to zero 9th element */
612 memset (den, 0, sizeof den);
614 encode (num, lnum, hnum);
615 encode (den, lden, hden);
617 /* Special code for when the divisor < BASE. */
618 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
620 /* hnum != 0 already checked. */
621 for (i = 4 - 1; i >= 0; i--)
623 work = num[i] + carry * BASE;
624 quo[i] = work / lden;
630 /* Full double precision division,
631 with thanks to Don Knuth's "Seminumerical Algorithms". */
632 int num_hi_sig, den_hi_sig;
633 unsigned HOST_WIDE_INT quo_est, scale;
635 /* Find the highest nonzero divisor digit. */
636 for (i = 4 - 1;; i--)
643 /* Ensure that the first digit of the divisor is at least BASE/2.
644 This is required by the quotient digit estimation algorithm. */
646 scale = BASE / (den[den_hi_sig] + 1);
648 { /* scale divisor and dividend */
650 for (i = 0; i <= 4 - 1; i++)
652 work = (num[i] * scale) + carry;
653 num[i] = LOWPART (work);
654 carry = HIGHPART (work);
659 for (i = 0; i <= 4 - 1; i++)
661 work = (den[i] * scale) + carry;
662 den[i] = LOWPART (work);
663 carry = HIGHPART (work);
664 if (den[i] != 0) den_hi_sig = i;
/* Main loop of Knuth's Algorithm D: one quotient digit per iteration.  */
671 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
673 /* Guess the next quotient digit, quo_est, by dividing the first
674 two remaining dividend digits by the high order quotient digit.
675 quo_est is never low and is at most 2 high. */
676 unsigned HOST_WIDE_INT tmp;
678 num_hi_sig = i + den_hi_sig + 1;
679 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
680 if (num[num_hi_sig] != den[den_hi_sig])
681 quo_est = work / den[den_hi_sig];
685 /* Refine quo_est so it's usually correct, and at most one high. */
686 tmp = work - quo_est * den[den_hi_sig];
688 && (den[den_hi_sig - 1] * quo_est
689 > (tmp * BASE + num[num_hi_sig - 2])))
692 /* Try QUO_EST as the quotient digit, by multiplying the
693 divisor by QUO_EST and subtracting from the remaining dividend.
694 Keep in mind that QUO_EST is the I - 1st digit. */
697 for (j = 0; j <= den_hi_sig; j++)
699 work = quo_est * den[j] + carry;
700 carry = HIGHPART (work);
701 work = num[i + j] - LOWPART (work);
702 num[i + j] = LOWPART (work);
703 carry += HIGHPART (work) != 0;
706 /* If quo_est was high by one, then num[i] went negative and
707 we need to correct things. */
708 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
711 carry = 0; /* add divisor back in */
712 for (j = 0; j <= den_hi_sig; j++)
714 work = num[i + j] + den[j] + carry;
715 carry = HIGHPART (work);
716 num[i + j] = LOWPART (work);
719 num [num_hi_sig] += carry;
722 /* Store the quotient digit. */
727 decode (quo, lquo, hquo);
730 /* If result is negative, make it so. */
732 neg_double (*lquo, *hquo, lquo, hquo);
734 /* Compute trial remainder: rem = num - (quo * den) */
735 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
736 neg_double (*lrem, *hrem, lrem, hrem);
737 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Adjust quotient (and below, remainder) for the requested rounding.  */
742 case TRUNC_MOD_EXPR: /* round toward zero */
743 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
747 case FLOOR_MOD_EXPR: /* round toward negative infinity */
748 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
751 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
759 case CEIL_MOD_EXPR: /* round toward positive infinity */
760 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
762 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
770 case ROUND_MOD_EXPR: /* round to closest integer */
772 unsigned HOST_WIDE_INT labs_rem = *lrem;
773 HOST_WIDE_INT habs_rem = *hrem;
774 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
775 HOST_WIDE_INT habs_den = hden, htwice;
777 /* Get absolute values. */
779 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
781 neg_double (lden, hden, &labs_den, &habs_den);
783 /* If (2 * abs (lrem) >= abs (lden)) */
784 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
/* NOTE(review): "<wice" below looks like "&ltwice" mangled by HTML
   escaping during extraction -- confirm against the upstream source.  */
785 labs_rem, habs_rem, <wice, &htwice);
787 if (((unsigned HOST_WIDE_INT) habs_den
788 < (unsigned HOST_WIDE_INT) htwice)
789 || (((unsigned HOST_WIDE_INT) habs_den
790 == (unsigned HOST_WIDE_INT) htwice)
791 && (labs_den < ltwice)))
795 add_double (*lquo, *hquo,
796 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
799 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
811 /* Compute true remainder: rem = num - (quo * den) */
812 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
813 neg_double (*lrem, *hrem, lrem, hrem);
814 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
818 /* Return true if built-in mathematical function specified by CODE
819 preserves the sign of its argument, i.e. -f(x) == f(-x). */
/* NOTE(review): the function body (a switch over CODE upstream) is missing
   from this excerpt -- only the signature line survives.  */
822 negate_mathfn_p (enum built_in_function code)
846 /* Determine whether an expression T can be cheaply negated using
847 the function negate_expr. */
/* Predicate only -- builds no trees.  NOTE(review): several case labels and
   braces are missing from this excerpt; code left byte-identical.  */
850 negate_expr_p (tree t)
852 unsigned HOST_WIDE_INT val;
859 type = TREE_TYPE (t);
862 switch (TREE_CODE (t))
/* Negating a constant may trap with -ftrapv unless the type is unsigned.  */
865 if (TYPE_UNSIGNED (type) || ! flag_trapv)
868 /* Check that -CST will not overflow type. */
869 prec = TYPE_PRECISION (type);
870 if (prec > HOST_BITS_PER_WIDE_INT)
872 if (TREE_INT_CST_LOW (t) != 0)
874 prec -= HOST_BITS_PER_WIDE_INT;
875 val = TREE_INT_CST_HIGH (t);
878 val = TREE_INT_CST_LOW (t);
879 if (prec < HOST_BITS_PER_WIDE_INT)
880 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
/* Only the most negative value (1 << (prec-1)) overflows on negation.  */
881 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
888 return negate_expr_p (TREE_REALPART (t))
889 && negate_expr_p (TREE_IMAGPART (t));
892 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
894 /* -(A + B) -> (-B) - A. */
895 if (negate_expr_p (TREE_OPERAND (t, 1))
896 && reorder_operands_p (TREE_OPERAND (t, 0),
897 TREE_OPERAND (t, 1)))
899 /* -(A + B) -> (-A) - B. */
900 return negate_expr_p (TREE_OPERAND (t, 0));
903 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
904 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
905 && reorder_operands_p (TREE_OPERAND (t, 0),
906 TREE_OPERAND (t, 1));
909 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* For multiply/divide, negating either operand suffices.  */
915 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
916 return negate_expr_p (TREE_OPERAND (t, 1))
917 || negate_expr_p (TREE_OPERAND (t, 0));
921 /* Negate -((double)float) as (double)(-float). */
922 if (TREE_CODE (type) == REAL_TYPE)
924 tree tem = strip_float_extensions (t);
926 return negate_expr_p (tem);
931 /* Negate -f(x) as f(-x). */
932 if (negate_mathfn_p (builtin_mathfn_code (t)))
933 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
937 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
938 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
940 tree op1 = TREE_OPERAND (t, 1);
941 if (TREE_INT_CST_HIGH (op1) == 0
942 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
943 == TREE_INT_CST_LOW (op1))
954 /* Given T, an expression, return the negation of T. Allow for T to be
955 null, in which case return null. */
/* Counterpart of negate_expr_p: actually builds the negated tree.
   NOTE(review): the signature line and several case labels are missing from
   this excerpt; code left byte-identical.  */
966 type = TREE_TYPE (t);
969 switch (TREE_CODE (t))
972 tem = fold_negate_const (t, type);
973 if (! TREE_OVERFLOW (tem)
974 || TYPE_UNSIGNED (type)
980 tem = fold_negate_const (t, type);
981 /* Two's complement FP formats, such as c4x, may overflow. */
982 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
983 return fold_convert (type, tem);
988 tree rpart = negate_expr (TREE_REALPART (t));
989 tree ipart = negate_expr (TREE_IMAGPART (t));
/* Only rebuild a COMPLEX_CST when both halves folded to constants.  */
991 if ((TREE_CODE (rpart) == REAL_CST
992 && TREE_CODE (ipart) == REAL_CST)
993 || (TREE_CODE (rpart) == INTEGER_CST
994 && TREE_CODE (ipart) == INTEGER_CST))
995 return build_complex (type, rpart, ipart);
/* -(-X) -> X.  */
1000 return fold_convert (type, TREE_OPERAND (t, 0));
1003 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1005 /* -(A + B) -> (-B) - A. */
1006 if (negate_expr_p (TREE_OPERAND (t, 1))
1007 && reorder_operands_p (TREE_OPERAND (t, 0),
1008 TREE_OPERAND (t, 1)))
1010 tem = negate_expr (TREE_OPERAND (t, 1));
1011 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1012 tem, TREE_OPERAND (t, 0)));
1013 return fold_convert (type, tem);
1016 /* -(A + B) -> (-A) - B. */
1017 if (negate_expr_p (TREE_OPERAND (t, 0)))
1019 tem = negate_expr (TREE_OPERAND (t, 0));
1020 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1021 tem, TREE_OPERAND (t, 1)));
1022 return fold_convert (type, tem);
1028 /* - (A - B) -> B - A */
1029 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1030 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1031 return fold_convert (type,
1032 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1033 TREE_OPERAND (t, 1),
1034 TREE_OPERAND (t, 0))));
1038 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* Push the negation into whichever multiply/divide operand accepts it.  */
1044 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1046 tem = TREE_OPERAND (t, 1);
1047 if (negate_expr_p (tem))
1048 return fold_convert (type,
1049 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1050 TREE_OPERAND (t, 0),
1051 negate_expr (tem))));
1052 tem = TREE_OPERAND (t, 0);
1053 if (negate_expr_p (tem))
1054 return fold_convert (type,
1055 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1057 TREE_OPERAND (t, 1))));
1062 /* Convert -((double)float) into (double)(-float). */
1063 if (TREE_CODE (type) == REAL_TYPE)
1065 tem = strip_float_extensions (t);
1066 if (tem != t && negate_expr_p (tem))
1067 return fold_convert (type, negate_expr (tem));
1072 /* Negate -f(x) as f(-x). */
1073 if (negate_mathfn_p (builtin_mathfn_code (t))
1074 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1076 tree fndecl, arg, arglist;
1078 fndecl = get_callee_fndecl (t);
1079 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1080 arglist = build_tree_list (NULL_TREE, arg);
1081 return build_function_call_expr (fndecl, arglist);
1086 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1087 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1089 tree op1 = TREE_OPERAND (t, 1);
1090 if (TREE_INT_CST_HIGH (op1) == 0
1091 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1092 == TREE_INT_CST_LOW (op1))
/* Flip signedness so the arithmetic shift becomes logical (or vice versa).  */
1094 tree ntype = TYPE_UNSIGNED (type)
1095 ? lang_hooks.types.signed_type (type)
1096 : lang_hooks.types.unsigned_type (type);
1097 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1098 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1099 return fold_convert (type, temp);
/* Fallback: wrap T in an explicit NEGATE_EXPR.  */
1108 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1109 return fold_convert (type, tem);
1112 /* Split a tree IN into a constant, literal and variable parts that could be
1113 combined with CODE to make IN. "constant" means an expression with
1114 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1115 commutative arithmetic operation. Store the constant part into *CONP,
1116 the literal in *LITP and return the variable part. If a part isn't
1117 present, set it to null. If the tree does not decompose in this way,
1118 return the entire tree as the variable part and the other parts as null.
1120 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1121 case, we negate an operand that was subtracted. Except if it is a
1122 literal for which we use *MINUS_LITP instead.
1124 If NEGATE_P is true, we are negating all of IN, again except a literal
1125 for which we use *MINUS_LITP instead.
1127 If IN is itself a literal or constant, return it as appropriate.
1129 Note that we do not guarantee that any of the three values will be the
1130 same type as IN, but they will have the same signedness and mode. */
1133 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1134 tree *minus_litp, int negate_p)
1142 /* Strip any conversions that don't change the machine mode or signedness. */
1143 STRIP_SIGN_NOPS (in);
1145 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1147 else if (TREE_CODE (in) == code
1148 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1149 /* We can associate addition and subtraction together (even
1150 though the C standard doesn't say so) for integers because
1151 the value is not affected. For reals, the value might be
1152 affected, so we can't. */
1153 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1154 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1156 tree op0 = TREE_OPERAND (in, 0);
1157 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p: operand 1 was subtracted; neg_*_p track which parts to negate.  */
1158 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1159 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1161 /* First see if either of the operands is a literal, then a constant. */
1162 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1163 *litp = op0, op0 = 0;
1164 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1165 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1167 if (op0 != 0 && TREE_CONSTANT (op0))
1168 *conp = op0, op0 = 0;
1169 else if (op1 != 0 && TREE_CONSTANT (op1))
1170 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1172 /* If we haven't dealt with either operand, this is not a case we can
1173 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1174 if (op0 != 0 && op1 != 0)
1179 var = op1, neg_var_p = neg1_p;
1181 /* Now do any needed negations. */
1183 *minus_litp = *litp, *litp = 0;
1185 *conp = negate_expr (*conp);
1187 var = negate_expr (var);
1189 else if (TREE_CONSTANT (in))
/* NEGATE_P handling: literals move to *MINUS_LITP rather than being
   rewritten; constant and variable parts are negated outright.  */
1197 *minus_litp = *litp, *litp = 0;
1198 else if (*minus_litp)
1199 *litp = *minus_litp, *minus_litp = 0;
1200 *conp = negate_expr (*conp);
1201 var = negate_expr (var);
1207 /* Re-associate trees split by the above function. T1 and T2 are either
1208 expressions to associate or null. Return the new expression, if any. If
1209 we build an operation, do it in TYPE and with CODE. */
1212 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1219 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1220 try to fold this since we will have infinite recursion. But do
1221 deal with any NEGATE_EXPRs. */
1222 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1223 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1225 if (code == PLUS_EXPR)
/* X + (-Y) -> X - Y, on either side, instead of folding.  */
1227 if (TREE_CODE (t1) == NEGATE_EXPR)
1228 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1229 fold_convert (type, TREE_OPERAND (t1, 0)));
1230 else if (TREE_CODE (t2) == NEGATE_EXPR)
1231 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1232 fold_convert (type, TREE_OPERAND (t2, 0)));
/* Build without fold to avoid the recursion noted above.  */
1234 return build2 (code, type, fold_convert (type, t1),
1235 fold_convert (type, t2));
/* Safe to fold in the general case.  */
1238 return fold (build2 (code, type, fold_convert (type, t1),
1239 fold_convert (type, t2)));
1242 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1243 to produce a new constant.
1245 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1248 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Each INTEGER_CST is a double-word value held as a (low, high) pair of
   HOST_WIDE_INTs; all arithmetic below is done on those pairs.  */
1250 unsigned HOST_WIDE_INT int1l, int2l;
1251 HOST_WIDE_INT int1h, int2h;
1252 unsigned HOST_WIDE_INT low;
/* Receives the unwanted half of a div/mod result (quotient or remainder).  */
1254 unsigned HOST_WIDE_INT garbagel;
1255 HOST_WIDE_INT garbageh;
1257 tree type = TREE_TYPE (arg1);
1258 int uns = TYPE_UNSIGNED (type);
1260 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1262 int no_overflow = 0;
1264 int1l = TREE_INT_CST_LOW (arg1);
1265 int1h = TREE_INT_CST_HIGH (arg1);
1266 int2l = TREE_INT_CST_LOW (arg2);
1267 int2h = TREE_INT_CST_HIGH (arg2);
/* Dispatch on CODE; each case leaves the double-word result in (low, hi)
   and may set 'overflow'.  */
/* Bitwise OR.  */
1272 low = int1l | int2l, hi = int1h | int2h;
/* Bitwise XOR.  */
1276 low = int1l ^ int2l, hi = int1h ^ int2h;
/* Bitwise AND.  */
1280 low = int1l & int2l, hi = int1h & int2h;
1286 /* It's unclear from the C standard whether shifts can overflow.
1287 The following code ignores overflow; perhaps a C standard
1288 interpretation ruling is needed. */
1289 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1297 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
/* Addition; add_double reports signed overflow.  */
1302 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction: negate ARG2 and add, then judge overflow from the
   signs of the operands and the result.  */
1306 neg_double (int2l, int2h, &low, &hi);
1307 add_double (int1l, int1h, low, hi, &low, &hi);
1308 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
/* Multiplication.  */
1312 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1315 case TRUNC_DIV_EXPR:
1316 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1317 case EXACT_DIV_EXPR:
1318 /* This is a shortcut for a common special case: both operands are
small nonnegative single-word values with no prior overflow, so host
division gives the right answer directly. */
1319 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1320 && ! TREE_CONSTANT_OVERFLOW (arg1)
1321 && ! TREE_CONSTANT_OVERFLOW (arg2)
1322 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1324 if (code == CEIL_DIV_EXPR)
1327 low = int1l / int2l, hi = 0;
1331 /* ... fall through ... */
1333 case ROUND_DIV_EXPR:
/* Division by one is the identity.  */
1334 if (int2h == 0 && int2l == 1)
1336 low = int1l, hi = int1h;
/* Nonzero value divided by itself is one.  */
1339 if (int1l == int2l && int1h == int2h
1340 && ! (int1l == 0 && int1h == 0))
/* General case: full double-word division; the remainder is discarded.  */
1345 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1346 &low, &hi, &garbagel, &garbageh);
1349 case TRUNC_MOD_EXPR:
1350 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1351 /* This is a shortcut for a common special case. */
1352 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1353 && ! TREE_CONSTANT_OVERFLOW (arg1)
1354 && ! TREE_CONSTANT_OVERFLOW (arg2)
1355 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1357 if (code == CEIL_MOD_EXPR)
1359 low = int1l % int2l, hi = 0;
1363 /* ... fall through ... */
1365 case ROUND_MOD_EXPR:
/* General case: full double-word division; the quotient is discarded.  */
1366 overflow = div_and_round_double (code, uns,
1367 int1l, int1h, int2l, int2h,
1368 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compute LOW = (arg1 < arg2) with an unsigned or signed
   double-word comparison as appropriate for the type.  */
1374 low = (((unsigned HOST_WIDE_INT) int1h
1375 < (unsigned HOST_WIDE_INT) int2h)
1376 || (((unsigned HOST_WIDE_INT) int1h
1377 == (unsigned HOST_WIDE_INT) int2h)
1380 low = (int1h < int2h
1381 || (int1h == int2h && int1l < int2l));
/* Select whichever operand the comparison picked for MIN vs MAX.  */
1383 if (low == (code == MIN_EXPR))
1384 low = int1l, hi = int1h;
1386 low = int2l, hi = int2h;
1393 /* If this is for a sizetype, can be represented as one (signed)
1394 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1397 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1398 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1399 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1400 return size_int_type_wide (low, type);
/* Otherwise build a fresh INTEGER_CST and propagate the overflow flags
   from the computation and from both operands.  */
1403 t = build_int_2 (low, hi);
1404 TREE_TYPE (t) = TREE_TYPE (arg1);
1409 ? (!uns || is_sizetype) && overflow
1410 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1412 | TREE_OVERFLOW (arg1)
1413 | TREE_OVERFLOW (arg2));
1415 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1416 So check if force_fit_type truncated the value. */
1418 && ! TREE_OVERFLOW (t)
1419 && (TREE_INT_CST_HIGH (t) != hi
1420 || TREE_INT_CST_LOW (t) != low))
1421 TREE_OVERFLOW (t) = 1;
1423 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1424 | TREE_CONSTANT_OVERFLOW (arg1)
1425 | TREE_CONSTANT_OVERFLOW (arg2));
1429 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1430 constant.  We assume ARG1 and ARG2 have the same data type, or at least
1431 are the same kind of constant and the same machine mode.
1433 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1436 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Integer constants are delegated wholesale to int_const_binop.  */
1441 if (TREE_CODE (arg1) == INTEGER_CST)
1442 return int_const_binop (code, arg1, arg2, notrunc);
1444 if (TREE_CODE (arg1) == REAL_CST)
1446 enum machine_mode mode;
1449 REAL_VALUE_TYPE value;
1452 d1 = TREE_REAL_CST (arg1);
1453 d2 = TREE_REAL_CST (arg2);
1455 type = TREE_TYPE (arg1);
1456 mode = TYPE_MODE (type);
1458 /* Don't perform operation if we honor signaling NaNs and
1459 either operand is a NaN. */
1460 if (HONOR_SNANS (mode)
1461 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1464 /* Don't perform operation if it would raise a division
1465 by zero exception. */
1466 if (code == RDIV_EXPR
1467 && REAL_VALUES_EQUAL (d2, dconst0)
1468 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1471 /* If either operand is a NaN, just return it.  Otherwise, set up
1472 for floating-point trap; we return an overflow. */
1473 if (REAL_VALUE_ISNAN (d1))
1475 else if (REAL_VALUE_ISNAN (d2))
/* Do the arithmetic in the internal representation, then truncate the
   result back to the target mode and fold overflow flags in.  */
1478 REAL_ARITHMETIC (value, code, d1, d2);
1480 t = build_real (type, real_value_truncate (mode, value));
1483 = (force_fit_type (t, 0)
1484 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1485 TREE_CONSTANT_OVERFLOW (t)
1487 | TREE_CONSTANT_OVERFLOW (arg1)
1488 | TREE_CONSTANT_OVERFLOW (arg2);
/* Complex constants: fold the real and imaginary parts componentwise,
   recursing through const_binop for each part.  */
1491 if (TREE_CODE (arg1) == COMPLEX_CST)
1493 tree type = TREE_TYPE (arg1);
1494 tree r1 = TREE_REALPART (arg1);
1495 tree i1 = TREE_IMAGPART (arg1);
1496 tree r2 = TREE_REALPART (arg2);
1497 tree i2 = TREE_IMAGPART (arg2);
/* Complex addition: (r1+r2) + (i1+i2)i.  */
1503 t = build_complex (type,
1504 const_binop (PLUS_EXPR, r1, r2, notrunc),
1505 const_binop (PLUS_EXPR, i1, i2, notrunc));
/* Complex subtraction: (r1-r2) + (i1-i2)i.  */
1509 t = build_complex (type,
1510 const_binop (MINUS_EXPR, r1, r2, notrunc),
1511 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* Complex multiplication: real part is a difference of products and the
   imaginary part a sum of products — presumably the standard
   (a+bi)(c+di) expansion; the inner operands are supplied below.  */
1515 t = build_complex (type,
1516 const_binop (MINUS_EXPR,
1517 const_binop (MULT_EXPR,
1519 const_binop (MULT_EXPR,
1522 const_binop (PLUS_EXPR,
1523 const_binop (MULT_EXPR,
1525 const_binop (MULT_EXPR,
/* Complex division: both components are divided by the squared
   magnitude of ARG2, |arg2|^2 = r2*r2 + i2*i2.  */
1533 = const_binop (PLUS_EXPR,
1534 const_binop (MULT_EXPR, r2, r2, notrunc),
1535 const_binop (MULT_EXPR, i2, i2, notrunc),
1538 t = build_complex (type,
1540 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1541 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1542 const_binop (PLUS_EXPR,
1543 const_binop (MULT_EXPR, r1, r2,
1545 const_binop (MULT_EXPR, i1, i2,
1548 magsquared, notrunc),
1550 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1551 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1552 const_binop (MINUS_EXPR,
1553 const_binop (MULT_EXPR, i1, r2,
1555 const_binop (MULT_EXPR, r1, i2,
1558 magsquared, notrunc));
1570 /* These are the hash table functions for the hash table of INTEGER_CST
1571 nodes of a sizetype. */
1573 /* Return the hash code for X, an INTEGER_CST tree node. */
1576 size_htab_hash (const void *x)
/* Mix both value words, the (interned) type pointer, and the overflow
   bit so distinct constants rarely collide.  */
1580 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1581 ^ htab_hash_pointer (TREE_TYPE (t))
1582 ^ (TREE_OVERFLOW (t) << 20));
1585 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1586 is the same as that given by *Y, also an INTEGER_CST node.  Value, type
and overflow flag must all match for the nodes to be interchangeable. */
1589 size_htab_eq (const void *x, const void *y)
1594 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1595 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1596 && TREE_TYPE (xt) == TREE_TYPE (yt)
1597 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1600 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1601 bits are given by NUMBER and of the sizetype represented by KIND. */
1604 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
/* KIND indexes the table of the various sizetypes (sizetype,
   bitsizetype, their signed/unsigned variants).  */
1606 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1609 /* Likewise, but the desired type is specified explicitly. */
/* Scratch INTEGER_CST node reused across calls; rooted for GC.  */
1611 static GTY (()) tree new_const;
/* Hash table of interned sizetype INTEGER_CSTs; entries survive GC only
   while otherwise reachable (if_marked).  */
1612 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1616 size_int_type_wide (HOST_WIDE_INT number, tree type)
/* Lazily create the table and the first scratch node.  */
1622 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1623 new_const = make_node (INTEGER_CST);
1626 /* Adjust NEW_CONST to be the constant we want.  If it's already in the
1627 hash table, we return the value from the hash table.  Otherwise, we
1628 place that in the hash table and make a new node for the next time. */
1629 TREE_INT_CST_LOW (new_const) = number;
1630 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1631 TREE_TYPE (new_const) = type;
1632 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1633 = force_fit_type (new_const, 0);
1635 slot = htab_find_slot (size_htab, new_const, INSERT);
/* The scratch node was consumed by the table; allocate a fresh one.  */
1641 new_const = make_node (INTEGER_CST);
1645 return (tree) *slot;
1648 /* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
1649 is a tree code.  The type of the result is taken from the operands.
1650 Both must be the same type integer type and it must be a size type.
1651 If the operands are constant, so is the result. */
1654 size_binop (enum tree_code code, tree arg0, tree arg1)
1656 tree type = TREE_TYPE (arg0);
/* Sanity check: both operands must share one sizetype.  */
1658 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1659 || type != TREE_TYPE (arg1))
1662 /* Handle the special case of two integer constants faster. */
1663 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1665 /* And some specific cases even faster than that: identity
operations (0 + x, x +/- 0, 1 * x) need no arithmetic at all. */
1666 if (code == PLUS_EXPR && integer_zerop (arg0))
1668 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1669 && integer_zerop (arg1))
1671 else if (code == MULT_EXPR && integer_onep (arg0))
1674 /* Handle general case of two integer constants. */
1675 return int_const_binop (code, arg0, arg1, 0);
1678 if (arg0 == error_mark_node || arg1 == error_mark_node)
1679 return error_mark_node;
/* Non-constant operands: build the expression and let fold simplify.  */
1681 return fold (build2 (code, type, arg0, arg1));
1684 /* Given two values, either both of sizetype or both of bitsizetype,
1685 compute the difference between the two values.  Return the value
1686 in signed type corresponding to the type of the operands. */
1689 size_diffop (tree arg0, tree arg1)
1691 tree type = TREE_TYPE (arg0);
/* Sanity check: both operands must share one sizetype.  */
1694 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1695 || type != TREE_TYPE (arg1))
1698 /* If the type is already signed, just do the simple thing. */
1699 if (!TYPE_UNSIGNED (type))
1700 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of TYPE for the result.  */
1702 ctype = (type == bitsizetype || type == ubitsizetype
1703 ? sbitsizetype : ssizetype);
1705 /* If either operand is not a constant, do the conversions to the signed
1706 type and subtract.  The hardware will do the right thing with any
1707 overflow in the subtraction. */
1708 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1709 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1710 fold_convert (ctype, arg1));
1712 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1713 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1714 overflow) and negate (which can't either).  Special-case a result
1715 of zero while we're here. */
1716 if (tree_int_cst_equal (arg0, arg1))
1717 return fold_convert (ctype, integer_zero_node);
1718 else if (tree_int_cst_lt (arg1, arg0))
1719 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* arg0 < arg1: compute -(arg1 - arg0) as 0 - convert(arg1 - arg0).  */
1721 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1722 fold_convert (ctype, size_binop (MINUS_EXPR,
1727 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1728 type TYPE.  If no simplification can be done return NULL_TREE. */
1731 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* A conversion to the same type is a no-op.  */
1736 if (TREE_TYPE (arg1) == type)
1739 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1741 if (TREE_CODE (arg1) == INTEGER_CST)
1743 /* If we would build a constant wider than GCC supports,
1744 leave the conversion unfolded. */
1745 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1748 /* If we are trying to make a sizetype for a small integer, use
1749 size_int to pick up cached types to reduce duplicate nodes. */
1750 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1751 && !TREE_CONSTANT_OVERFLOW (arg1)
1752 && compare_tree_int (arg1, 10000) < 0)
1753 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1755 /* Given an integer constant, make new constant with new type,
1756 appropriately sign-extended or truncated. */
1757 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1758 TREE_INT_CST_HIGH (arg1));
1759 TREE_TYPE (t) = type;
1760 /* Indicate an overflow if (1) ARG1 already overflowed,
1761 or (2) force_fit_type indicates an overflow.
1762 Tell force_fit_type that an overflow has already occurred
1763 if ARG1 is a too-large unsigned value and T is signed.
1764 But don't indicate an overflow if converting a pointer. */
1766 = ((force_fit_type (t,
1767 (TREE_INT_CST_HIGH (arg1) < 0
1768 && (TYPE_UNSIGNED (type)
1769 < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
1770 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1771 || TREE_OVERFLOW (arg1));
1772 TREE_CONSTANT_OVERFLOW (t)
1773 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1776 else if (TREE_CODE (arg1) == REAL_CST)
1778 /* The following code implements the floating point to integer
1779 conversion rules required by the Java Language Specification,
1780 that IEEE NaNs are mapped to zero and values that overflow
1781 the target precision saturate, i.e. values greater than
1782 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1783 are mapped to INT_MIN.  These semantics are allowed by the
1784 C and C++ standards that simply state that the behavior of
1785 FP-to-integer conversion is unspecified upon overflow. */
1787 HOST_WIDE_INT high, low;
1790 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Round toward an integer according to the conversion CODE.  */
1794 case FIX_TRUNC_EXPR:
1795 real_trunc (&r, VOIDmode, &x);
1799 real_ceil (&r, VOIDmode, &x);
1802 case FIX_FLOOR_EXPR:
1803 real_floor (&r, VOIDmode, &x);
1806 case FIX_ROUND_EXPR:
1807 real_round (&r, VOIDmode, &x);
1814 /* If R is NaN, return zero and show we have an overflow. */
1815 if (REAL_VALUE_ISNAN (r))
1822 /* See if R is less than the lower bound or greater than the
upper bound of TYPE; if so, saturate to that bound and record
the overflow. */
1827 tree lt = TYPE_MIN_VALUE (type);
1828 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1829 if (REAL_VALUES_LESS (r, l))
1832 high = TREE_INT_CST_HIGH (lt);
1833 low = TREE_INT_CST_LOW (lt);
1839 tree ut = TYPE_MAX_VALUE (type);
1842 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1843 if (REAL_VALUES_LESS (u, r))
1846 high = TREE_INT_CST_HIGH (ut);
1847 low = TREE_INT_CST_LOW (ut);
/* In range: convert the rounded value to a double-word integer.  */
1853 REAL_VALUE_TO_INT (&low, &high, r);
1855 t = build_int_2 (low, high);
1856 TREE_TYPE (t) = type;
1858 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1859 TREE_CONSTANT_OVERFLOW (t)
1860 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1864 else if (TREE_CODE (type) == REAL_TYPE)
1866 if (TREE_CODE (arg1) == INTEGER_CST)
1867 return build_real_from_int_cst (type, arg1);
1868 if (TREE_CODE (arg1) == REAL_CST)
/* NaNs are passed through with only the type changed.  */
1870 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1872 /* We make a copy of ARG1 so that we don't modify an
1873 existing constant tree. */
1874 t = copy_node (arg1);
1875 TREE_TYPE (t) = type;
/* Ordinary value: re-round it into the destination mode.  */
1879 t = build_real (type,
1880 real_value_truncate (TYPE_MODE (type),
1881 TREE_REAL_CST (arg1)));
1884 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1885 TREE_CONSTANT_OVERFLOW (t)
1886 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1893 /* Convert expression ARG to type TYPE.  Used by the middle-end for
1894 simple conversions in preference to calling the front-end's convert. */
1897 fold_convert (tree type, tree arg)
1899 tree orig = TREE_TYPE (arg);
/* Propagate errors rather than converting them.  */
1905 if (TREE_CODE (arg) == ERROR_MARK
1906 || TREE_CODE (type) == ERROR_MARK
1907 || TREE_CODE (orig) == ERROR_MARK)
1908 return error_mark_node;
/* Identical (or language-compatible) types need only a NOP_EXPR.  */
1910 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1911 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1912 TYPE_MAIN_VARIANT (orig)))
1913 return fold (build1 (NOP_EXPR, type, arg));
1915 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1916 || TREE_CODE (type) == OFFSET_TYPE)
/* Try to fold constant conversions outright.  */
1918 if (TREE_CODE (arg) == INTEGER_CST)
1920 tem = fold_convert_const (NOP_EXPR, type, arg);
1921 if (tem != NULL_TREE)
1924 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1925 || TREE_CODE (orig) == OFFSET_TYPE)
1926 return fold (build1 (NOP_EXPR, type, arg));
/* Complex-to-scalar conversion keeps only the real part.  */
1927 if (TREE_CODE (orig) == COMPLEX_TYPE)
1929 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1930 return fold_convert (type, tem);
/* A vector of the same total size reinterprets as an integer.  */
1932 if (TREE_CODE (orig) == VECTOR_TYPE
1933 && GET_MODE_SIZE (TYPE_MODE (type))
1934 == GET_MODE_SIZE (TYPE_MODE (orig)))
1935 return fold (build1 (NOP_EXPR, type, arg));
1937 else if (TREE_CODE (type) == REAL_TYPE)
/* Constant integer or real sources fold directly.  */
1939 if (TREE_CODE (arg) == INTEGER_CST)
1941 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1942 if (tem != NULL_TREE)
1945 else if (TREE_CODE (arg) == REAL_CST)
1947 tem = fold_convert_const (NOP_EXPR, type, arg);
1948 if (tem != NULL_TREE)
1952 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1953 return fold (build1 (FLOAT_EXPR, type, arg));
/* Real-to-real: with -ffloat-store use CONVERT_EXPR so the value is
   not kept in excess precision.  */
1954 if (TREE_CODE (orig) == REAL_TYPE)
1955 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1957 if (TREE_CODE (orig) == COMPLEX_TYPE)
1959 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1960 return fold_convert (type, tem);
1963 else if (TREE_CODE (type) == COMPLEX_TYPE)
/* Scalar to complex: the converted value plus a zero imaginary part.  */
1965 if (INTEGRAL_TYPE_P (orig)
1966 || POINTER_TYPE_P (orig)
1967 || TREE_CODE (orig) == REAL_TYPE)
1968 return build2 (COMPLEX_EXPR, type,
1969 fold_convert (TREE_TYPE (type), arg),
1970 fold_convert (TREE_TYPE (type), integer_zero_node));
1971 if (TREE_CODE (orig) == COMPLEX_TYPE)
/* COMPLEX_EXPR operands can be converted piecewise without a
   SAVE_EXPR; any other complex value must be saved first since it
   is referenced twice (real part and imaginary part).  */
1975 if (TREE_CODE (arg) == COMPLEX_EXPR)
1977 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1978 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1979 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1982 arg = save_expr (arg);
1983 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1984 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1985 rpart = fold_convert (TREE_TYPE (type), rpart);
1986 ipart = fold_convert (TREE_TYPE (type), ipart);
1987 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1990 else if (TREE_CODE (type) == VECTOR_TYPE
/* Scalar-to-vector or vector-to-vector of the same mode size is a
   reinterpreting NOP.  */
1992 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1993 && GET_MODE_SIZE (TYPE_MODE (type))
1994 == GET_MODE_SIZE (TYPE_MODE (orig)))
1995 return fold (build1 (NOP_EXPR, type, arg));
1996 if (TREE_CODE (orig) == VECTOR_TYPE
1997 && GET_MODE_SIZE (TYPE_MODE (type))
1998 == GET_MODE_SIZE (TYPE_MODE (orig)))
1999 return fold (build1 (NOP_EXPR, type, arg));
2001 else if (VOID_TYPE_P (type))
2002 return fold (build1 (CONVERT_EXPR, type, arg));
2006 /* Return an expr equal to X but certainly not valid as an lvalue. */
2011 /* We only need to wrap lvalue tree codes. */
2012 switch (TREE_CODE (x))
2024 case ARRAY_RANGE_REF:
2031 case PREINCREMENT_EXPR:
2032 case PREDECREMENT_EXPR:
2035 case TRY_CATCH_EXPR:
2036 case WITH_CLEANUP_EXPR:
2048 /* Assume the worst for front-end tree codes: anything outside the
common code range might be an lvalue, so wrap it too. */
2049 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
/* Wrap X in a NON_LVALUE_EXPR of the same type.  */
2053 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2056 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2057 Zero means allow extended lvalues. */
2059 int pedantic_lvalues;
2061 /* When pedantic, return an expr equal to X but certainly not valid as a
2062 pedantic lvalue.  Otherwise, return X. */
2065 pedantic_non_lvalue (tree x)
/* Only wrap when the front end requested pedantic lvalue rules.  */
2067 if (pedantic_lvalues)
2068 return non_lvalue (x);
2073 /* Given a tree comparison code, return the code that is the logical inverse
2074 of the given code.  It is not safe to do this for floating-point
2075 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2076 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2078 static enum tree_code
2079 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math we cannot exchange signaling and quiet
   comparisons, so give up (except for the cases handled below).  */
2081 if (honor_nans && flag_trapping_math)
/* When NaNs are honored, the logical inverse of an ordered comparison
   is the corresponding unordered one, so that exactly one of the pair
   is true even for unordered operands.  */
2091 return honor_nans ? UNLE_EXPR : LE_EXPR;
2093 return honor_nans ? UNLT_EXPR : LT_EXPR;
2095 return honor_nans ? UNGE_EXPR : GE_EXPR;
2097 return honor_nans ? UNGT_EXPR : GT_EXPR;
2111 return UNORDERED_EXPR;
2112 case UNORDERED_EXPR:
2113 return ORDERED_EXPR;
2119 /* Similar, but return the comparison that results if the operands are
2120 swapped.  This is safe for floating-point because it never exchanges
an ordered comparison for an unordered one (e.g. LT presumably maps
to GT, and EQ/NE to themselves — body elided here). */
2123 swap_tree_comparison (enum tree_code code)
2144 /* Convert a comparison tree code from an enum tree_code representation
2145 into a compcode bit-based encoding.  This function is the inverse of
2146 compcode_to_comparison. */
2148 static enum comparison_code
2149 comparison_to_compcode (enum tree_code code)
/* Map each comparison tree code to its COMPCODE_* bit pattern.  */
2166 return COMPCODE_ORD;
2167 case UNORDERED_EXPR:
2168 return COMPCODE_UNORD;
2170 return COMPCODE_UNLT;
2172 return COMPCODE_UNEQ;
2174 return COMPCODE_UNLE;
2176 return COMPCODE_UNGT;
2178 return COMPCODE_LTGT;
2180 return COMPCODE_UNGE;
2186 /* Convert a compcode bit-based encoding of a comparison operator back
2187 to GCC's enum tree_code representation.  This function is the
2188 inverse of comparison_to_compcode. */
2190 static enum tree_code
2191 compcode_to_comparison (enum comparison_code code)
/* Map each COMPCODE_* bit pattern back to its comparison tree code.  */
2208 return ORDERED_EXPR;
2209 case COMPCODE_UNORD:
2210 return UNORDERED_EXPR;
2228 /* Return a tree for the comparison which is the combination of
2229 doing the AND or OR (depending on CODE) of the two operations LCODE
2230 and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2231 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2232 if this makes the transformation invalid. */
2235 combine_comparisons (enum tree_code code, enum tree_code lcode,
2236 enum tree_code rcode, tree truth_type,
2237 tree ll_arg, tree lr_arg)
2239 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2240 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2241 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2242 enum comparison_code compcode;
/* In the bit encoding, AND of two tests is the intersection of the
   accepted orderings and OR is their union.  */
2246 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2247 compcode = lcompcode & rcompcode;
2250 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2251 compcode = lcompcode | rcompcode;
2260 /* Eliminate unordered comparisons, as well as LTGT and ORD
2261 which are not used unless the mode has NaNs. */
2262 compcode &= ~COMPCODE_UNORD;
2263 if (compcode == COMPCODE_LTGT)
2264 compcode = COMPCODE_NE;
2265 else if (compcode == COMPCODE_ORD)
2266 compcode = COMPCODE_TRUE;
2268 else if (flag_trapping_math)
2270 /* Check that the original operation and the optimized ones will trap
2271 under the same condition. */
2272 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2273 && (lcompcode != COMPCODE_EQ)
2274 && (lcompcode != COMPCODE_ORD);
2275 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2276 && (rcompcode != COMPCODE_EQ)
2277 && (rcompcode != COMPCODE_ORD);
2278 bool trap = (compcode & COMPCODE_UNORD) == 0
2279 && (compcode != COMPCODE_EQ)
2280 && (compcode != COMPCODE_ORD);
2282 /* In a short-circuited boolean expression the LHS might be
2283 such that the RHS, if evaluated, will never trap.  For
2284 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2285 if neither x nor y is NaN.  (This is a mixed blessing: for
2286 example, the expression above will never trap, hence
2287 optimizing it to x < y would be invalid). */
2288 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2289 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2292 /* If the comparison was short-circuited, and only the RHS
2293 trapped, we may now generate a spurious trap. */
2295 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2298 /* If we changed the conditions that cause a trap, we lose. */
2299 if ((ltrap || rtrap) != trap)
/* Degenerate results fold to a constant boolean; otherwise rebuild a
   single comparison from the combined encoding.  */
2303 if (compcode == COMPCODE_TRUE)
2304 return constant_boolean_node (true, truth_type);
2305 else if (compcode == COMPCODE_FALSE)
2306 return constant_boolean_node (false, truth_type);
2308 return fold (build2 (compcode_to_comparison (compcode),
2309 truth_type, ll_arg, lr_arg));
2312 /* Return nonzero if CODE is a tree code that represents a truth value. */
2315 truth_value_p (enum tree_code code)
/* Truth values come from comparisons (class '<') or from the logical
   connective tree codes.  */
2317 return (TREE_CODE_CLASS (code) == '<'
2318 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2319 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2320 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2323 /* Return nonzero if two operands (typically of the same tree node)
2324 are necessarily equal.  If either argument has side-effects this
2325 function returns zero.  FLAGS modifies behavior as follows:
2327 If OEP_ONLY_CONST is set, only return nonzero for constants.
2328 This function tests whether the operands are indistinguishable;
2329 it does not test whether they are equal using C's == operation.
2330 The distinction is important for IEEE floating point, because
2331 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2332 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2334 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2335 even though it may hold multiple values during a function.
2336 This is because a GCC tree node guarantees that nothing else is
2337 executed between the evaluation of its "operands" (which may often
2338 be evaluated in arbitrary order).  Hence if the operands themselves
2339 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2340 same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2341 unset means assuming isochronic (or instantaneous) tree equivalence.
2342 Unless comparing arbitrary expression trees, such as from different
2343 statements, this flag can usually be left unset.
2345 If OEP_PURE_SAME is set, then pure functions with identical arguments
2346 are considered the same.  It is used when the caller has other ways
2347 to ensure that global memory is unchanged in between. */
2350 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2352 /* If either is ERROR_MARK, they aren't equal. */
2353 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2356 /* If both types don't have the same signedness, then we can't consider
2357 them equal.  We must check this before the STRIP_NOPS calls
2358 because they may change the signedness of the arguments. */
2359 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
/* After stripping conversions, the codes and modes must agree.  */
2365 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2366 /* This is needed for conversions and for COMPONENT_REF.
2367 Might as well play it safe and always test this. */
2368 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2369 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2370 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2373 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2374 We don't care about side effects in that case because the SAVE_EXPR
2375 takes care of that for us.  In all other cases, two expressions are
2376 equal if they have no side effects.  If we have two identical
2377 expressions with side effects that should be treated the same due
2378 to the only side effects being identical SAVE_EXPR's, that will
2379 be detected in the recursive calls below. */
2380 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2381 && (TREE_CODE (arg0) == SAVE_EXPR
2382 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2385 /* Next handle constant cases, those for which we can return 1 even
2386 if ONLY_CONST is set. */
2387 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2388 switch (TREE_CODE (arg0))
/* Integer constants compare by value, but only when neither has
   overflowed (overflowed constants are not reliable values).  */
2391 return (! TREE_CONSTANT_OVERFLOW (arg0)
2392 && ! TREE_CONSTANT_OVERFLOW (arg1)
2393 && tree_int_cst_equal (arg0, arg1));
/* Real constants must be bit-identical, not merely ==; see the
   -0.0 / NaN discussion in the function comment.  */
2396 return (! TREE_CONSTANT_OVERFLOW (arg0)
2397 && ! TREE_CONSTANT_OVERFLOW (arg1)
2398 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2399 TREE_REAL_CST (arg1)));
/* Vector constants compare element by element.  */
2405 if (TREE_CONSTANT_OVERFLOW (arg0)
2406 || TREE_CONSTANT_OVERFLOW (arg1))
2409 v1 = TREE_VECTOR_CST_ELTS (arg0);
2410 v2 = TREE_VECTOR_CST_ELTS (arg1);
2413 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2416 v1 = TREE_CHAIN (v1);
2417 v2 = TREE_CHAIN (v2);
/* Complex constants compare real part and imaginary part.  */
2424 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2426 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
/* String constants compare by length and raw bytes.  */
2430 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2431 && ! memcmp (TREE_STRING_POINTER (arg0),
2432 TREE_STRING_POINTER (arg1),
2433 TREE_STRING_LENGTH (arg0)));
2436 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
/* Past the constant cases, OEP_ONLY_CONST forbids a match.  */
2442 if (flags & OEP_ONLY_CONST)
/* Structural comparison, dispatched on the tree-code class.  */
2445 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2448 /* Two conversions are equal only if signedness and modes match. */
2449 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2450 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2451 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2454 return operand_equal_p (TREE_OPERAND (arg0, 0),
2455 TREE_OPERAND (arg1, 0), flags);
/* Binary expressions: compare operands pairwise.  */
2459 if (operand_equal_p (TREE_OPERAND (arg0, 0),
2460 TREE_OPERAND (arg1, 0), flags)
2461 && operand_equal_p (TREE_OPERAND (arg0, 1),
2462 TREE_OPERAND (arg1, 1), flags))
2465 /* For commutative ops, allow the other order. */
2466 return (commutative_tree_code (TREE_CODE (arg0))
2467 && operand_equal_p (TREE_OPERAND (arg0, 0),
2468 TREE_OPERAND (arg1, 1), flags)
2469 && operand_equal_p (TREE_OPERAND (arg0, 1),
2470 TREE_OPERAND (arg1, 0), flags));
2473 /* If either of the pointer (or reference) expressions we are
2474 dereferencing contain a side effect, these cannot be equal. */
2475 if (TREE_SIDE_EFFECTS (arg0)
2476 || TREE_SIDE_EFFECTS (arg1))
2479 switch (TREE_CODE (arg0))
2482 return operand_equal_p (TREE_OPERAND (arg0, 0),
2483 TREE_OPERAND (arg1, 0), flags);
2487 case ARRAY_RANGE_REF:
2488 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2489 TREE_OPERAND (arg1, 0), flags)
2490 && operand_equal_p (TREE_OPERAND (arg0, 1),
2491 TREE_OPERAND (arg1, 1), flags));
/* Three-operand references: all three operands must match.  */
2494 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2495 TREE_OPERAND (arg1, 0), flags)
2496 && operand_equal_p (TREE_OPERAND (arg0, 1),
2497 TREE_OPERAND (arg1, 1), flags)
2498 && operand_equal_p (TREE_OPERAND (arg0, 2),
2499 TREE_OPERAND (arg1, 2), flags));
/* Expression nodes that are not handled by the classes above.  */
2505 switch (TREE_CODE (arg0))
2508 case TRUTH_NOT_EXPR:
2509 return operand_equal_p (TREE_OPERAND (arg0, 0),
2510 TREE_OPERAND (arg1, 0), flags);
/* Short-circuit forms are order-sensitive: operands must match
   in the same positions.  */
2512 case TRUTH_ANDIF_EXPR:
2513 case TRUTH_ORIF_EXPR:
2514 return operand_equal_p (TREE_OPERAND (arg0, 0),
2515 TREE_OPERAND (arg1, 0), flags)
2516 && operand_equal_p (TREE_OPERAND (arg0, 1),
2517 TREE_OPERAND (arg1, 1), flags);
/* Non-short-circuit logical ops are commutative: either pairing
   of operands is acceptable.  */
2519 case TRUTH_AND_EXPR:
2521 case TRUTH_XOR_EXPR:
2522 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2523 TREE_OPERAND (arg1, 0), flags)
2524 && operand_equal_p (TREE_OPERAND (arg0, 1),
2525 TREE_OPERAND (arg1, 1), flags))
2526 || (operand_equal_p (TREE_OPERAND (arg0, 0),
2527 TREE_OPERAND (arg1, 1), flags)
2528 && operand_equal_p (TREE_OPERAND (arg0, 1),
2529 TREE_OPERAND (arg1, 0), flags));
2532 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2535 /* If the CALL_EXPRs call different functions, then they
2536 clearly can not be equal. */
2537 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2538 TREE_OPERAND (arg1, 0), flags))
/* Only const (or, with OEP_PURE_SAME, pure) calls can be equal,
   since other calls may have side effects.  */
2542 unsigned int cef = call_expr_flags (arg0);
2543 if (flags & OEP_PURE_SAME)
2544 cef &= ECF_CONST | ECF_PURE;
2551 /* Now see if all the arguments are the same.  operand_equal_p
2552 does not handle TREE_LIST, so we walk the operands here
2553 feeding them to operand_equal_p. */
2554 arg0 = TREE_OPERAND (arg0, 1);
2555 arg1 = TREE_OPERAND (arg1, 1);
2556 while (arg0 && arg1)
2558 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2562 arg0 = TREE_CHAIN (arg0);
2563 arg1 = TREE_CHAIN (arg1);
2566 /* If we get here and both argument lists are exhausted
2567 then the CALL_EXPRs are equal. */
2568 return ! (arg0 || arg1);
2575 /* Consider __builtin_sqrt equal to sqrt. */
2576 return (TREE_CODE (arg0) == FUNCTION_DECL
2577 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2578 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2579 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2586 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2587 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2589 When in doubt, return 0. */
2592 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2594 int unsignedp1, unsignedpo;
2595 tree primarg0, primarg1, primother;
2596 unsigned int correct_width;
/* Exactly-equal operands trivially qualify.  */
2598 if (operand_equal_p (arg0, arg1, 0))
/* The shorten_compare reconstruction below only applies to integers.  */
2601 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2602 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2605 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2606 and see if the inner values are the same.  This removes any
2607 signedness comparison, which doesn't matter here. */
2608 primarg0 = arg0, primarg1 = arg1;
2609 STRIP_NOPS (primarg0);
2610 STRIP_NOPS (primarg1);
2611 if (operand_equal_p (primarg0, primarg1, 0))
2614 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2615 actual comparison operand, ARG0.
2617 First throw away any conversions to wider types
2618 already present in the operands. */
2620 primarg1 = get_narrower (arg1, &unsignedp1);
2621 primother = get_narrower (other, &unsignedpo);
2623 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2624 if (unsignedp1 == unsignedpo
2625 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2626 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2628 tree type = TREE_TYPE (arg0);
2630 /* Make sure shorter operand is extended the right way
2631 to match the longer operand. */
2632 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2633 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
/* Now compare ARG0 against the reconstructed shortened ARG1.  */
2635 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2642 /* See if ARG is an expression that is either a comparison or is performing
2643 arithmetic on comparisons. The comparisons must only be comparing
2644 two different values, which will be stored in *CVAL1 and *CVAL2; if
2645 they are nonzero it means that some operands have already been found.
2646 No variables may be used anywhere else in the expression except in the
2647 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2648 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2650 If this is true, return 1. Otherwise, return zero. */
2653 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2655 enum tree_code code = TREE_CODE (arg);
2656 char class = TREE_CODE_CLASS (code);
2658 /* We can handle some of the 'e' cases here. */
2659 if (class == 'e' && code == TRUTH_NOT_EXPR)
/* Logical connectives and COMPOUND_EXPR are treated like binary nodes.  */
2661 else if (class == 'e'
2662 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2663 || code == COMPOUND_EXPR))
/* A SAVE_EXPR with no RTL yet and no side effects can be looked
   through; *SAVE_P tells the caller to re-wrap CVAL1/CVAL2.  */
2666 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2667 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2669 /* If we've already found a CVAL1 or CVAL2, this expression is
2670 too complex to handle. */
2671 if (*cval1 || *cval2)
/* Unary case: recurse into the single operand.  */
2681 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must qualify.  */
2684 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2685 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2686 cval1, cval2, save_p));
2692 if (code == COND_EXPR)
2693 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2694 cval1, cval2, save_p)
2695 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2696 cval1, cval2, save_p)
2697 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2698 cval1, cval2, save_p));
2702 /* First see if we can handle the first operand, then the second. For
2703 the second operand, we know *CVAL1 can't be zero. It must be that
2704 one side of the comparison is each of the values; test for the
2705 case where this isn't true by failing if the two operands are the same. */
2708 if (operand_equal_p (TREE_OPERAND (arg, 0),
2709 TREE_OPERAND (arg, 1), 0))
/* Record operand 0 as CVAL1 or CVAL2, or match it against one
   already recorded.  */
2713 *cval1 = TREE_OPERAND (arg, 0);
2714 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2716 else if (*cval2 == 0)
2717 *cval2 = TREE_OPERAND (arg, 0);
2718 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise record or match operand 1.  */
2723 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2725 else if (*cval2 == 0)
2726 *cval2 = TREE_OPERAND (arg, 1);
2727 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2739 /* ARG is a tree that is known to contain just arithmetic operations and
2740 comparisons. Evaluate the operations in the tree substituting NEW0 for
2741 any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */
2745 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2747 tree type = TREE_TYPE (arg);
2748 enum tree_code code = TREE_CODE (arg);
2749 char class = TREE_CODE_CLASS (code);
2751 /* We can handle some of the 'e' cases here. */
2752 if (class == 'e' && code == TRUTH_NOT_EXPR)
2754 else if (class == 'e'
2755 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary node: rebuild with the substituted operand and fold.  */
2761 return fold (build1 (code, type,
2762 eval_subst (TREE_OPERAND (arg, 0),
2763 old0, new0, old1, new1)));
/* Binary node: substitute in both operands.  */
2766 return fold (build2 (code, type,
2767 eval_subst (TREE_OPERAND (arg, 0),
2768 old0, new0, old1, new1),
2769 eval_subst (TREE_OPERAND (arg, 1),
2770 old0, new0, old1, new1)));
/* Expression cases that recurse into a single operand.  */
2776 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2779 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary node (e.g. COND_EXPR): substitute in all three operands.  */
2782 return fold (build3 (code, type,
2783 eval_subst (TREE_OPERAND (arg, 0),
2784 old0, new0, old1, new1),
2785 eval_subst (TREE_OPERAND (arg, 1),
2786 old0, new0, old1, new1),
2787 eval_subst (TREE_OPERAND (arg, 2),
2788 old0, new0, old1, new1)));
2792 /* Fall through - ??? */
/* Comparison case: this is where the OLD -> NEW replacement happens.  */
2796 tree arg0 = TREE_OPERAND (arg, 0);
2797 tree arg1 = TREE_OPERAND (arg, 1);
2799 /* We need to check both for exact equality and tree equality. The
2800 former will be true if the operand has a side-effect. In that
2801 case, we know the operand occurred exactly once. */
2803 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2805 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2808 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2810 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2813 return fold (build2 (code, type, arg0, arg1));
2821 /* Return a tree for the case when the result of an expression is RESULT
2822 converted to TYPE and OMITTED was previously an operand of the expression
2823 but is now not needed (e.g., we folded OMITTED * 0).
2825 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2826 the conversion of RESULT to TYPE. */
2829 omit_one_operand (tree type, tree result, tree omitted)
2831 tree t = fold_convert (type, result);
/* Keep OMITTED alive via a COMPOUND_EXPR only when it has side effects.  */
2833 if (TREE_SIDE_EFFECTS (omitted))
2834 return build2 (COMPOUND_EXPR, type, omitted, t);
/* non_lvalue marks the result so it cannot be used as an lvalue.  */
2836 return non_lvalue (t);
2839 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2842 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2844 tree t = fold_convert (type, result);
/* Same side-effect preservation logic as omit_one_operand.  */
2846 if (TREE_SIDE_EFFECTS (omitted))
2847 return build2 (COMPOUND_EXPR, type, omitted, t);
2849 return pedantic_non_lvalue (t);
2852 /* Return a tree for the case when the result of an expression is RESULT
2853 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2854 of the expression but are now not needed.
2856 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2857 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2858 evaluated before OMITTED2. Otherwise, if neither has side effects,
2859 just do the conversion of RESULT to TYPE. */
2862 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2864 tree t = fold_convert (type, result);
/* Wrap OMITTED2 first and OMITTED1 outermost, so OMITTED1 is
   evaluated before OMITTED2 at run time.  */
2866 if (TREE_SIDE_EFFECTS (omitted2))
2867 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2868 if (TREE_SIDE_EFFECTS (omitted1))
2869 t = build2 (COMPOUND_EXPR, type, omitted1, t);
/* Only mark as a non-lvalue when no COMPOUND_EXPR was added.  */
2871 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2875 /* Return a simplified tree node for the truth-negation of ARG. This
2876 never alters ARG itself. We assume that ARG is an operation that
2877 returns a truth value (0 or 1).
2879 FIXME: one would think we would fold the result, but it causes
2880 problems with the dominator optimizer. */
2882 invert_truthvalue (tree arg)
2884 tree type = TREE_TYPE (arg);
2885 enum tree_code code = TREE_CODE (arg);
2887 if (code == ERROR_MARK)
2890 /* If this is a comparison, we can simply invert it, except for
2891 floating-point non-equality comparisons, in which case we just
2892 enclose a TRUTH_NOT_EXPR around what we have. */
2894 if (TREE_CODE_CLASS (code) == '<')
2896 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
/* Under -ftrapping-math, inverting an FP ordering comparison could
   change trapping behavior, so keep an explicit NOT instead.  */
2897 if (FLOAT_TYPE_P (op_type)
2898 && flag_trapping_math
2899 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2900 && code != NE_EXPR && code != EQ_EXPR)
2901 return build1 (TRUTH_NOT_EXPR, type, arg);
2904 code = invert_tree_comparison (code,
2905 HONOR_NANS (TYPE_MODE (op_type)));
2906 if (code == ERROR_MARK)
2907 return build1 (TRUTH_NOT_EXPR, type, arg);
2909 return build2 (code, type,
2910 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant case: invert a constant truth value.  */
2917 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2919 case TRUTH_AND_EXPR:
/* De Morgan: !(a & b) == !a | !b.  */
2920 return build2 (TRUTH_OR_EXPR, type,
2921 invert_truthvalue (TREE_OPERAND (arg, 0)),
2922 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* De Morgan: !(a | b) == !a & !b.  */
2925 return build2 (TRUTH_AND_EXPR, type,
2926 invert_truthvalue (TREE_OPERAND (arg, 0)),
2927 invert_truthvalue (TREE_OPERAND (arg, 1)));
2929 case TRUTH_XOR_EXPR:
2930 /* Here we can invert either operand. We invert the first operand
2931 unless the second operand is a TRUTH_NOT_EXPR in which case our
2932 result is the XOR of the first operand with the inside of the
2933 negation of the second operand. */
2935 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2936 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2937 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2939 return build2 (TRUTH_XOR_EXPR, type,
2940 invert_truthvalue (TREE_OPERAND (arg, 0)),
2941 TREE_OPERAND (arg, 1));
2943 case TRUTH_ANDIF_EXPR:
/* De Morgan for the short-circuit forms.  */
2944 return build2 (TRUTH_ORIF_EXPR, type,
2945 invert_truthvalue (TREE_OPERAND (arg, 0)),
2946 invert_truthvalue (TREE_OPERAND (arg, 1)));
2948 case TRUTH_ORIF_EXPR:
2949 return build2 (TRUTH_ANDIF_EXPR, type,
2950 invert_truthvalue (TREE_OPERAND (arg, 0)),
2951 invert_truthvalue (TREE_OPERAND (arg, 1)));
2953 case TRUTH_NOT_EXPR:
/* Double negation cancels.  */
2954 return TREE_OPERAND (arg, 0);
/* COND_EXPR: push the inversion into both arms.  */
2957 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2958 invert_truthvalue (TREE_OPERAND (arg, 1)),
2959 invert_truthvalue (TREE_OPERAND (arg, 2)));
/* COMPOUND_EXPR: only the value operand is inverted.  */
2962 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2963 invert_truthvalue (TREE_OPERAND (arg, 1)));
2965 case NON_LVALUE_EXPR:
2966 return invert_truthvalue (TREE_OPERAND (arg, 0));
/* Conversions of boolean values can be looked through.  */
2969 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2974 return build1 (TREE_CODE (arg), type,
2975 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* x & 1 inverts to x == 0 (only valid for a mask of exactly 1).  */
2978 if (!integer_onep (TREE_OPERAND (arg, 1)))
2980 return build2 (EQ_EXPR, type, arg,
2981 fold_convert (type, integer_zero_node));
2984 return build1 (TRUTH_NOT_EXPR, type, arg);
2986 case CLEANUP_POINT_EXPR:
2987 return build1 (CLEANUP_POINT_EXPR, type,
2988 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Fallback: anything not handled above must be boolean-typed.  */
2993 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2995 return build1 (TRUTH_NOT_EXPR, type, arg);
2998 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2999 operands are another bit-wise operation with a common input. If so,
3000 distribute the bit operations to save an operation and possibly two if
3001 constants are involved. For example, convert
3002 (A | B) & (A | C) into A | (B & C)
3003 Further simplification will occur if B and C are constants.
3005 If this optimization cannot be done, 0 will be returned. */
3008 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must share the same AND/IOR code, distinct from CODE.  */
3013 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3014 || TREE_CODE (arg0) == code
3015 || (TREE_CODE (arg0) != BIT_AND_EXPR
3016 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Try the four operand pairings to find the common subexpression.  */
3019 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3021 common = TREE_OPERAND (arg0, 0);
3022 left = TREE_OPERAND (arg0, 1);
3023 right = TREE_OPERAND (arg1, 1);
3025 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3027 common = TREE_OPERAND (arg0, 0);
3028 left = TREE_OPERAND (arg0, 1);
3029 right = TREE_OPERAND (arg1, 0);
3031 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3033 common = TREE_OPERAND (arg0, 1);
3034 left = TREE_OPERAND (arg0, 0);
3035 right = TREE_OPERAND (arg1, 1);
3037 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3039 common = TREE_OPERAND (arg0, 1);
3040 left = TREE_OPERAND (arg0, 0);
3041 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON op (LEFT code RIGHT), folding the inner node first.  */
3046 return fold (build2 (TREE_CODE (arg0), type, common,
3047 fold (build2 (code, type, left, right))));
3050 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3051 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3054 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
/* Build the reference; operand 1 is the size, operand 2 the position.  */
3057 tree result = build3 (BIT_FIELD_REF, type, inner,
3058 size_int (bitsize), bitsize_int (bitpos));
3060 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3065 /* Optimize a bit-field compare.
3067 There are two cases: First is a compare against a constant and the
3068 second is a comparison of two items where the fields are at the same
3069 bit position relative to the start of a chunk (byte, halfword, word)
3070 large enough to contain it. In these cases we can avoid the shift
3071 implicit in bitfield extractions.
3073 For constants, we emit a compare of the shifted constant with the
3074 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3075 compared. For two fields at the same position, we do the ANDs with the
3076 similar mask and compare the result of the ANDs.
3078 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3079 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3080 are the left and right operands of the comparison, respectively.
3082 If the optimization described above can be done, we return the resulting
3083 tree. Otherwise we return zero. */
3086 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3089 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3090 tree type = TREE_TYPE (lhs);
3091 tree signed_type, unsigned_type;
3092 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3093 enum machine_mode lmode, rmode, nmode;
3094 int lunsignedp, runsignedp;
3095 int lvolatilep = 0, rvolatilep = 0;
3096 tree linner, rinner = NULL_TREE;
3100 /* Get all the information about the extractions being done. If the bit size
3101 is the same as the size of the underlying object, we aren't doing an
3102 extraction at all and so can do nothing. We also don't want to
3103 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3104 then will no longer be able to replace it. */
3105 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3106 &lunsignedp, &lvolatilep);
3107 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3108 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3113 /* If this is not a constant, we can only do something if bit positions,
3114 sizes, and signedness are the same. */
3115 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3116 &runsignedp, &rvolatilep);
3118 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3119 || lunsignedp != runsignedp || offset != 0
3120 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3124 /* See if we can find a mode to refer to this field. We should be able to,
3125 but fail if we can't. */
3126 nmode = get_best_mode (lbitsize, lbitpos,
3127 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3128 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3129 TYPE_ALIGN (TREE_TYPE (rinner))),
3130 word_mode, lvolatilep || rvolatilep);
3131 if (nmode == VOIDmode)
3134 /* Set signed and unsigned types of the precision of this mode for the comparison.  */
3136 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3137 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3139 /* Compute the bit position and size for the new reference and our offset
3140 within it. If the new reference is the same size as the original, we
3141 won't optimize anything, so return zero. */
3142 nbitsize = GET_MODE_BITSIZE (nmode);
3143 nbitpos = lbitpos & ~ (nbitsize - 1);
3145 if (nbitsize == lbitsize)
/* Adjust the bit position for big-endian bit numbering.  */
3148 if (BYTES_BIG_ENDIAN)
3149 lbitpos = nbitsize - lbitsize - lbitpos;
3151 /* Make the mask to be used against the extracted field. */
3152 mask = build_int_2 (~0, ~0);
3153 TREE_TYPE (mask) = unsigned_type;
3154 force_fit_type (mask, 0);
3155 mask = fold_convert (unsigned_type, mask);
/* Shift left then right to leave LBITSIZE ones at position LBITPOS.  */
3156 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3157 mask = const_binop (RSHIFT_EXPR, mask,
3158 size_int (nbitsize - lbitsize - lbitpos), 0);
3161 /* If not comparing with constant, just rework the comparison to use the
   masked fields instead.  */
3163 return build2 (code, compare_type,
3164 build2 (BIT_AND_EXPR, unsigned_type,
3165 make_bit_field_ref (linner, unsigned_type,
3166 nbitsize, nbitpos, 1),
3168 build2 (BIT_AND_EXPR, unsigned_type,
3169 make_bit_field_ref (rinner, unsigned_type,
3170 nbitsize, nbitpos, 1),
3173 /* Otherwise, we are handling the constant case. See if the constant is too
3174 big for the field. Warn and return a tree for 0 (false) if so. We do
3175 this not only for its own sake, but to avoid having to test for this
3176 error case below. If we didn't, we might generate wrong code.
3178 For unsigned fields, the constant shifted right by the field length should
3179 be all zero. For signed fields, the high-order bits should agree with the sign bit.  */
3184 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3185 fold_convert (unsigned_type, rhs),
3186 size_int (lbitsize), 0)))
3188 warning ("comparison is always %d due to width of bit-field",
3190 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed-field check: the shifted-out bits of RHS must be all
   zeros or all ones.  */
3195 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3196 size_int (lbitsize - 1), 0);
3197 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3199 warning ("comparison is always %d due to width of bit-field",
3201 return constant_boolean_node (code == NE_EXPR, compare_type);
3205 /* Single-bit compares should always be against zero. */
3206 if (lbitsize == 1 && ! integer_zerop (rhs))
3208 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3209 rhs = fold_convert (type, integer_zero_node);
3212 /* Make a new bitfield reference, shift the constant over the
3213 appropriate number of bits and mask it with the computed mask
3214 (in case this was a signed field). If we changed it, make a new one. */
3215 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
/* Propagate side-effect/volatility flags onto the new reference.  */
3218 TREE_SIDE_EFFECTS (lhs) = 1;
3219 TREE_THIS_VOLATILE (lhs) = 1;
3222 rhs = fold (const_binop (BIT_AND_EXPR,
3223 const_binop (LSHIFT_EXPR,
3224 fold_convert (unsigned_type, rhs),
3225 size_int (lbitpos), 0),
3228 return build2 (code, compare_type,
3229 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3233 /* Subroutine for fold_truthop: decode a field reference.
3235 If EXP is a comparison reference, we return the innermost reference.
3237 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3238 set to the starting bit number.
3240 If the innermost field can be completely contained in a mode-sized
3241 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3243 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3244 otherwise it is not changed.
3246 *PUNSIGNEDP is set to the signedness of the field.
3248 *PMASK is set to the mask used. This is either contained in a
3249 BIT_AND_EXPR or derived from the width of the field.
3251 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3253 Return 0 if this is not a component reference or is one that we can't
3254 do anything with. */
3257 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3258 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3259 int *punsignedp, int *pvolatilep,
3260 tree *pmask, tree *pand_mask)
3262 tree outer_type = 0;
3264 tree mask, inner, offset;
3266 unsigned int precision;
3268 /* All the optimizations using this function assume integer fields.
3269 There are problems with FP fields since the type_for_size call
3270 below can fail for, e.g., XFmode. */
3271 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3274 /* We are interested in the bare arrangement of bits, so strip everything
3275 that doesn't affect the machine mode. However, record the type of the
3276 outermost expression if it may matter below. */
3277 if (TREE_CODE (exp) == NOP_EXPR
3278 || TREE_CODE (exp) == CONVERT_EXPR
3279 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3280 outer_type = TREE_TYPE (exp);
/* Peel off a BIT_AND_EXPR with a constant mask, remembering the mask.  */
3283 if (TREE_CODE (exp) == BIT_AND_EXPR)
3285 and_mask = TREE_OPERAND (exp, 1);
3286 exp = TREE_OPERAND (exp, 0);
3287 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3288 if (TREE_CODE (and_mask) != INTEGER_CST)
3292 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3293 punsignedp, pvolatilep);
3294 if ((inner == exp && and_mask == 0)
3295 || *pbitsize < 0 || offset != 0
3296 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3299 /* If the number of bits in the reference is the same as the bitsize of
3300 the outer type, then the outer type gives the signedness. Otherwise
3301 (in case of a small bitfield) the signedness is unchanged. */
3302 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3303 *punsignedp = TYPE_UNSIGNED (outer_type);
3305 /* Compute the mask to access the bitfield. */
3306 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3307 precision = TYPE_PRECISION (unsigned_type);
/* All-ones constant narrowed to PRECISION, then trimmed by a shift
   pair to leave *PBITSIZE low-order ones.  */
3309 mask = build_int_2 (~0, ~0);
3310 TREE_TYPE (mask) = unsigned_type;
3311 force_fit_type (mask, 0);
3312 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3313 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3315 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3317 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3318 fold_convert (unsigned_type, and_mask), mask));
3321 *pand_mask = and_mask;
3325 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */
3329 all_ones_mask_p (tree mask, int size)
3331 tree type = TREE_TYPE (mask);
3332 unsigned int precision = TYPE_PRECISION (type);
/* Build an all-ones constant of the signed variant of TYPE...  */
3335 tmask = build_int_2 (~0, ~0);
3336 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3337 force_fit_type (tmask, 0);
/* ...then compare MASK against that constant shifted left and back
   right, which leaves exactly SIZE low-order ones.  */
3339 tree_int_cst_equal (mask,
3340 const_binop (RSHIFT_EXPR,
3341 const_binop (LSHIFT_EXPR, tmask,
3342 size_int (precision - size),
3344 size_int (precision - size), 0));
3347 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3348 represents the sign bit of EXP's type. If EXP represents a sign
3349 or zero extension, also test VAL against the unextended type.
3350 The return value is the (sub)expression whose sign bit is VAL,
3351 or NULL_TREE otherwise. */
3354 sign_bit_p (tree exp, tree val)
3356 unsigned HOST_WIDE_INT mask_lo, lo;
3357 HOST_WIDE_INT mask_hi, hi;
3361 /* Tree EXP must have an integral type. */
3362 t = TREE_TYPE (exp);
3363 if (! INTEGRAL_TYPE_P (t))
3366 /* Tree VAL must be an integer constant. */
3367 if (TREE_CODE (val) != INTEGER_CST
3368 || TREE_CONSTANT_OVERFLOW (val))
3371 width = TYPE_PRECISION (t);
/* Wide types: the sign bit lives in the high word (hi/mask_hi).  */
3372 if (width > HOST_BITS_PER_WIDE_INT)
3374 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3377 mask_hi = ((unsigned HOST_WIDE_INT) -1
3378 >> (2 * HOST_BITS_PER_WIDE_INT - width));
/* Narrow types: the sign bit lives in the low word (lo/mask_lo).  */
3384 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3387 mask_lo = ((unsigned HOST_WIDE_INT) -1
3388 >> (HOST_BITS_PER_WIDE_INT - width));
3391 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3392 treat VAL as if it were unsigned. */
3393 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3394 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3397 /* Handle extension from a narrower type. */
3398 if (TREE_CODE (exp) == NOP_EXPR
3399 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3400 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3405 /* Subroutine for fold_truthop: determine if an operand is simple enough
3406 to be evaluated unconditionally. */
3409 simple_operand_p (tree exp)
3411 /* Strip any conversions that don't change the machine mode. */
3412 while ((TREE_CODE (exp) == NOP_EXPR
3413 || TREE_CODE (exp) == CONVERT_EXPR)
3414 && (TYPE_MODE (TREE_TYPE (exp))
3415 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3416 exp = TREE_OPERAND (exp, 0);
/* Constants are always simple; otherwise require a non-volatile,
   non-addressable, local declaration.  */
3418 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3420 && ! TREE_ADDRESSABLE (exp)
3421 && ! TREE_THIS_VOLATILE (exp)
3422 && ! DECL_NONLOCAL (exp)
3423 /* Don't regard global variables as simple. They may be
3424 allocated in ways unknown to the compiler (shared memory,
3425 #pragma weak, etc). */
3426 && ! TREE_PUBLIC (exp)
3427 && ! DECL_EXTERNAL (exp)
3428 /* Loading a static variable is unduly expensive, but global
3429 registers aren't expensive. */
3430 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3433 /* The following functions are subroutines to fold_range_test and allow it to
3434 try to change a logical combination of comparisons into a range test.
3437 X == 2 || X == 3 || X == 4 || X == 5
3441 (unsigned) (X - 2) <= 3
3443 We describe each set of comparisons as being either inside or outside
3444 a range, using a variable named like IN_P, and then describe the
3445 range with a lower and upper bound. If one of the bounds is omitted,
3446 it represents either the highest or lowest value of the type.
3448 In the comments below, we represent a range by two numbers in brackets
3449 preceded by a "+" to designate being inside that range, or a "-" to
3450 designate being outside that range, so the condition can be inverted by
3451 flipping the prefix. An omitted bound is represented by a "-". For
3452 example, "- [-, 10]" means being outside the range starting at the lowest
3453 possible value and ending at 10, in other words, being greater than 10.
3454 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3457 We set up things so that the missing bounds are handled in a consistent
3458 manner so neither a missing bound nor "true" and "false" need to be
3459 handled using a special case. */
3461 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3462 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3463 and UPPER1_P are nonzero if the respective argument is an upper bound
3464 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3465 must be specified for a comparison. ARG1 will be converted to ARG0's
3466 type if both are specified. */
3469 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3470 tree arg1, int upper1_p)
3476 /* If neither arg represents infinity, do the normal operation.
3477 Else, if not a comparison, return infinity. Else handle the special
3478 comparison rules. Note that most of the cases below won't occur, but
3479 are handled for consistency. */
3481 if (arg0 != 0 && arg1 != 0)
3483 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3484 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
/* Only a fold that produced a constant is useful here.  */
3486 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3489 if (TREE_CODE_CLASS (code) != '<')
3492 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3493 for neither. In real maths, we cannot assume open ended ranges are
3494 the same. But, this is computer arithmetic, where numbers are finite.
3495 We can therefore make the transformation of any unbounded range with
3496 the value Z, Z being greater than any representable number. This permits
3497 us to treat unbounded ranges as equal. */
3498 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3499 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Compare the bound classifications according to CODE.  */
3503 result = sgn0 == sgn1;
3506 result = sgn0 != sgn1;
3509 result = sgn0 < sgn1;
3512 result = sgn0 <= sgn1;
3515 result = sgn0 > sgn1;
3518 result = sgn0 >= sgn1;
3524 return constant_boolean_node (result, type);
3527 /* Given EXP, a logical expression, set the range it is testing into
3528 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3529 actually being tested. *PLOW and *PHIGH will be made of the same type
3530 as the returned expression. If EXP is not a comparison, we will most
3531 likely not be returning a useful value and range. */
3534 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3536 enum tree_code code;
3537 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3538 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3540 tree low, high, n_low, n_high;
3542 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3543 and see if we can refine the range. Some of the cases below may not
3544 happen, but it doesn't seem worth worrying about this. We "continue"
3545 the outer loop when we've changed something; otherwise we "break"
3546 the switch, which will "break" the while. */
3549 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3553 code = TREE_CODE (exp);
3554 exp_type = TREE_TYPE (exp);
3556 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3558 if (first_rtl_op (code) > 0)
3559 arg0 = TREE_OPERAND (exp, 0);
3560 if (TREE_CODE_CLASS (code) == '<'
3561 || TREE_CODE_CLASS (code) == '1'
3562 || TREE_CODE_CLASS (code) == '2')
3563 arg0_type = TREE_TYPE (arg0);
3564 if (TREE_CODE_CLASS (code) == '2'
3565 || TREE_CODE_CLASS (code) == '<'
3566 || (TREE_CODE_CLASS (code) == 'e'
3567 && TREE_CODE_LENGTH (code) > 1))
3568 arg1 = TREE_OPERAND (exp, 1);
3573 case TRUTH_NOT_EXPR:
3574 in_p = ! in_p, exp = arg0;
3577 case EQ_EXPR: case NE_EXPR:
3578 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3579 /* We can only do something if the range is testing for zero
3580 and if the second operand is an integer constant. Note that
3581 saying something is "in" the range we make is done by
3582 complementing IN_P since it will set in the initial case of
3583 being not equal to zero; "out" is leaving it alone. */
3584 if (low == 0 || high == 0
3585 || ! integer_zerop (low) || ! integer_zerop (high)
3586 || TREE_CODE (arg1) != INTEGER_CST)
3591 case NE_EXPR: /* - [c, c] */
3594 case EQ_EXPR: /* + [c, c] */
3595 in_p = ! in_p, low = high = arg1;
3597 case GT_EXPR: /* - [-, c] */
3598 low = 0, high = arg1;
3600 case GE_EXPR: /* + [c, -] */
3601 in_p = ! in_p, low = arg1, high = 0;
3603 case LT_EXPR: /* - [c, -] */
3604 low = arg1, high = 0;
3606 case LE_EXPR: /* + [-, c] */
3607 in_p = ! in_p, low = 0, high = arg1;
3613 /* If this is an unsigned comparison, we also know that EXP is
3614 greater than or equal to zero. We base the range tests we make
3615 on that fact, so we record it here so we can parse existing
3616 range tests. We test arg0_type since often the return type
3617 of, e.g. EQ_EXPR, is boolean. */
3618 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3620 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3621 1, fold_convert (arg0_type, integer_zero_node),
3625 in_p = n_in_p, low = n_low, high = n_high;
3627 /* If the high bound is missing, but we have a nonzero low
3628 bound, reverse the range so it goes from zero to the low bound
3630 if (high == 0 && low && ! integer_zerop (low))
3633 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3634 integer_one_node, 0);
3635 low = fold_convert (arg0_type, integer_zero_node);
3643 /* (-x) IN [a,b] -> x in [-b, -a] */
3644 n_low = range_binop (MINUS_EXPR, exp_type,
3645 fold_convert (exp_type, integer_zero_node),
3647 n_high = range_binop (MINUS_EXPR, exp_type,
3648 fold_convert (exp_type, integer_zero_node),
3650 low = n_low, high = n_high;
3656 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3657 fold_convert (exp_type, integer_one_node));
3660 case PLUS_EXPR: case MINUS_EXPR:
3661 if (TREE_CODE (arg1) != INTEGER_CST)
3664 /* If EXP is signed, any overflow in the computation is undefined,
3665 so we don't worry about it so long as our computations on
3666 the bounds don't overflow. For unsigned, overflow is defined
3667 and this is exactly the right thing. */
3668 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3669 arg0_type, low, 0, arg1, 0);
3670 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3671 arg0_type, high, 1, arg1, 0);
3672 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3673 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3676 /* Check for an unsigned range which has wrapped around the maximum
3677 value thus making n_high < n_low, and normalize it. */
3678 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3680 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3681 integer_one_node, 0);
3682 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3683 integer_one_node, 0);
3685 /* If the range is of the form +/- [ x+1, x ], we won't
3686 be able to normalize it. But then, it represents the
3687 whole range or the empty set, so make it
3689 if (tree_int_cst_equal (n_low, low)
3690 && tree_int_cst_equal (n_high, high))
3696 low = n_low, high = n_high;
3701 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3702 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3705 if (! INTEGRAL_TYPE_P (arg0_type)
3706 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3707 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3710 n_low = low, n_high = high;
3713 n_low = fold_convert (arg0_type, n_low);
3716 n_high = fold_convert (arg0_type, n_high);
3719 /* If we're converting arg0 from an unsigned type, to exp,
3720 a signed type, we will be doing the comparison as unsigned.
3721 The tests above have already verified that LOW and HIGH
3724 So we have to ensure that we will handle large unsigned
3725 values the same way that the current signed bounds treat
3728 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3731 tree equiv_type = lang_hooks.types.type_for_mode
3732 (TYPE_MODE (arg0_type), 1);
3734 /* A range without an upper bound is, naturally, unbounded.
3735 Since convert would have cropped a very large value, use
3736 the max value for the destination type. */
3738 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3739 : TYPE_MAX_VALUE (arg0_type);
3741 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3742 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3743 fold_convert (arg0_type,
3745 fold_convert (arg0_type,
3746 integer_one_node)));
3748 /* If the low bound is specified, "and" the range with the
3749 range for which the original unsigned value will be
3753 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3754 1, n_low, n_high, 1,
3755 fold_convert (arg0_type, integer_zero_node),
3759 in_p = (n_in_p == in_p);
3763 /* Otherwise, "or" the range with the range of the input
3764 that will be interpreted as negative. */
3765 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3766 0, n_low, n_high, 1,
3767 fold_convert (arg0_type, integer_zero_node),
3771 in_p = (in_p != n_in_p);
3776 low = n_low, high = n_high;
3786 /* If EXP is a constant, we can evaluate whether this is true or false. */
3787 if (TREE_CODE (exp) == INTEGER_CST)
3789 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3791 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3797 *pin_p = in_p, *plow = low, *phigh = high;
3801 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3802 type, TYPE, return an expression to test if EXP is in (or out of, depending
3803 on IN_P) the range. Return 0 if the test couldn't be created. */
3806 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3808 tree etype = TREE_TYPE (exp);
/* An "out of range" test is built as the logical inverse of the
   corresponding "in range" test.  */
3813 value = build_range_check (type, exp, 1, low, high);
3815 return invert_truthvalue (value);
/* A NULL bound means "unbounded" here; with neither bound present the
   test is trivially true.  */
3820 if (low == 0 && high == 0)
3821 return fold_convert (type, integer_one_node);
/* With only one bound present, a single comparison suffices.  */
3824 return fold (build2 (LE_EXPR, type, exp, high));
3827 return fold (build2 (GE_EXPR, type, exp, low));
/* Degenerate range [c, c] collapses to an equality test.  */
3829 if (operand_equal_p (low, high, 0))
3830 return fold (build2 (EQ_EXPR, type, exp, low));
3832 if (integer_zerop (low))
3834 if (! TYPE_UNSIGNED (etype))
/* [0, HIGH] on a signed type: redo the check in the corresponding
   unsigned type so one unsigned comparison covers both bounds.  */
3836 etype = lang_hooks.types.unsigned_type (etype);
3837 high = fold_convert (etype, high);
3838 exp = fold_convert (etype, exp);
3840 return build_range_check (type, exp, 1, 0, high);
3843 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3844 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3846 unsigned HOST_WIDE_INT lo;
3850 prec = TYPE_PRECISION (etype);
/* Build the signed-maximum constant for PREC bits as a HIGH/LOW pair of
   HOST_WIDE_INTs, handling precisions wider than one HOST_WIDE_INT.  */
3851 if (prec <= HOST_BITS_PER_WIDE_INT)
3854 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3858 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3859 lo = (unsigned HOST_WIDE_INT) -1;
/* HIGH is exactly the signed maximum, so the range [1, max] is
   equivalent to "(signed) EXP > 0".  */
3862 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3864 if (TYPE_UNSIGNED (etype))
3866 etype = lang_hooks.types.signed_type (etype);
3867 exp = fold_convert (etype, exp);
3869 return fold (build2 (GT_EXPR, type, exp,
3870 fold_convert (etype, integer_zero_node)));
/* General case: rewrite LOW <= EXP <= HIGH as a single unsigned test
   EXP - LOW <= HIGH - LOW (see the recursive call below).  */
3874 value = const_binop (MINUS_EXPR, high, low, 0);
3875 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3877 tree utype, minv, maxv;
3879 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3880 for the type in question, as we rely on this here. */
3881 switch (TREE_CODE (etype))
3886 utype = lang_hooks.types.unsigned_type (etype);
3887 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3888 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3889 integer_one_node, 1);
3890 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3891 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
/* Wrap-around confirmed: redo the subtraction in ETYPE (possibly the
   unsigned variant chosen above) so it no longer overflows.  */
3895 high = fold_convert (etype, high);
3896 low = fold_convert (etype, low);
3897 exp = fold_convert (etype, exp);
3898 value = const_binop (MINUS_EXPR, high, low, 0);
3906 if (value != 0 && ! TREE_OVERFLOW (value))
3907 return build_range_check (type,
3908 fold (build2 (MINUS_EXPR, etype, exp, low)),
3909 1, fold_convert (etype, integer_zero_node),
3915 /* Given two ranges, see if we can merge them into one. Return 1 if we
3916 can, 0 if we can't. Set the output range into the specified parameters. */
3919 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3920 tree high0, int in1_p, tree low1, tree high1)
/* A NULL bound stands for "unbounded" on that side; two NULL bounds
   compare equal without calling range_binop.  */
3928 int lowequal = ((low0 == 0 && low1 == 0)
3929 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3930 low0, 0, low1, 0)));
3931 int highequal = ((high0 == 0 && high1 == 0)
3932 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3933 high0, 1, high1, 1)));
3935 /* Make range 0 be the range that starts first, or ends last if they
3936 start at the same value. Swap them if it isn't. */
3937 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3940 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3941 high1, 1, high0, 1))))
3943 temp = in0_p, in0_p = in1_p, in1_p = temp;
3944 tem = low0, low0 = low1, low1 = tem;
3945 tem = high0, high0 = high1, high1 = tem;
3948 /* Now flag two cases, whether the ranges are disjoint or whether the
3949 second range is totally subsumed in the first. Note that the tests
3950 below are simplified by the ones above. */
3951 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3952 high0, 1, low1, 0));
3953 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3954 high1, 1, high0, 1));
3956 /* We now have four cases, depending on whether we are including or
3957 excluding the two ranges. */
/* Case 1: both ranges included (+range AND +range).  */
3960 /* If they don't overlap, the result is false. If the second range
3961 is a subset it is the result. Otherwise, the range is from the start
3962 of the second to the end of the first. */
3964 in_p = 0, low = high = 0;
3966 in_p = 1, low = low1, high = high1;
3968 in_p = 1, low = low1, high = high0;
/* Case 2: first range included, second excluded.  */
3971 else if (in0_p && ! in1_p)
3973 /* If they don't overlap, the result is the first range. If they are
3974 equal, the result is false. If the second range is a subset of the
3975 first, and the ranges begin at the same place, we go from just after
3976 the end of the first range to the end of the second. If the second
3977 range is not a subset of the first, or if it is a subset and both
3978 ranges end at the same place, the range starts at the start of the
3979 first range and ends just before the second range.
3980 Otherwise, we can't describe this as a single range. */
3982 in_p = 1, low = low0, high = high0;
3983 else if (lowequal && highequal)
3984 in_p = 0, low = high = 0;
3985 else if (subset && lowequal)
3987 in_p = 1, high = high0;
3988 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3989 integer_one_node, 0);
3991 else if (! subset || highequal)
3993 in_p = 1, low = low0;
3994 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3995 integer_one_node, 0);
/* Case 3: first range excluded, second included.  */
4001 else if (! in0_p && in1_p)
4003 /* If they don't overlap, the result is the second range. If the second
4004 is a subset of the first, the result is false. Otherwise,
4005 the range starts just after the first range and ends at the
4006 end of the second. */
4008 in_p = 1, low = low1, high = high1;
4009 else if (subset || highequal)
4010 in_p = 0, low = high = 0;
4013 in_p = 1, high = high1;
4014 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4015 integer_one_node, 0);
/* Case 4: both ranges excluded.  */
4021 /* The case where we are excluding both ranges. Here the complex case
4022 is if they don't overlap. In that case, the only time we have a
4023 range is if they are adjacent. If the second is a subset of the
4024 first, the result is the first. Otherwise, the range to exclude
4025 starts at the beginning of the first range and ends at the end of the
/* Adjacency test: high0 + 1 == low1 means the two excluded ranges
   abut and can be excluded as one span.  */
4029 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4030 range_binop (PLUS_EXPR, NULL_TREE,
4032 integer_one_node, 1),
4034 in_p = 0, low = low0, high = high1;
4037 /* Canonicalize - [min, x] into - [-, x]. */
4038 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4039 switch (TREE_CODE (TREE_TYPE (low0)))
/* Only canonicalize when the type's precision fills its mode; a
   partial-precision type's min/max are not the mode's extremes.  */
4042 if (TYPE_PRECISION (TREE_TYPE (low0))
4043 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4048 if (tree_int_cst_equal (low0,
4049 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4053 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4054 && integer_zerop (low0))
4061 /* Canonicalize - [x, max] into - [x, -]. */
4062 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4063 switch (TREE_CODE (TREE_TYPE (high1)))
4066 if (TYPE_PRECISION (TREE_TYPE (high1))
4067 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4072 if (tree_int_cst_equal (high1,
4073 TYPE_MAX_VALUE (TREE_TYPE (high1))))
/* For unsigned types, max is recognized as the value whose successor
   wraps to zero.  */
4077 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4078 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4080 integer_one_node, 1)))
4087 /* The ranges might be also adjacent between the maximum and
4088 minimum values of the given type. For
4089 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4090 return + [x + 1, y - 1]. */
4091 if (low0 == 0 && high1 == 0)
4093 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4094 integer_one_node, 1);
4095 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4096 integer_one_node, 0);
4097 if (low == 0 || high == 0)
4107 in_p = 0, low = low0, high = high0;
4109 in_p = 0, low = low0, high = high1;
/* Publish the merged range through the output parameters.  */
4112 *pin_p = in_p, *plow = low, *phigh = high;
4117 /* Subroutine of fold, looking inside expressions of the form
4118 A op B ? A : C, where ARG0 is A op B and ARG2 is C. This
4119 function is being used also to optimize A op B ? C : A, by
4120 reversing the comparison first.
4122 Return a folded expression whose code is not a COND_EXPR
4123 anymore, or NULL_TREE if no folding opportunity is found. */
4126 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg2)
/* ARG0 is the comparison; ARG00/ARG01 are its two operands.  */
4128 enum tree_code comp_code = TREE_CODE (arg0);
4129 tree arg00 = TREE_OPERAND (arg0, 0);
4130 tree arg01 = TREE_OPERAND (arg0, 1);
4134 /* If we have A op 0 ? A : -A, consider applying the following
4137 A == 0? A : -A same as -A
4138 A != 0? A : -A same as A
4139 A >= 0? A : -A same as abs (A)
4140 A > 0? A : -A same as abs (A)
4141 A <= 0? A : -A same as -abs (A)
4142 A < 0? A : -A same as -abs (A)
4144 None of these transformations work for modes with signed
4145 zeros. If A is +/-0, the first two transformations will
4146 change the sign of the result (from +0 to -0, or vice
4147 versa). The last four will fix the sign of the result,
4148 even though the original expressions could be positive or
4149 negative, depending on the sign of A.
4151 Note that all these transformations are correct if A is
4152 NaN, since the two alternatives (A and -A) are also NaNs. */
4153 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4154 ? real_zerop (arg01)
4155 : integer_zerop (arg01))
4156 && TREE_CODE (arg2) == NEGATE_EXPR
4157 && operand_equal_p (TREE_OPERAND (arg2, 0), arg00, 0))
4161 return fold_convert (type, negate_expr (arg00));
4163 return pedantic_non_lvalue (fold_convert (type, arg00));
/* ABS_EXPR requires a signed operand; convert unsigned A first.  */
4166 if (TYPE_UNSIGNED (TREE_TYPE (arg00)))
4167 arg00 = fold_convert (lang_hooks.types.signed_type
4168 (TREE_TYPE (arg00)), arg00);
4169 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg00), arg00));
4170 return pedantic_non_lvalue (fold_convert (type, tem));
/* Same as above but for the -abs (A) cases.  */
4173 if (TYPE_UNSIGNED (TREE_TYPE (arg00)))
4174 arg00 = fold_convert (lang_hooks.types.signed_type
4175 (TREE_TYPE (arg00)), arg00);
4176 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg00), arg00));
4177 return negate_expr (fold_convert (type, tem));
4182 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4183 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4184 both transformations are correct when A is NaN: A != 0
4185 is then true, and A == 0 is false. */
4187 if (integer_zerop (arg01) && integer_zerop (arg2))
4189 if (comp_code == NE_EXPR)
4190 return pedantic_non_lvalue (fold_convert (type, arg00));
4191 else if (comp_code == EQ_EXPR)
4192 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
4195 /* Try some transformations of A op B ? A : B.
4197 A == B? A : B same as B
4198 A != B? A : B same as A
4199 A >= B? A : B same as max (A, B)
4200 A > B? A : B same as max (B, A)
4201 A <= B? A : B same as min (A, B)
4202 A < B? A : B same as min (B, A)
4204 As above, these transformations don't work in the presence
4205 of signed zeros. For example, if A and B are zeros of
4206 opposite sign, the first two transformations will change
4207 the sign of the result. In the last four, the original
4208 expressions give different results for (A=+0, B=-0) and
4209 (A=-0, B=+0), but the transformed expressions do not.
4211 The first two transformations are correct if either A or B
4212 is a NaN. In the first transformation, the condition will
4213 be false, and B will indeed be chosen. In the case of the
4214 second transformation, the condition A != B will be true,
4215 and A will be chosen.
4217 The conversions to max() and min() are not correct if B is
4218 a number and A is not. The conditions in the original
4219 expressions will be false, so all four give B. The min()
4220 and max() versions would give a NaN instead. */
4221 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4223 tree comp_op0 = arg00;
4224 tree comp_op1 = arg01;
4225 tree comp_type = TREE_TYPE (comp_op0);
4227 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4228 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4238 return pedantic_non_lvalue (fold_convert (type, arg2));
4240 return pedantic_non_lvalue (fold_convert (type, arg00));
4243 /* In C++ a ?: expression can be an lvalue, so put the
4244 operand which will be used if they are equal first
4245 so that we can convert this back to the
4246 corresponding COND_EXPR. */
/* MIN_EXPR is only safe when NaNs need not be honored (see the
   big comment above).  */
4247 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00))))
4248 return pedantic_non_lvalue (
4249 fold_convert (type, fold (build2 (MIN_EXPR, comp_type,
4250 (comp_code == LE_EXPR
4251 ? comp_op0 : comp_op1),
4252 (comp_code == LE_EXPR
4253 ? comp_op1 : comp_op0)))));
4257 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00))))
4258 return pedantic_non_lvalue (
4259 fold_convert (type, fold (build2 (MAX_EXPR, comp_type,
4260 (comp_code == GE_EXPR
4261 ? comp_op0 : comp_op1),
4262 (comp_code == GE_EXPR
4263 ? comp_op1 : comp_op0)))));
4270 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4271 we might still be able to simplify this. For example,
4272 if C1 is one less or one more than C2, this might have started
4273 out as a MIN or MAX and been transformed by this function.
4274 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4276 if (INTEGRAL_TYPE_P (type)
4277 && TREE_CODE (arg01) == INTEGER_CST
4278 && TREE_CODE (arg2) == INTEGER_CST)
4282 /* We can replace A with C1 in this case. */
4283 arg00 = fold_convert (type, arg01);
4284 return fold (build3 (COND_EXPR, type, arg0, arg00, arg2));
4287 /* If C1 is C2 + 1, this is min(A, C2). */
/* The TYPE_MAX_VALUE check guards against C2 + 1 wrapping around.  */
4288 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4290 && operand_equal_p (arg01,
4291 const_binop (PLUS_EXPR, arg2,
4292 integer_one_node, 0),
4294 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4295 type, arg00, arg2)));
4299 /* If C1 is C2 - 1, this is min(A, C2). */
4300 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4302 && operand_equal_p (arg01,
4303 const_binop (MINUS_EXPR, arg2,
4304 integer_one_node, 0),
4306 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4307 type, arg00, arg2)));
4311 /* If C1 is C2 - 1, this is max(A, C2). */
4312 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4314 && operand_equal_p (arg01,
4315 const_binop (MINUS_EXPR, arg2,
4316 integer_one_node, 0),
4318 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4319 type, arg00, arg2)));
4323 /* If C1 is C2 + 1, this is max(A, C2). */
4324 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4326 && operand_equal_p (arg01,
4327 const_binop (PLUS_EXPR, arg2,
4328 integer_one_node, 0),
4330 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4331 type, arg00, arg2)));
4344 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
4345 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4348 /* EXP is some logical combination of boolean tests. See if we can
4349 merge it into some range test. Return the new tree if so. */
4352 fold_range_test (tree exp)
/* TRUTH_OR{,IF}_EXPR is handled by De Morgan: invert both side
   ranges, merge as an AND, then invert the final test.  */
4354 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4355 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4356 int in0_p, in1_p, in_p;
4357 tree low0, low1, low, high0, high1, high;
/* Decompose each operand into (expression, in/out flag, low, high).  */
4358 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4359 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4362 /* If this is an OR operation, invert both sides; we will invert
4363 again at the end. */
4365 in0_p = ! in0_p, in1_p = ! in1_p;
4367 /* If both expressions are the same, if we can merge the ranges, and we
4368 can build the range test, return it or it inverted. If one of the
4369 ranges is always true or always false, consider it to be the same
4370 expression as the other. */
4371 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4372 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4374 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4376 : rhs != 0 ? rhs : integer_zero_node,
4378 return or_op ? invert_truthvalue (tem) : tem;
4380 /* On machines where the branch cost is expensive, if this is a
4381 short-circuited branch and the underlying object on both sides
4382 is the same, make a non-short-circuit operation. */
4383 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4384 && lhs != 0 && rhs != 0
4385 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4386 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4387 && operand_equal_p (lhs, rhs, 0))
4389 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4390 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4391 which cases we can't do this. */
4392 if (simple_operand_p (lhs))
4393 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4394 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4395 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4396 TREE_OPERAND (exp, 1));
4398 else if (lang_hooks.decls.global_bindings_p () == 0
4399 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Share one evaluation of the common operand via SAVE_EXPR, then
   rebuild both range checks against it.  */
4401 tree common = save_expr (lhs);
4403 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4404 or_op ? ! in0_p : in0_p,
4406 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4407 or_op ? ! in1_p : in1_p,
4409 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4410 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4411 TREE_TYPE (exp), lhs, rhs);
4418 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4419 bit value. Arrange things so the extra bits will be set to zero if and
4420 only if C is signed-extended to its full width. If MASK is nonzero,
4421 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4424 unextend (tree c, int p, int unsignedp, tree mask)
4426 tree type = TREE_TYPE (c);
4427 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* No extra bits to adjust when the field fills the mode, or when the
   field is unsigned (extension is already zero-extension).  */
4430 if (p == modesize || unsignedp)
4433 /* We work by getting just the sign bit into the low-order bit, then
4434 into the high-order bit, then sign-extend. We then XOR that value
/* TEMP becomes 0 or 1: the sign bit of the P-bit value C.  */
4436 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4437 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4439 /* We must use a signed type in order to get an arithmetic right shift.
4440 However, we must also avoid introducing accidental overflows, so that
4441 a subsequent call to integer_zerop will work. Hence we must
4442 do the type conversion here. At this point, the constant is either
4443 zero or one, and the conversion to a signed type can never overflow.
4444 We could get an overflow if this conversion is done anywhere else. */
4445 if (TYPE_UNSIGNED (type))
4446 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
/* Left shift to the mode's sign position, then arithmetic right shift
   so the sign bit is replicated through all bits above position P-1.  */
4448 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4449 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* Restrict the adjustment to the bits selected by MASK.  */
4451 temp = const_binop (BIT_AND_EXPR, temp,
4452 fold_convert (TREE_TYPE (c), mask), 0);
4453 /* If necessary, convert the type back to match the type of C. */
4454 if (TYPE_UNSIGNED (type))
4455 temp = fold_convert (type, temp);
4457 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4460 /* Find ways of folding logical expressions of LHS and RHS:
4461 Try to merge two comparisons to the same innermost item.
4462 Look for range tests like "ch >= '0' && ch <= '9'".
4463 Look for combinations of simple terms on machines with expensive branches
4464 and evaluate the RHS unconditionally.
4466 For example, if we have p->a == 2 && p->b == 4 and we can make an
4467 object large enough to span both A and B, we can do this with a comparison
4468 against the object ANDed with the a mask.
4470 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4471 operations to do this with one comparison.
4473 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4474 function and the one above.
4476 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4477 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4479 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4482 We return the simplified tree or 0 if no optimization is possible. */
4485 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4487 /* If this is the "or" of two comparisons, we can do something if
4488 the comparisons are NE_EXPR. If this is the "and", we can do something
4489 if the comparisons are EQ_EXPR. I.e.,
4490 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4492 WANTED_CODE is this operation code. For single bit fields, we can
4493 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4494 comparison for one-bit fields. */
4496 enum tree_code wanted_code;
4497 enum tree_code lcode, rcode;
4498 tree ll_arg, lr_arg, rl_arg, rr_arg;
4499 tree ll_inner, lr_inner, rl_inner, rr_inner;
4500 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4501 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4502 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4503 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4504 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4505 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4506 enum machine_mode lnmode, rnmode;
4507 tree ll_mask, lr_mask, rl_mask, rr_mask;
4508 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4509 tree l_const, r_const;
4510 tree lntype, rntype, result;
4511 int first_bit, end_bit;
4514 /* Start by getting the comparison codes. Fail if anything is volatile.
4515 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4516 it were surrounded with a NE_EXPR. */
4518 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4521 lcode = TREE_CODE (lhs);
4522 rcode = TREE_CODE (rhs);
4524 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4526 lhs = build2 (NE_EXPR, truth_type, lhs, integer_zero_node);
4530 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4532 rhs = build2 (NE_EXPR, truth_type, rhs, integer_zero_node);
4536 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4539 ll_arg = TREE_OPERAND (lhs, 0);
4540 lr_arg = TREE_OPERAND (lhs, 1);
4541 rl_arg = TREE_OPERAND (rhs, 0);
4542 rr_arg = TREE_OPERAND (rhs, 1);
4544 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4545 if (simple_operand_p (ll_arg)
4546 && simple_operand_p (lr_arg))
4549 if (operand_equal_p (ll_arg, rl_arg, 0)
4550 && operand_equal_p (lr_arg, rr_arg, 0))
4552 result = combine_comparisons (code, lcode, rcode,
4553 truth_type, ll_arg, lr_arg);
4557 else if (operand_equal_p (ll_arg, rr_arg, 0)
4558 && operand_equal_p (lr_arg, rl_arg, 0))
4560 result = combine_comparisons (code, lcode,
4561 swap_tree_comparison (rcode),
4562 truth_type, ll_arg, lr_arg);
4568 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4569 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4571 /* If the RHS can be evaluated unconditionally and its operands are
4572 simple, it wins to evaluate the RHS unconditionally on machines
4573 with expensive branches. In this case, this isn't a comparison
4574 that can be merged. Avoid doing this if the RHS is a floating-point
4575 comparison since those can trap. */
4577 if (BRANCH_COST >= 2
4578 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4579 && simple_operand_p (rl_arg)
4580 && simple_operand_p (rr_arg))
4582 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4583 if (code == TRUTH_OR_EXPR
4584 && lcode == NE_EXPR && integer_zerop (lr_arg)
4585 && rcode == NE_EXPR && integer_zerop (rr_arg)
4586 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4587 return build2 (NE_EXPR, truth_type,
4588 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4590 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4592 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4593 if (code == TRUTH_AND_EXPR
4594 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4595 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4596 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4597 return build2 (EQ_EXPR, truth_type,
4598 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4600 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4602 return build2 (code, truth_type, lhs, rhs);
4605 /* See if the comparisons can be merged. Then get all the parameters for
4608 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4609 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4613 ll_inner = decode_field_reference (ll_arg,
4614 &ll_bitsize, &ll_bitpos, &ll_mode,
4615 &ll_unsignedp, &volatilep, &ll_mask,
4617 lr_inner = decode_field_reference (lr_arg,
4618 &lr_bitsize, &lr_bitpos, &lr_mode,
4619 &lr_unsignedp, &volatilep, &lr_mask,
4621 rl_inner = decode_field_reference (rl_arg,
4622 &rl_bitsize, &rl_bitpos, &rl_mode,
4623 &rl_unsignedp, &volatilep, &rl_mask,
4625 rr_inner = decode_field_reference (rr_arg,
4626 &rr_bitsize, &rr_bitpos, &rr_mode,
4627 &rr_unsignedp, &volatilep, &rr_mask,
4630 /* It must be true that the inner operation on the lhs of each
4631 comparison must be the same if we are to be able to do anything.
4632 Then see if we have constants. If not, the same must be true for
4634 if (volatilep || ll_inner == 0 || rl_inner == 0
4635 || ! operand_equal_p (ll_inner, rl_inner, 0))
4638 if (TREE_CODE (lr_arg) == INTEGER_CST
4639 && TREE_CODE (rr_arg) == INTEGER_CST)
4640 l_const = lr_arg, r_const = rr_arg;
4641 else if (lr_inner == 0 || rr_inner == 0
4642 || ! operand_equal_p (lr_inner, rr_inner, 0))
4645 l_const = r_const = 0;
4647 /* If either comparison code is not correct for our logical operation,
4648 fail. However, we can convert a one-bit comparison against zero into
4649 the opposite comparison against that bit being set in the field. */
4651 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4652 if (lcode != wanted_code)
4654 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4656 /* Make the left operand unsigned, since we are only interested
4657 in the value of one bit. Otherwise we are doing the wrong
4666 /* This is analogous to the code for l_const above. */
4667 if (rcode != wanted_code)
4669 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4678 /* After this point all optimizations will generate bit-field
4679 references, which we might not want. */
4680 if (! lang_hooks.can_use_bit_fields_p ())
4683 /* See if we can find a mode that contains both fields being compared on
4684 the left. If we can't, fail. Otherwise, update all constants and masks
4685 to be relative to a field of that size. */
4686 first_bit = MIN (ll_bitpos, rl_bitpos);
4687 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4688 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4689 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4691 if (lnmode == VOIDmode)
4694 lnbitsize = GET_MODE_BITSIZE (lnmode);
4695 lnbitpos = first_bit & ~ (lnbitsize - 1);
4696 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4697 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4699 if (BYTES_BIG_ENDIAN)
4701 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4702 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4705 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4706 size_int (xll_bitpos), 0);
4707 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4708 size_int (xrl_bitpos), 0);
4712 l_const = fold_convert (lntype, l_const);
4713 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4714 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4715 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4716 fold (build1 (BIT_NOT_EXPR,
4720 warning ("comparison is always %d", wanted_code == NE_EXPR);
4722 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4727 r_const = fold_convert (lntype, r_const);
4728 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4729 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4730 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4731 fold (build1 (BIT_NOT_EXPR,
4735 warning ("comparison is always %d", wanted_code == NE_EXPR);
4737 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4741 /* If the right sides are not constant, do the same for it. Also,
4742 disallow this optimization if a size or signedness mismatch occurs
4743 between the left and right sides. */
4746 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4747 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4748 /* Make sure the two fields on the right
4749 correspond to the left without being swapped. */
4750 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4753 first_bit = MIN (lr_bitpos, rr_bitpos);
4754 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4755 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4756 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4758 if (rnmode == VOIDmode)
4761 rnbitsize = GET_MODE_BITSIZE (rnmode);
4762 rnbitpos = first_bit & ~ (rnbitsize - 1);
4763 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4764 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4766 if (BYTES_BIG_ENDIAN)
4768 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4769 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4772 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4773 size_int (xlr_bitpos), 0);
4774 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4775 size_int (xrr_bitpos), 0);
4777 /* Make a mask that corresponds to both fields being compared.
4778 Do this for both items being compared. If the operands are the
4779 same size and the bits being compared are in the same position
4780 then we can do this by masking both and comparing the masked
4782 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4783 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4784 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4786 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4787 ll_unsignedp || rl_unsignedp);
4788 if (! all_ones_mask_p (ll_mask, lnbitsize))
4789 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4791 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4792 lr_unsignedp || rr_unsignedp);
4793 if (! all_ones_mask_p (lr_mask, rnbitsize))
4794 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4796 return build2 (wanted_code, truth_type, lhs, rhs);
4799 /* There is still another way we can do something: If both pairs of
4800 fields being compared are adjacent, we may be able to make a wider
4801 field containing them both.
4803 Note that we still must mask the lhs/rhs expressions. Furthermore,
4804 the mask must be shifted to account for the shift done by
4805 make_bit_field_ref. */
4806 if ((ll_bitsize + ll_bitpos == rl_bitpos
4807 && lr_bitsize + lr_bitpos == rr_bitpos)
4808 || (ll_bitpos == rl_bitpos + rl_bitsize
4809 && lr_bitpos == rr_bitpos + rr_bitsize))
4813 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4814 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4815 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4816 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4818 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4819 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4820 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4821 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4823 /* Convert to the smaller type before masking out unwanted bits. */
4825 if (lntype != rntype)
4827 if (lnbitsize > rnbitsize)
4829 lhs = fold_convert (rntype, lhs);
4830 ll_mask = fold_convert (rntype, ll_mask);
4833 else if (lnbitsize < rnbitsize)
4835 rhs = fold_convert (lntype, rhs);
4836 lr_mask = fold_convert (lntype, lr_mask);
4841 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4842 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4844 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4845 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4847 return build2 (wanted_code, truth_type, lhs, rhs);
4853 /* Handle the case of comparisons with constants. If there is something in
4854 common between the masks, those bits of the constants must be the same.
4855 If not, the condition is always false. Test for this to avoid generating
4856 incorrect code below. */
4857 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4858 if (! integer_zerop (result)
4859 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4860 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4862 if (wanted_code == NE_EXPR)
4864 warning ("`or' of unmatched not-equal tests is always 1");
4865 return constant_boolean_node (true, truth_type);
4869 warning ("`and' of mutually exclusive equal-tests is always 0");
4870 return constant_boolean_node (false, truth_type);
4874 /* Construct the expression we will return. First get the component
4875 reference we will make. Unless the mask is all ones the width of
4876 that field, perform the mask operation. Then compare with the
4878 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4879 ll_unsignedp || rl_unsignedp);
4881 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4882 if (! all_ones_mask_p (ll_mask, lnbitsize))
4883 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4885 return build2 (wanted_code, truth_type, result,
4886 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4889 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
        constant (COMP_CONST).  Where the two constants permit, fold the
        comparison into a comparison on the MIN/MAX operand itself, or into
        a constant result.
        NOTE(review): this listing elides several original lines here (the
        return type, some braces, `default:` handling and a few case
        labels); the visible code is preserved exactly as-is.  */
4893 optimize_minmax_comparison (tree t)
4895 tree type = TREE_TYPE (t);
4896 tree arg0 = TREE_OPERAND (t, 0);
4897 enum tree_code op_code;
4898 tree comp_const = TREE_OPERAND (t, 1);
4900 int consts_equal, consts_lt;
     /* Strip sign-preserving conversions so the MIN/MAX node is visible.  */
4903 STRIP_SIGN_NOPS (arg0);
4905 op_code = TREE_CODE (arg0);
4906 minmax_const = TREE_OPERAND (arg0, 1);
4907 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4908 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4909 inner = TREE_OPERAND (arg0, 0);
4911 /* If something does not permit us to optimize, return the original tree. */
4912 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4913 || TREE_CODE (comp_const) != INTEGER_CST
4914 || TREE_CONSTANT_OVERFLOW (comp_const)
4915 || TREE_CODE (minmax_const) != INTEGER_CST
4916 || TREE_CONSTANT_OVERFLOW (minmax_const))
4919 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4920 and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
4922 switch (TREE_CODE (t))
4924 case NE_EXPR: case LT_EXPR: case LE_EXPR:
     /* Invert the comparison, simplify that, then invert the result.  */
4926 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
     /* Presumably the (elided) GE_EXPR case: handled as EQ || GT, with
        each half simplified recursively — TODO confirm against the full
        source.  */
4930 fold (build2 (TRUTH_ORIF_EXPR, type,
4931 optimize_minmax_comparison
4932 (build2 (EQ_EXPR, type, arg0, comp_const)),
4933 optimize_minmax_comparison
4934 (build2 (GT_EXPR, type, arg0, comp_const))));
     /* EQ_EXPR handling (case label elided from this listing).  The
        comments below use 0 for MINMAX_CONST as a concrete example.  */
4937 if (op_code == MAX_EXPR && consts_equal)
4938 /* MAX (X, 0) == 0 -> X <= 0 */
4939 return fold (build2 (LE_EXPR, type, inner, comp_const));
4941 else if (op_code == MAX_EXPR && consts_lt)
4942 /* MAX (X, 0) == 5 -> X == 5 */
4943 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4945 else if (op_code == MAX_EXPR)
4946 /* MAX (X, 0) == -1 -> false */
4947 return omit_one_operand (type, integer_zero_node, inner);
4949 else if (consts_equal)
4950 /* MIN (X, 0) == 0 -> X >= 0 */
4951 return fold (build2 (GE_EXPR, type, inner, comp_const));
4954 /* MIN (X, 0) == 5 -> false */
4955 return omit_one_operand (type, integer_zero_node, inner);
4958 /* MIN (X, 0) == -1 -> X == -1 */
4959 return fold (build2 (EQ_EXPR, type, inner, comp_const));
     /* GT_EXPR handling (case label elided from this listing).  */
4962 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4963 /* MAX (X, 0) > 0 -> X > 0
4964 MAX (X, 0) > 5 -> X > 5 */
4965 return fold (build2 (GT_EXPR, type, inner, comp_const));
4967 else if (op_code == MAX_EXPR)
4968 /* MAX (X, 0) > -1 -> true */
4969 return omit_one_operand (type, integer_one_node, inner);
4971 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4972 /* MIN (X, 0) > 0 -> false
4973 MIN (X, 0) > 5 -> false */
4974 return omit_one_operand (type, integer_zero_node, inner);
4977 /* MIN (X, 0) > -1 -> X > -1 */
4978 return fold (build2 (GT_EXPR, type, inner, comp_const));
4985 /* T is an integer expression that is being multiplied, divided, or taken a
4986 modulus (CODE says which and what kind of divide or modulus) by a
4987 constant C. See if we can eliminate that operation by folding it with
4988 other operations already in T. WIDE_TYPE, if non-null, is a type that
4989 should be used for the computation if wider than our type.
4991 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4992 (X * 2) + (Y * 4). We must, however, be assured that either the original
4993 expression would not overflow or that overflow is undefined for the type
4994 in the language in question.
4996 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4997 the machine has a multiply-accumulate insn or that this is part of an
4998 addressing calculation.
5000 If we return a non-null expression, it is an equivalent form of the
5001 original computation, but need not be in the original type. */
5004 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5006 /* To avoid exponential search depth, refuse to allow recursion past
5007 three levels. Beyond that (1) it's highly unlikely that we'll find
5008 something interesting and (2) we've probably processed it before
5009 when we built the inner expression. */
     /* NOTE(review): the depth counter, its limit check and the restore of
        the counter around the call are in lines elided from this listing;
        only the delegation to extract_muldiv_1 is visible here.  */
5018 ret = extract_muldiv_1 (t, c, code, wide_type);
     /* Worker for extract_muldiv: performs one level of the walk over T,
        distributing the multiply/divide/modulus by constant C (per CODE)
        into T's operands where that is provably safe.  WIDE_TYPE is as for
        extract_muldiv.
        NOTE(review): this listing elides many original lines (the return
        type, `switch (tcode)` and several case labels, braces, and some
        `return 0;`/`break;` statements); the visible code is kept as-is.  */
5025 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5027 tree type = TREE_TYPE (t);
5028 enum tree_code tcode = TREE_CODE (t);
     /* Compute in WIDE_TYPE when it is strictly wider than T's type.  */
5029 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5030 > GET_MODE_SIZE (TYPE_MODE (type)))
5031 ? wide_type : type);
5033 int same_p = tcode == code;
5034 tree op0 = NULL_TREE, op1 = NULL_TREE;
5036 /* Don't deal with constants of zero here; they confuse the code below. */
5037 if (integer_zerop (c))
5040 if (TREE_CODE_CLASS (tcode) == '1')
5041 op0 = TREE_OPERAND (t, 0);
5043 if (TREE_CODE_CLASS (tcode) == '2')
5044 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5046 /* Note that we need not handle conditional operations here since fold
5047 already handles those cases. So just do arithmetic here. */
     /* (The INTEGER_CST case label is elided from this listing.)  */
5051 /* For a constant, we can always simplify if we are a multiply
5052 or (for divide and modulus) if it is a multiple of our constant. */
5053 if (code == MULT_EXPR
5054 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5055 return const_binop (code, fold_convert (ctype, t),
5056 fold_convert (ctype, c), 0);
5059 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5060 /* If op0 is an expression ... */
5061 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
5062 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
5063 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
5064 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
5065 /* ... and is unsigned, and its type is smaller than ctype,
5066 then we cannot pass through as widening. */
5067 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5068 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5069 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5070 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5071 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5072 /* ... or its type is larger than ctype,
5073 then we cannot pass through this truncation. */
5074 || (GET_MODE_SIZE (TYPE_MODE (ctype))
5075 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5076 /* ... or signedness changes for division or modulus,
5077 then we cannot pass through this conversion. */
5078 || (code != MULT_EXPR
5079 && (TYPE_UNSIGNED (ctype)
5080 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5083 /* Pass the constant down and see if we can make a simplification. If
5084 we can, replace this expression with the inner simplification for
5085 possible later conversion to our or some other type. */
5086 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5087 && TREE_CODE (t2) == INTEGER_CST
5088 && ! TREE_CONSTANT_OVERFLOW (t2)
5089 && (0 != (t1 = extract_muldiv (op0, t2, code,
5091 ? ctype : NULL_TREE))))
5095 case NEGATE_EXPR: case ABS_EXPR:
     /* Distribute into the operand; the operation commutes with NEGATE/ABS.  */
5096 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5097 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5100 case MIN_EXPR: case MAX_EXPR:
5101 /* If widening the type changes the signedness, then we can't perform
5102 this optimization as that changes the result. */
5103 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5106 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5107 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5108 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
     /* Multiplying/dividing by a negative constant swaps MIN and MAX.  */
5110 if (tree_int_cst_sgn (c) < 0)
5111 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5113 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5114 fold_convert (ctype, t2)));
5118 case LSHIFT_EXPR: case RSHIFT_EXPR:
5119 /* If the second operand is constant, this is a multiplication
5120 or floor division, by a power of two, so we can treat it that
5121 way unless the multiplier or divisor overflows. */
5122 if (TREE_CODE (op1) == INTEGER_CST
5123 /* const_binop may not detect overflow correctly,
5124 so check for it explicitly here. */
5125 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5126 && TREE_INT_CST_HIGH (op1) == 0
5127 && 0 != (t1 = fold_convert (ctype,
5128 const_binop (LSHIFT_EXPR,
5131 && ! TREE_OVERFLOW (t1))
5132 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5133 ? MULT_EXPR : FLOOR_DIV_EXPR,
5134 ctype, fold_convert (ctype, op0), t1),
5135 c, code, wide_type);
5138 case PLUS_EXPR: case MINUS_EXPR:
5139 /* See if we can eliminate the operation on both sides. If we can, we
5140 can return a new PLUS or MINUS. If we can't, the only remaining
5141 cases where we can do anything are if the second operand is a
     constant.  */
5143 t1 = extract_muldiv (op0, c, code, wide_type);
5144 t2 = extract_muldiv (op1, c, code, wide_type);
5145 if (t1 != 0 && t2 != 0
5146 && (code == MULT_EXPR
5147 /* If not multiplication, we can only do this if both operands
5148 are divisible by c. */
5149 || (multiple_of_p (ctype, op0, c)
5150 && multiple_of_p (ctype, op1, c))))
5151 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5152 fold_convert (ctype, t2)));
5154 /* If this was a subtraction, negate OP1 and set it to be an addition.
5155 This simplifies the logic below. */
5156 if (tcode == MINUS_EXPR)
5157 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5159 if (TREE_CODE (op1) != INTEGER_CST)
5162 /* If either OP1 or C are negative, this optimization is not safe for
5163 some of the division and remainder types while for others we need
5164 to change the code. */
5165 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5167 if (code == CEIL_DIV_EXPR)
5168 code = FLOOR_DIV_EXPR;
5169 else if (code == FLOOR_DIV_EXPR)
5170 code = CEIL_DIV_EXPR;
5171 else if (code != MULT_EXPR
5172 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5176 /* If it's a multiply or a division/modulus operation of a multiple
5177 of our constant, do the operation and verify it doesn't overflow. */
5178 if (code == MULT_EXPR
5179 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5181 op1 = const_binop (code, fold_convert (ctype, op1),
5182 fold_convert (ctype, c), 0);
5183 /* We allow the constant to overflow with wrapping semantics. */
5185 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5191 /* If we have an unsigned type that is not a sizetype, we cannot widen
5192 the operation since it will change the result if the original
5193 computation overflowed. */
5194 if (TYPE_UNSIGNED (ctype)
5195 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5199 /* If we were able to eliminate our operation from the first side,
5200 apply our operation to the second side and reform the PLUS. */
5201 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5202 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5204 /* The last case is if we are a multiply. In that case, we can
5205 apply the distributive law to commute the multiply and addition
5206 if the multiplication of the constants doesn't overflow. */
5207 if (code == MULT_EXPR)
5208 return fold (build2 (tcode, ctype,
5209 fold (build2 (code, ctype,
5210 fold_convert (ctype, op0),
5211 fold_convert (ctype, c))),
     /* (MULT_EXPR case label elided from this listing.)  */
5217 /* We have a special case here if we are doing something like
5218 (C * 8) % 4 since we know that's zero. */
5219 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5220 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5221 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5222 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5223 return omit_one_operand (type, integer_zero_node, op0);
5225 /* ... fall through ... */
5227 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5228 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5229 /* If we can extract our operation from the LHS, do so and return a
5230 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5231 do something only if the second operand is a constant. */
5233 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5234 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5235 fold_convert (ctype, op1)));
5236 else if (tcode == MULT_EXPR && code == MULT_EXPR
5237 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5238 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5239 fold_convert (ctype, t1)));
5240 else if (TREE_CODE (op1) != INTEGER_CST)
5243 /* If these are the same operation types, we can associate them
5244 assuming no overflow. */
5246 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5247 fold_convert (ctype, c), 0))
5248 && ! TREE_OVERFLOW (t1))
5249 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5251 /* If these operations "cancel" each other, we have the main
5252 optimizations of this pass, which occur when either constant is a
5253 multiple of the other, in which case we replace this with either an
5254 operation or CODE or TCODE.
5256 If we have an unsigned type that is not a sizetype, we cannot do
5257 this since it will change the result if the original computation
     overflowed.  */
5259 if ((! TYPE_UNSIGNED (ctype)
5260 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5262 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5263 || (tcode == MULT_EXPR
5264 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5265 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5267 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5268 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5269 fold_convert (ctype,
5270 const_binop (TRUNC_DIV_EXPR,
5272 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5273 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5274 fold_convert (ctype,
5275 const_binop (TRUNC_DIV_EXPR,
5287 /* Return a node which has the indicated constant VALUE (either 0 or
5288 1), and is of the indicated TYPE. */
5291 constant_boolean_node (int value, tree type)
     /* Reuse the shared constant nodes for the common result types.  */
5293 if (type == integer_type_node)
5294 return value ? integer_one_node : integer_zero_node;
5295 else if (type == boolean_type_node)
5296 return value ? boolean_true_node : boolean_false_node;
5297 else if (TREE_CODE (type) == BOOLEAN_TYPE)
     /* A front end with its own boolean type converts via its hook.  */
5298 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5299 : integer_zero_node);
     /* Otherwise build a fresh INTEGER_CST of the requested type.
        NOTE(review): the trailing `return t;` and closing brace are in
        lines elided from this listing.  */
5302 tree t = build_int_2 (value, 0);
5304 TREE_TYPE (t) = type;
5309 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5310 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5311 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5312 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5313 COND is the first argument to CODE; otherwise (as in the example
5314 given here), it is the second argument. TYPE is the type of the
5315 original expression. Return NULL_TREE if no simplification is
     possible.  */
5319 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5320 tree cond, tree arg, int cond_first_p)
5322 tree test, true_value, false_value;
5323 tree lhs = NULL_TREE;
5324 tree rhs = NULL_TREE;
5326 /* This transformation is only worthwhile if we don't have to wrap
5327 arg in a SAVE_EXPR, and the operation can be simplified on at least
5328 one of the branches once it is pushed inside the COND_EXPR. */
5329 if (!TREE_CONSTANT (arg))
5332 if (TREE_CODE (cond) == COND_EXPR)
5334 test = TREE_OPERAND (cond, 0);
5335 true_value = TREE_OPERAND (cond, 1);
5336 false_value = TREE_OPERAND (cond, 2);
5337 /* If this operand throws an expression, then it does not make
5338 sense to try to perform a logical or arithmetic operation
     involving it.  */
5340 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5342 if (VOID_TYPE_P (TREE_TYPE (false_value)))
     /* Otherwise COND is itself a comparison; use it as the test and
        fold against constant true/false arms.  */
5347 tree testtype = TREE_TYPE (cond);
5349 true_value = constant_boolean_node (true, testtype);
5350 false_value = constant_boolean_node (false, testtype);
     /* Apply CODE to ARG and each arm, keeping operand order per
        COND_FIRST_P.  */
5354 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5355 : build2 (code, type, arg, true_value));
5357 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5358 : build2 (code, type, arg, false_value));
5360 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5361 return fold_convert (type, test);
5365 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5367 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5368 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5369 ADDEND is the same as X.
5371 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5372 and finite. The problematic cases are when X is zero, and its mode
5373 has signed zeros. In the case of rounding towards -infinity,
5374 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5375 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5378 fold_real_zero_addition_p (tree type, tree addend, int negate)
5380 if (!real_zerop (addend))
5383 /* Don't allow the fold with -fsignaling-nans. */
5384 if (HONOR_SNANS (TYPE_MODE (type)))
5387 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5388 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5391 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5392 if (TREE_CODE (addend) == REAL_CST
5393 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5396 /* The mode has signed zeros, and we have to honor their sign.
5397 In this situation, there is only one case we can return true for.
5398 X - 0 is the same as X unless rounding towards -infinity is
     in effect.  */
5400 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5403 /* Subroutine of fold() that checks comparisons of built-in math
5404 functions against real constants.
5406 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5407 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5408 is the type of the result and ARG0 and ARG1 are the operands of the
5409 comparison. ARG1 must be a TREE_REAL_CST.
5411 The function returns the constant folded tree if a simplification
5412 can be made, and NULL_TREE otherwise. */
5415 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5416 tree type, tree arg0, tree arg1)
     /* Only sqrt-family builtins are handled here.  */
5420 if (BUILTIN_SQRT_P (fcode))
     /* ARG is the argument of the sqrt call, i.e. x in sqrt(x) CODE y.  */
5422 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5423 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5425 c = TREE_REAL_CST (arg1);
5426 if (REAL_VALUE_NEGATIVE (c))
5428 /* sqrt(x) < y is always false, if y is negative. */
5429 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5430 return omit_one_operand (type, integer_zero_node, arg);
5432 /* sqrt(x) > y is always true, if y is negative and we
5433 don't care about NaNs, i.e. negative values of x. */
5434 if (code == NE_EXPR || !HONOR_NANS (mode))
5435 return omit_one_operand (type, integer_one_node, arg);
5437 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5438 return fold (build2 (GE_EXPR, type, arg,
5439 build_real (TREE_TYPE (arg), dconst0)));
5441 else if (code == GT_EXPR || code == GE_EXPR)
     /* c2 = c*c, computed and rounded in the comparison's mode.  */
5445 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5446 real_convert (&c2, mode, &c2);
5448 if (REAL_VALUE_ISINF (c2))
5450 /* sqrt(x) > y is x == +Inf, when y is very large. */
5451 if (HONOR_INFINITIES (mode))
5452 return fold (build2 (EQ_EXPR, type, arg,
5453 build_real (TREE_TYPE (arg), c2)));
5455 /* sqrt(x) > y is always false, when y is very large
5456 and we don't care about infinities. */
5457 return omit_one_operand (type, integer_zero_node, arg);
5460 /* sqrt(x) > c is the same as x > c*c. */
5461 return fold (build2 (code, type, arg,
5462 build_real (TREE_TYPE (arg), c2)));
5464 else if (code == LT_EXPR || code == LE_EXPR)
5468 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5469 real_convert (&c2, mode, &c2);
5471 if (REAL_VALUE_ISINF (c2))
5473 /* sqrt(x) < y is always true, when y is a very large
5474 value and we don't care about NaNs or Infinities. */
5475 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5476 return omit_one_operand (type, integer_one_node, arg);
5478 /* sqrt(x) < y is x != +Inf when y is very large and we
5479 don't care about NaNs. */
5480 if (! HONOR_NANS (mode))
5481 return fold (build2 (NE_EXPR, type, arg,
5482 build_real (TREE_TYPE (arg), c2)));
5484 /* sqrt(x) < y is x >= 0 when y is very large and we
5485 don't care about Infinities. */
5486 if (! HONOR_INFINITIES (mode))
5487 return fold (build2 (GE_EXPR, type, arg,
5488 build_real (TREE_TYPE (arg), dconst0)));
5490 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
     /* ARG is used twice below, so it must be re-evaluatable; bail out
        (elided return) when we cannot safely wrap it in a SAVE_EXPR.  */
5491 if (lang_hooks.decls.global_bindings_p () != 0
5492 || CONTAINS_PLACEHOLDER_P (arg))
5495 arg = save_expr (arg);
5496 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5497 fold (build2 (GE_EXPR, type, arg,
5498 build_real (TREE_TYPE (arg),
5500 fold (build2 (NE_EXPR, type, arg,
5501 build_real (TREE_TYPE (arg),
5505 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5506 if (! HONOR_NANS (mode))
5507 return fold (build2 (code, type, arg,
5508 build_real (TREE_TYPE (arg), c2)));
5510 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5511 if (lang_hooks.decls.global_bindings_p () == 0
5512 && ! CONTAINS_PLACEHOLDER_P (arg))
5514 arg = save_expr (arg);
5515 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5516 fold (build2 (GE_EXPR, type, arg,
5517 build_real (TREE_TYPE (arg),
5519 fold (build2 (code, type, arg,
5520 build_real (TREE_TYPE (arg),
5529 /* Subroutine of fold() that optimizes comparisons against Infinities,
5530 either +Inf or -Inf.
5532 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5533 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5534 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5536 The function returns the constant folded tree if a simplification
5537 can be made, and NULL_TREE otherwise. */
5540 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5542 enum machine_mode mode;
5543 REAL_VALUE_TYPE max;
5547 mode = TYPE_MODE (TREE_TYPE (arg0));
5549 /* For negative infinity swap the sense of the comparison. */
5550 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5552 code = swap_tree_comparison (code);
     /* NOTE(review): the `switch (code)` and its case labels are in lines
        elided from this listing; the comments below identify each case.  */
5557 /* x > +Inf is always false, if we ignore sNaNs. */
5558 if (HONOR_SNANS (mode))
5560 return omit_one_operand (type, integer_zero_node, arg0);
5563 /* x <= +Inf is always true, if we don't care about NaNs. */
5564 if (! HONOR_NANS (mode))
5565 return omit_one_operand (type, integer_one_node, arg0);
5567 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5568 if (lang_hooks.decls.global_bindings_p () == 0
5569 && ! CONTAINS_PLACEHOLDER_P (arg0))
5571 arg0 = save_expr (arg0);
5572 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5578 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5579 real_maxval (&max, neg, mode);
5580 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5581 arg0, build_real (TREE_TYPE (arg0), max)));
5584 /* x < +Inf is always equal to x <= DBL_MAX. */
5585 real_maxval (&max, neg, mode);
5586 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5587 arg0, build_real (TREE_TYPE (arg0), max)));
5590 /* x != +Inf is always equal to !(x > DBL_MAX). */
5591 real_maxval (&max, neg, mode);
5592 if (! HONOR_NANS (mode))
5593 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5594 arg0, build_real (TREE_TYPE (arg0), max)));
5596 /* The transformation below creates non-gimple code and thus is
5597 not appropriate if we are in gimple form. */
5601 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5602 arg0, build_real (TREE_TYPE (arg0), max)));
5603 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5612 /* Subroutine of fold() that optimizes comparisons of a division by
5613 a nonzero integer constant against an integer constant, i.e.
5616 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5617 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5618 are the operands of the comparison. ARG1 must be an INTEGER_CST
     (its value is read via TREE_INT_CST_LOW/HIGH below).
5620 The function returns the constant folded tree if a simplification
5621 can be made, and NULL_TREE otherwise. */
5624 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5626 tree prod, tmp, hi, lo;
5627 tree arg00 = TREE_OPERAND (arg0, 0);
5628 tree arg01 = TREE_OPERAND (arg0, 1);
5629 unsigned HOST_WIDE_INT lpart;
5630 HOST_WIDE_INT hpart;
5633 /* We have to do this the hard way to detect unsigned overflow.
5634 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5635 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5636 TREE_INT_CST_HIGH (arg01),
5637 TREE_INT_CST_LOW (arg1),
5638 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5639 prod = build_int_2 (lpart, hpart);
5640 TREE_TYPE (prod) = TREE_TYPE (arg00);
     /* Record overflow both from the multiply and from truncation to the
        target type.  */
5641 TREE_OVERFLOW (prod) = force_fit_type (prod, overflow)
5642 || TREE_INT_CST_HIGH (prod) != hpart
5643 || TREE_INT_CST_LOW (prod) != lpart;
5644 TREE_CONSTANT_OVERFLOW (prod) = TREE_OVERFLOW (prod);
5646 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
     /* Unsigned: the quotient equals ARG1 exactly for numerators in
        [prod, prod + arg01 - 1].  */
5648 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5651 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5652 overflow = add_double (TREE_INT_CST_LOW (prod),
5653 TREE_INT_CST_HIGH (prod),
5654 TREE_INT_CST_LOW (tmp),
5655 TREE_INT_CST_HIGH (tmp),
5657 hi = build_int_2 (lpart, hpart);
5658 TREE_TYPE (hi) = TREE_TYPE (arg00);
5659 TREE_OVERFLOW (hi) = force_fit_type (hi, overflow)
5660 || TREE_INT_CST_HIGH (hi) != hpart
5661 || TREE_INT_CST_LOW (hi) != lpart
5662 || TREE_OVERFLOW (prod);
5663 TREE_CONSTANT_OVERFLOW (hi) = TREE_OVERFLOW (hi);
5665 else if (tree_int_cst_sgn (arg01) >= 0)
     /* Signed division by a positive divisor: range depends on the sign
        of ARG1 (truncating division rounds toward zero).  */
5667 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5668 switch (tree_int_cst_sgn (arg1))
5671 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5676 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5681 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
     /* Negative divisor: mirror of the case above.  */
5691 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5692 switch (tree_int_cst_sgn (arg1))
5695 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5700 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5705 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
     /* Rewrite ARG00/ARG01 CODE ARG1 as a range check [LO, HI] on ARG00.
        NOTE(review): the `switch (code)` and its case labels (presumably
        EQ/NE/LT/LE/GT/GE) are in lines elided from this listing; the
        comparison codes below identify each case.  */
5717 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5718 return omit_one_operand (type, integer_zero_node, arg00);
5719 if (TREE_OVERFLOW (hi))
5720 return fold (build2 (GE_EXPR, type, arg00, lo));
5721 if (TREE_OVERFLOW (lo))
5722 return fold (build2 (LE_EXPR, type, arg00, hi));
5723 return build_range_check (type, arg00, 1, lo, hi);
5726 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5727 return omit_one_operand (type, integer_one_node, arg00);
5728 if (TREE_OVERFLOW (hi))
5729 return fold (build2 (LT_EXPR, type, arg00, lo));
5730 if (TREE_OVERFLOW (lo))
5731 return fold (build2 (GT_EXPR, type, arg00, hi));
5732 return build_range_check (type, arg00, 0, lo, hi);
5735 if (TREE_OVERFLOW (lo))
5736 return omit_one_operand (type, integer_zero_node, arg00);
5737 return fold (build2 (LT_EXPR, type, arg00, lo));
5740 if (TREE_OVERFLOW (hi))
5741 return omit_one_operand (type, integer_one_node, arg00);
5742 return fold (build2 (LE_EXPR, type, arg00, hi));
5745 if (TREE_OVERFLOW (hi))
5746 return omit_one_operand (type, integer_zero_node, arg00);
5747 return fold (build2 (GT_EXPR, type, arg00, hi));
5750 if (TREE_OVERFLOW (lo))
5751 return omit_one_operand (type, integer_one_node, arg00);
5752 return fold (build2 (GE_EXPR, type, arg00, lo));
5762 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5763 equality/inequality test, then return a simplified form of
5764 the test using shifts and logical operations. Otherwise return
5765 NULL. TYPE is the desired result type. */
/* NOTE(review): this listing is elided -- the storage-class/return-type
   line, the RESULT_TYPE parameter, several braces, and the final return
   statements are not visible here.  The comments below describe only
   what the visible lines establish.  */
5768 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5771 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
/* Strip a logical negation by descending into its operand and inverting
   the comparison code below.  */
5773 if (code == TRUTH_NOT_EXPR)
5775 code = TREE_CODE (arg0);
5776 if (code != NE_EXPR && code != EQ_EXPR)
5779 /* Extract the arguments of the EQ/NE. */
5780 arg1 = TREE_OPERAND (arg0, 1);
5781 arg0 = TREE_OPERAND (arg0, 0);
5783 /* This requires us to invert the code. */
5784 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5787 /* If this is testing a single bit, we can optimize the test. */
/* Pattern matched here: (A & C) ==/!= 0 where C is a power of two.  */
5788 if ((code == NE_EXPR || code == EQ_EXPR)
5789 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5790 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5792 tree inner = TREE_OPERAND (arg0, 0);
5793 tree type = TREE_TYPE (arg0);
/* BITNUM is log2 of the single-bit mask C.  */
5794 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5795 enum machine_mode operand_mode = TYPE_MODE (type);
5797 tree signed_type, unsigned_type, intermediate_type;
5800 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5801 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5802 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5803 if (arg00 != NULL_TREE
5804 /* This is only a win if casting to a signed type is cheap,
5805 i.e. when arg00's type is not a partial mode. */
5806 && TYPE_PRECISION (TREE_TYPE (arg00))
5807 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5809 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
/* EQ -> A >= 0, NE -> A < 0, comparing in the signed variant of
   arg00's type.  */
5810 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5811 result_type, fold_convert (stype, arg00),
5812 fold_convert (stype, integer_zero_node)));
5815 /* Otherwise we have (A & C) != 0 where C is a single bit,
5816 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5817 Similarly for (A & C) == 0. */
5819 /* If INNER is a right shift of a constant and it plus BITNUM does
5820 not overflow, adjust BITNUM and INNER. */
5821 if (TREE_CODE (inner) == RSHIFT_EXPR
5822 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5823 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5824 && bitnum < TYPE_PRECISION (type)
5825 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5826 bitnum - TYPE_PRECISION (type)))
/* Fold the inner shift count into BITNUM so only one shift remains.  */
5828 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5829 inner = TREE_OPERAND (inner, 0);
5832 /* If we are going to be able to omit the AND below, we must do our
5833 operations as unsigned. If we must use the AND, we have a choice.
5834 Normally unsigned is faster, but for some machines signed is. */
5835 #ifdef LOAD_EXTEND_OP
5836 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
/* NOTE(review): the #else branch that sets ops_unsigned when
   LOAD_EXTEND_OP is not defined is elided from this listing.  */
5841 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5842 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5843 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5844 inner = fold_convert (intermediate_type, inner);
/* Shift the tested bit down to bit position 0.  */
5847 inner = build2 (RSHIFT_EXPR, intermediate_type,
5848 inner, size_int (bitnum));
/* For the == 0 form the result must be inverted; XOR with 1 does that
   on the low bit before masking.  */
5850 if (code == EQ_EXPR)
5851 inner = build2 (BIT_XOR_EXPR, intermediate_type,
5852 inner, integer_one_node);
5854 /* Put the AND last so it can combine with more things. */
5855 inner = build2 (BIT_AND_EXPR, intermediate_type,
5856 inner, integer_one_node);
5858 /* Make sure to return the proper type. */
5859 inner = fold_convert (result_type, inner);
5866 /* Check whether we are allowed to reorder operands arg0 and arg1,
5867 such that the evaluation of arg1 occurs before arg0. */
/* NOTE(review): the early-return lines after each test below are elided
   from this listing; presumably they return true -- confirm against the
   full source.  */
5870 reorder_operands_p (tree arg0, tree arg1)
/* If the language imposes no fixed evaluation order, any reordering
   is permitted.  */
5872 if (! flag_evaluation_order)
/* Constants carry no evaluation side effects, so they may always be
   reordered.  */
5874 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
/* Otherwise reordering is safe only when neither operand has side
   effects whose order could be observed.  */
5876 return ! TREE_SIDE_EFFECTS (arg0)
5877 && ! TREE_SIDE_EFFECTS (arg1);
5880 /* Test whether it is preferable to swap two operands, ARG0 and
5881 ARG1, for example because ARG0 is an integer constant and ARG1
5882 isn't. If REORDER is true, only recommend swapping if we can
5883 evaluate the operands in reverse order. */
/* NOTE(review): the return statements following each test below are
   elided from this listing.  The paired tests apparently canonicalize
   constants into the second operand position, checked from most
   specific (INTEGER_CST) to most general (TREE_CONSTANT) -- confirm
   against the full source.  */
5886 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
/* Look through sign-preserving conversions before classifying.  */
5888 STRIP_SIGN_NOPS (arg0);
5889 STRIP_SIGN_NOPS (arg1);
5891 if (TREE_CODE (arg1) == INTEGER_CST)
5893 if (TREE_CODE (arg0) == INTEGER_CST)
5896 if (TREE_CODE (arg1) == REAL_CST)
5898 if (TREE_CODE (arg0) == REAL_CST)
5901 if (TREE_CODE (arg1) == COMPLEX_CST)
5903 if (TREE_CODE (arg0) == COMPLEX_CST)
5906 if (TREE_CONSTANT (arg1))
5908 if (TREE_CONSTANT (arg0))
/* Under a mandated evaluation order, operands with side effects must
   not be swapped (results of these two guards are elided).  */
5914 if (reorder && flag_evaluation_order
5915 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5923 if (reorder && flag_evaluation_order
5924 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5932 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5933 for commutative and comparison operators. Ensuring a canonical
5934 form allows the optimizers to find additional redundancies without
5935 having to explicitly check for both orderings. */
5936 if (TREE_CODE (arg0) == SSA_NAME
5937 && TREE_CODE (arg1) == SSA_NAME
5938 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5944 /* Perform constant folding and related simplification of EXPR.
5945 The related simplifications include x*1 => x, x*0 => 0, etc.,
5946 and application of the associative law.
5947 NOP_EXPR conversions may be removed freely (as long as we
5948 are careful not to change the type of the overall expression).
5949 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5950 but we can constant-fold them if they have constant operands. */
5952 #ifdef ENABLE_FOLD_CHECKING
5953 # define fold(x) fold_1 (x)
5954 static tree fold_1 (tree);
5960 const tree t = expr;
5961 const tree type = TREE_TYPE (expr);
5962 tree t1 = NULL_TREE;
5964 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5965 enum tree_code code = TREE_CODE (t);
5966 int kind = TREE_CODE_CLASS (code);
5968 /* WINS will be nonzero when the switch is done
5969 if all operands are constant. */
5972 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5973 Likewise for a SAVE_EXPR that's already been evaluated. */
5974 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5977 /* Return right away if a constant. */
5981 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5985 /* Special case for conversion ops that can have fixed point args. */
5986 arg0 = TREE_OPERAND (t, 0);
5988 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5990 STRIP_SIGN_NOPS (arg0);
5992 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5993 subop = TREE_REALPART (arg0);
5997 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5998 && TREE_CODE (subop) != REAL_CST)
5999 /* Note that TREE_CONSTANT isn't enough:
6000 static var addresses are constant but we can't
6001 do arithmetic on them. */
6004 else if (IS_EXPR_CODE_CLASS (kind))
6006 int len = first_rtl_op (code);
6008 for (i = 0; i < len; i++)
6010 tree op = TREE_OPERAND (t, i);
6014 continue; /* Valid for CALL_EXPR, at least. */
6016 /* Strip any conversions that don't change the mode. This is
6017 safe for every expression, except for a comparison expression
6018 because its signedness is derived from its operands. So, in
6019 the latter case, only strip conversions that don't change the
6022 Note that this is done as an internal manipulation within the
6023 constant folder, in order to find the simplest representation
6024 of the arguments so that their form can be studied. In any
6025 cases, the appropriate type conversions should be put back in
6026 the tree that will get out of the constant folder. */
6028 STRIP_SIGN_NOPS (op);
6032 if (TREE_CODE (op) == COMPLEX_CST)
6033 subop = TREE_REALPART (op);
6037 if (TREE_CODE (subop) != INTEGER_CST
6038 && TREE_CODE (subop) != REAL_CST)
6039 /* Note that TREE_CONSTANT isn't enough:
6040 static var addresses are constant but we can't
6041 do arithmetic on them. */
6051 /* If this is a commutative operation, and ARG0 is a constant, move it
6052 to ARG1 to reduce the number of tests below. */
6053 if (commutative_tree_code (code)
6054 && tree_swap_operands_p (arg0, arg1, true))
6055 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6056 TREE_OPERAND (t, 0)));
6058 /* Now WINS is set as described above,
6059 ARG0 is the first operand of EXPR,
6060 and ARG1 is the second operand (if it has more than one operand).
6062 First check for cases where an arithmetic operation is applied to a
6063 compound, conditional, or comparison operation. Push the arithmetic
6064 operation inside the compound or conditional to see if any folding
6065 can then be done. Convert comparison to conditional for this purpose.
6066 This also optimizes non-constant cases that used to be done in
6069 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6070 one of the operands is a comparison and the other is a comparison, a
6071 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6072 code below would make the expression more complex. Change it to a
6073 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6074 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6076 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6077 || code == EQ_EXPR || code == NE_EXPR)
6078 && ((truth_value_p (TREE_CODE (arg0))
6079 && (truth_value_p (TREE_CODE (arg1))
6080 || (TREE_CODE (arg1) == BIT_AND_EXPR
6081 && integer_onep (TREE_OPERAND (arg1, 1)))))
6082 || (truth_value_p (TREE_CODE (arg1))
6083 && (truth_value_p (TREE_CODE (arg0))
6084 || (TREE_CODE (arg0) == BIT_AND_EXPR
6085 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6087 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6088 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6090 type, fold_convert (boolean_type_node, arg0),
6091 fold_convert (boolean_type_node, arg1)));
6093 if (code == EQ_EXPR)
6094 tem = invert_truthvalue (tem);
6099 if (TREE_CODE_CLASS (code) == '1')
6101 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6102 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6103 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6104 else if (TREE_CODE (arg0) == COND_EXPR)
6106 tree arg01 = TREE_OPERAND (arg0, 1);
6107 tree arg02 = TREE_OPERAND (arg0, 2);
6108 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6109 arg01 = fold (build1 (code, type, arg01));
6110 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6111 arg02 = fold (build1 (code, type, arg02));
6112 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6115 /* If this was a conversion, and all we did was to move into
6116 inside the COND_EXPR, bring it back out. But leave it if
6117 it is a conversion from integer to integer and the
6118 result precision is no wider than a word since such a
6119 conversion is cheap and may be optimized away by combine,
6120 while it couldn't if it were outside the COND_EXPR. Then return
6121 so we don't get into an infinite recursion loop taking the
6122 conversion out and then back in. */
6124 if ((code == NOP_EXPR || code == CONVERT_EXPR
6125 || code == NON_LVALUE_EXPR)
6126 && TREE_CODE (tem) == COND_EXPR
6127 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6128 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6129 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6130 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6131 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6132 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6133 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6135 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6136 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
6137 tem = build1 (code, type,
6139 TREE_TYPE (TREE_OPERAND
6140 (TREE_OPERAND (tem, 1), 0)),
6141 TREE_OPERAND (tem, 0),
6142 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6143 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6146 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6148 if (TREE_CODE (type) == BOOLEAN_TYPE)
6150 arg0 = copy_node (arg0);
6151 TREE_TYPE (arg0) = type;
6154 else if (TREE_CODE (type) != INTEGER_TYPE)
6155 return fold (build3 (COND_EXPR, type, arg0,
6156 fold (build1 (code, type,
6158 fold (build1 (code, type,
6159 integer_zero_node))));
6162 else if (TREE_CODE_CLASS (code) == '<'
6163 && TREE_CODE (arg0) == COMPOUND_EXPR)
6164 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6165 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6166 else if (TREE_CODE_CLASS (code) == '<'
6167 && TREE_CODE (arg1) == COMPOUND_EXPR)
6168 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6169 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6170 else if (TREE_CODE_CLASS (code) == '2'
6171 || TREE_CODE_CLASS (code) == '<')
6173 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6174 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6175 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6177 if (TREE_CODE (arg1) == COMPOUND_EXPR
6178 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6179 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6180 fold (build2 (code, type,
6181 arg0, TREE_OPERAND (arg1, 1))));
6183 if (TREE_CODE (arg0) == COND_EXPR
6184 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6186 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6187 /*cond_first_p=*/1);
6188 if (tem != NULL_TREE)
6192 if (TREE_CODE (arg1) == COND_EXPR
6193 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
6195 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6196 /*cond_first_p=*/0);
6197 if (tem != NULL_TREE)
6205 return fold (DECL_INITIAL (t));
6210 case FIX_TRUNC_EXPR:
6212 case FIX_FLOOR_EXPR:
6213 case FIX_ROUND_EXPR:
6214 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6215 return TREE_OPERAND (t, 0);
6217 /* Handle cases of two conversions in a row. */
6218 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6219 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6221 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6222 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6223 int inside_int = INTEGRAL_TYPE_P (inside_type);
6224 int inside_ptr = POINTER_TYPE_P (inside_type);
6225 int inside_float = FLOAT_TYPE_P (inside_type);
6226 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6227 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6228 int inter_int = INTEGRAL_TYPE_P (inter_type);
6229 int inter_ptr = POINTER_TYPE_P (inter_type);
6230 int inter_float = FLOAT_TYPE_P (inter_type);
6231 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6232 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6233 int final_int = INTEGRAL_TYPE_P (type);
6234 int final_ptr = POINTER_TYPE_P (type);
6235 int final_float = FLOAT_TYPE_P (type);
6236 unsigned int final_prec = TYPE_PRECISION (type);
6237 int final_unsignedp = TYPE_UNSIGNED (type);
6239 /* In addition to the cases of two conversions in a row
6240 handled below, if we are converting something to its own
6241 type via an object of identical or wider precision, neither
6242 conversion is needed. */
6243 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6244 && ((inter_int && final_int) || (inter_float && final_float))
6245 && inter_prec >= final_prec)
6246 return fold (build1 (code, type,
6247 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6249 /* Likewise, if the intermediate and final types are either both
6250 float or both integer, we don't need the middle conversion if
6251 it is wider than the final type and doesn't change the signedness
6252 (for integers). Avoid this if the final type is a pointer
6253 since then we sometimes need the inner conversion. Likewise if
6254 the outer has a precision not equal to the size of its mode. */
6255 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6256 || (inter_float && inside_float))
6257 && inter_prec >= inside_prec
6258 && (inter_float || inter_unsignedp == inside_unsignedp)
6259 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6260 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6262 return fold (build1 (code, type,
6263 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6265 /* If we have a sign-extension of a zero-extended value, we can
6266 replace that by a single zero-extension. */
6267 if (inside_int && inter_int && final_int
6268 && inside_prec < inter_prec && inter_prec < final_prec
6269 && inside_unsignedp && !inter_unsignedp)
6270 return fold (build1 (code, type,
6271 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6273 /* Two conversions in a row are not needed unless:
6274 - some conversion is floating-point (overstrict for now), or
6275 - the intermediate type is narrower than both initial and
6277 - the intermediate type and innermost type differ in signedness,
6278 and the outermost type is wider than the intermediate, or
6279 - the initial type is a pointer type and the precisions of the
6280 intermediate and final types differ, or
6281 - the final type is a pointer type and the precisions of the
6282 initial and intermediate types differ. */
6283 if (! inside_float && ! inter_float && ! final_float
6284 && (inter_prec > inside_prec || inter_prec > final_prec)
6285 && ! (inside_int && inter_int
6286 && inter_unsignedp != inside_unsignedp
6287 && inter_prec < final_prec)
6288 && ((inter_unsignedp && inter_prec > inside_prec)
6289 == (final_unsignedp && final_prec > inter_prec))
6290 && ! (inside_ptr && inter_prec != final_prec)
6291 && ! (final_ptr && inside_prec != inter_prec)
6292 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6293 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6295 return fold (build1 (code, type,
6296 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6299 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6300 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6301 /* Detect assigning a bitfield. */
6302 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6303 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6305 /* Don't leave an assignment inside a conversion
6306 unless assigning a bitfield. */
6307 tree prev = TREE_OPERAND (t, 0);
6308 tem = copy_node (t);
6309 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6310 /* First do the assignment, then return converted constant. */
6311 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6312 TREE_NO_WARNING (tem) = 1;
6313 TREE_USED (tem) = 1;
6317 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6318 constants (if x has signed type, the sign bit cannot be set
6319 in c). This folds extension into the BIT_AND_EXPR. */
6320 if (INTEGRAL_TYPE_P (type)
6321 && TREE_CODE (type) != BOOLEAN_TYPE
6322 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6323 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6325 tree and = TREE_OPERAND (t, 0);
6326 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6329 if (TYPE_UNSIGNED (TREE_TYPE (and))
6330 || (TYPE_PRECISION (type)
6331 <= TYPE_PRECISION (TREE_TYPE (and))))
6333 else if (TYPE_PRECISION (TREE_TYPE (and1))
6334 <= HOST_BITS_PER_WIDE_INT
6335 && host_integerp (and1, 1))
6337 unsigned HOST_WIDE_INT cst;
6339 cst = tree_low_cst (and1, 1);
6340 cst &= (HOST_WIDE_INT) -1
6341 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6342 change = (cst == 0);
6343 #ifdef LOAD_EXTEND_OP
6345 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6348 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6349 and0 = fold_convert (uns, and0);
6350 and1 = fold_convert (uns, and1);
6355 return fold (build2 (BIT_AND_EXPR, type,
6356 fold_convert (type, and0),
6357 fold_convert (type, and1)));
6360 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6361 T2 being pointers to types of the same size. */
6362 if (POINTER_TYPE_P (TREE_TYPE (t))
6363 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
6364 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6365 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6367 tree arg00 = TREE_OPERAND (arg0, 0);
6368 tree t0 = TREE_TYPE (t);
6369 tree t1 = TREE_TYPE (arg00);
6370 tree tt0 = TREE_TYPE (t0);
6371 tree tt1 = TREE_TYPE (t1);
6372 tree s0 = TYPE_SIZE (tt0);
6373 tree s1 = TYPE_SIZE (tt1);
6375 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6376 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6377 TREE_OPERAND (arg0, 1));
6380 tem = fold_convert_const (code, type, arg0);
6381 return tem ? tem : t;
6383 case VIEW_CONVERT_EXPR:
6384 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6385 return build1 (VIEW_CONVERT_EXPR, type,
6386 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6390 if (TREE_CODE (arg0) == CONSTRUCTOR
6391 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6393 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6395 return TREE_VALUE (m);
6400 if (TREE_CONSTANT (t) != wins)
6402 tem = copy_node (t);
6403 TREE_CONSTANT (tem) = wins;
6404 TREE_INVARIANT (tem) = wins;
6410 if (negate_expr_p (arg0))
6411 return fold_convert (type, negate_expr (arg0));
6415 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6416 return fold_abs_const (arg0, type);
6417 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6418 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6419 /* Convert fabs((double)float) into (double)fabsf(float). */
6420 else if (TREE_CODE (arg0) == NOP_EXPR
6421 && TREE_CODE (type) == REAL_TYPE)
6423 tree targ0 = strip_float_extensions (arg0);
6425 return fold_convert (type, fold (build1 (ABS_EXPR,
6429 else if (tree_expr_nonnegative_p (arg0))
6434 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6435 return fold_convert (type, arg0);
6436 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6437 return build2 (COMPLEX_EXPR, type,
6438 TREE_OPERAND (arg0, 0),
6439 negate_expr (TREE_OPERAND (arg0, 1)));
6440 else if (TREE_CODE (arg0) == COMPLEX_CST)
6441 return build_complex (type, TREE_REALPART (arg0),
6442 negate_expr (TREE_IMAGPART (arg0)));
6443 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6444 return fold (build2 (TREE_CODE (arg0), type,
6445 fold (build1 (CONJ_EXPR, type,
6446 TREE_OPERAND (arg0, 0))),
6447 fold (build1 (CONJ_EXPR, type,
6448 TREE_OPERAND (arg0, 1)))));
6449 else if (TREE_CODE (arg0) == CONJ_EXPR)
6450 return TREE_OPERAND (arg0, 0);
6454 if (TREE_CODE (arg0) == INTEGER_CST)
6455 return fold_not_const (arg0, type);
6456 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6457 return TREE_OPERAND (arg0, 0);
6461 /* A + (-B) -> A - B */
6462 if (TREE_CODE (arg1) == NEGATE_EXPR)
6463 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6464 /* (-A) + B -> B - A */
6465 if (TREE_CODE (arg0) == NEGATE_EXPR
6466 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6467 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6468 if (! FLOAT_TYPE_P (type))
6470 if (integer_zerop (arg1))
6471 return non_lvalue (fold_convert (type, arg0));
6473 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6474 with a constant, and the two constants have no bits in common,
6475 we should treat this as a BIT_IOR_EXPR since this may produce more
6477 if (TREE_CODE (arg0) == BIT_AND_EXPR
6478 && TREE_CODE (arg1) == BIT_AND_EXPR
6479 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6480 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6481 && integer_zerop (const_binop (BIT_AND_EXPR,
6482 TREE_OPERAND (arg0, 1),
6483 TREE_OPERAND (arg1, 1), 0)))
6485 code = BIT_IOR_EXPR;
6489 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6490 (plus (plus (mult) (mult)) (foo)) so that we can
6491 take advantage of the factoring cases below. */
6492 if ((TREE_CODE (arg0) == PLUS_EXPR
6493 && TREE_CODE (arg1) == MULT_EXPR)
6494 || (TREE_CODE (arg1) == PLUS_EXPR
6495 && TREE_CODE (arg0) == MULT_EXPR))
6497 tree parg0, parg1, parg, marg;
6499 if (TREE_CODE (arg0) == PLUS_EXPR)
6500 parg = arg0, marg = arg1;
6502 parg = arg1, marg = arg0;
6503 parg0 = TREE_OPERAND (parg, 0);
6504 parg1 = TREE_OPERAND (parg, 1);
6508 if (TREE_CODE (parg0) == MULT_EXPR
6509 && TREE_CODE (parg1) != MULT_EXPR)
6510 return fold (build2 (PLUS_EXPR, type,
6511 fold (build2 (PLUS_EXPR, type,
6512 fold_convert (type, parg0),
6513 fold_convert (type, marg))),
6514 fold_convert (type, parg1)));
6515 if (TREE_CODE (parg0) != MULT_EXPR
6516 && TREE_CODE (parg1) == MULT_EXPR)
6517 return fold (build2 (PLUS_EXPR, type,
6518 fold (build2 (PLUS_EXPR, type,
6519 fold_convert (type, parg1),
6520 fold_convert (type, marg))),
6521 fold_convert (type, parg0)));
6524 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6526 tree arg00, arg01, arg10, arg11;
6527 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6529 /* (A * C) + (B * C) -> (A+B) * C.
6530 We are most concerned about the case where C is a constant,
6531 but other combinations show up during loop reduction. Since
6532 it is not difficult, try all four possibilities. */
6534 arg00 = TREE_OPERAND (arg0, 0);
6535 arg01 = TREE_OPERAND (arg0, 1);
6536 arg10 = TREE_OPERAND (arg1, 0);
6537 arg11 = TREE_OPERAND (arg1, 1);
6540 if (operand_equal_p (arg01, arg11, 0))
6541 same = arg01, alt0 = arg00, alt1 = arg10;
6542 else if (operand_equal_p (arg00, arg10, 0))
6543 same = arg00, alt0 = arg01, alt1 = arg11;
6544 else if (operand_equal_p (arg00, arg11, 0))
6545 same = arg00, alt0 = arg01, alt1 = arg10;
6546 else if (operand_equal_p (arg01, arg10, 0))
6547 same = arg01, alt0 = arg00, alt1 = arg11;
6549 /* No identical multiplicands; see if we can find a common
6550 power-of-two factor in non-power-of-two multiplies. This
6551 can help in multi-dimensional array access. */
6552 else if (TREE_CODE (arg01) == INTEGER_CST
6553 && TREE_CODE (arg11) == INTEGER_CST
6554 && TREE_INT_CST_HIGH (arg01) == 0
6555 && TREE_INT_CST_HIGH (arg11) == 0)
6557 HOST_WIDE_INT int01, int11, tmp;
6558 int01 = TREE_INT_CST_LOW (arg01);
6559 int11 = TREE_INT_CST_LOW (arg11);
6561 /* Move min of absolute values to int11. */
6562 if ((int01 >= 0 ? int01 : -int01)
6563 < (int11 >= 0 ? int11 : -int11))
6565 tmp = int01, int01 = int11, int11 = tmp;
6566 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6567 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6570 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6572 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6573 build_int_2 (int01 / int11, 0)));
6580 return fold (build2 (MULT_EXPR, type,
6581 fold (build2 (PLUS_EXPR, type,
6588 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6589 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6590 return non_lvalue (fold_convert (type, arg0));
6592 /* Likewise if the operands are reversed. */
6593 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6594 return non_lvalue (fold_convert (type, arg1));
6596 /* Convert x+x into x*2.0. */
6597 if (operand_equal_p (arg0, arg1, 0)
6598 && SCALAR_FLOAT_TYPE_P (type))
6599 return fold (build2 (MULT_EXPR, type, arg0,
6600 build_real (type, dconst2)));
6602 /* Convert x*c+x into x*(c+1). */
6603 if (flag_unsafe_math_optimizations
6604 && TREE_CODE (arg0) == MULT_EXPR
6605 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6606 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6607 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6611 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6612 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6613 return fold (build2 (MULT_EXPR, type, arg1,
6614 build_real (type, c)));
6617 /* Convert x+x*c into x*(c+1). */
6618 if (flag_unsafe_math_optimizations
6619 && TREE_CODE (arg1) == MULT_EXPR
6620 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6621 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6622 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6626 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6627 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6628 return fold (build2 (MULT_EXPR, type, arg0,
6629 build_real (type, c)));
6632 /* Convert x*c1+x*c2 into x*(c1+c2). */
6633 if (flag_unsafe_math_optimizations
6634 && TREE_CODE (arg0) == MULT_EXPR
6635 && TREE_CODE (arg1) == MULT_EXPR
6636 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6637 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6638 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6639 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6640 && operand_equal_p (TREE_OPERAND (arg0, 0),
6641 TREE_OPERAND (arg1, 0), 0))
6643 REAL_VALUE_TYPE c1, c2;
6645 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6646 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6647 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6648 return fold (build2 (MULT_EXPR, type,
6649 TREE_OPERAND (arg0, 0),
6650 build_real (type, c1)));
6652 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6653 if (flag_unsafe_math_optimizations
6654 && TREE_CODE (arg1) == PLUS_EXPR
6655 && TREE_CODE (arg0) != MULT_EXPR)
6657 tree tree10 = TREE_OPERAND (arg1, 0);
6658 tree tree11 = TREE_OPERAND (arg1, 1);
6659 if (TREE_CODE (tree11) == MULT_EXPR
6660 && TREE_CODE (tree10) == MULT_EXPR)
6663 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6664 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6667 /* Convert (b*c + d*e) + a into b*c + (d*e +a) */
6668 if (flag_unsafe_math_optimizations
6669 && TREE_CODE (arg0) == PLUS_EXPR
6670 && TREE_CODE (arg1) != MULT_EXPR)
6672 tree tree00 = TREE_OPERAND (arg0, 0);
6673 tree tree01 = TREE_OPERAND (arg0, 1);
6674 if (TREE_CODE (tree01) == MULT_EXPR
6675 && TREE_CODE (tree00) == MULT_EXPR)
6678 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6679 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6685 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6686 is a rotate of A by C1 bits. */
6687 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6688 is a rotate of A by B bits. */
6690 enum tree_code code0, code1;
6691 code0 = TREE_CODE (arg0);
6692 code1 = TREE_CODE (arg1);
6693 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6694 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6695 && operand_equal_p (TREE_OPERAND (arg0, 0),
6696 TREE_OPERAND (arg1, 0), 0)
6697 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6699 tree tree01, tree11;
6700 enum tree_code code01, code11;
6702 tree01 = TREE_OPERAND (arg0, 1);
6703 tree11 = TREE_OPERAND (arg1, 1);
6704 STRIP_NOPS (tree01);
6705 STRIP_NOPS (tree11);
6706 code01 = TREE_CODE (tree01);
6707 code11 = TREE_CODE (tree11);
6708 if (code01 == INTEGER_CST
6709 && code11 == INTEGER_CST
6710 && TREE_INT_CST_HIGH (tree01) == 0
6711 && TREE_INT_CST_HIGH (tree11) == 0
6712 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6713 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6714 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6715 code0 == LSHIFT_EXPR ? tree01 : tree11);
6716 else if (code11 == MINUS_EXPR)
6718 tree tree110, tree111;
6719 tree110 = TREE_OPERAND (tree11, 0);
6720 tree111 = TREE_OPERAND (tree11, 1);
6721 STRIP_NOPS (tree110);
6722 STRIP_NOPS (tree111);
6723 if (TREE_CODE (tree110) == INTEGER_CST
6724 && 0 == compare_tree_int (tree110,
6726 (TREE_TYPE (TREE_OPERAND
6728 && operand_equal_p (tree01, tree111, 0))
6729 return build2 ((code0 == LSHIFT_EXPR
6732 type, TREE_OPERAND (arg0, 0), tree01);
6734 else if (code01 == MINUS_EXPR)
6736 tree tree010, tree011;
6737 tree010 = TREE_OPERAND (tree01, 0);
6738 tree011 = TREE_OPERAND (tree01, 1);
6739 STRIP_NOPS (tree010);
6740 STRIP_NOPS (tree011);
6741 if (TREE_CODE (tree010) == INTEGER_CST
6742 && 0 == compare_tree_int (tree010,
6744 (TREE_TYPE (TREE_OPERAND
6746 && operand_equal_p (tree11, tree011, 0))
6747 return build2 ((code0 != LSHIFT_EXPR
6750 type, TREE_OPERAND (arg0, 0), tree11);
6756 /* In most languages, can't associate operations on floats through
6757 parentheses. Rather than remember where the parentheses were, we
6758 don't associate floats at all, unless the user has specified
6759 -funsafe-math-optimizations. */
6762 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6764 tree var0, con0, lit0, minus_lit0;
6765 tree var1, con1, lit1, minus_lit1;
6767 /* Split both trees into variables, constants, and literals. Then
6768 associate each group together, the constants with literals,
6769 then the result with variables. This increases the chances of
6770 literals being recombined later and of generating relocatable
6771 expressions for the sum of a constant and literal. */
6772 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6773 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6774 code == MINUS_EXPR);
6776 /* Only do something if we found more than two objects. Otherwise,
6777 nothing has changed and we risk infinite recursion. */
6778 if (2 < ((var0 != 0) + (var1 != 0)
6779 + (con0 != 0) + (con1 != 0)
6780 + (lit0 != 0) + (lit1 != 0)
6781 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6783 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6784 if (code == MINUS_EXPR)
6787 var0 = associate_trees (var0, var1, code, type);
6788 con0 = associate_trees (con0, con1, code, type);
6789 lit0 = associate_trees (lit0, lit1, code, type);
6790 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6792 /* Preserve the MINUS_EXPR if the negative part of the literal is
6793 greater than the positive part. Otherwise, the multiplicative
6794 folding code (i.e extract_muldiv) may be fooled in case
6795 unsigned constants are subtracted, like in the following
6796 example: ((X*2 + 4) - 8U)/2. */
6797 if (minus_lit0 && lit0)
6799 if (TREE_CODE (lit0) == INTEGER_CST
6800 && TREE_CODE (minus_lit0) == INTEGER_CST
6801 && tree_int_cst_lt (lit0, minus_lit0))
6803 minus_lit0 = associate_trees (minus_lit0, lit0,
6809 lit0 = associate_trees (lit0, minus_lit0,
6817 return fold_convert (type,
6818 associate_trees (var0, minus_lit0,
6822 con0 = associate_trees (con0, minus_lit0,
6824 return fold_convert (type,
6825 associate_trees (var0, con0,
6830 con0 = associate_trees (con0, lit0, code, type);
6831 return fold_convert (type, associate_trees (var0, con0,
6838 t1 = const_binop (code, arg0, arg1, 0);
6839 if (t1 != NULL_TREE)
6841 /* The return value should always have
6842 the same type as the original expression. */
6843 if (TREE_TYPE (t1) != type)
6844 t1 = fold_convert (type, t1);
6851 /* A - (-B) -> A + B */
6852 if (TREE_CODE (arg1) == NEGATE_EXPR)
6853 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6854 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6855 if (TREE_CODE (arg0) == NEGATE_EXPR
6856 && (FLOAT_TYPE_P (type)
6857 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6858 && negate_expr_p (arg1)
6859 && reorder_operands_p (arg0, arg1))
6860 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6861 TREE_OPERAND (arg0, 0)));
6863 if (! FLOAT_TYPE_P (type))
6865 if (! wins && integer_zerop (arg0))
6866 return negate_expr (fold_convert (type, arg1));
6867 if (integer_zerop (arg1))
6868 return non_lvalue (fold_convert (type, arg0));
6870 /* Fold A - (A & B) into ~B & A. */
6871 if (!TREE_SIDE_EFFECTS (arg0)
6872 && TREE_CODE (arg1) == BIT_AND_EXPR)
6874 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6875 return fold (build2 (BIT_AND_EXPR, type,
6876 fold (build1 (BIT_NOT_EXPR, type,
6877 TREE_OPERAND (arg1, 0))),
6879 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6880 return fold (build2 (BIT_AND_EXPR, type,
6881 fold (build1 (BIT_NOT_EXPR, type,
6882 TREE_OPERAND (arg1, 1))),
6886 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6887 any power of 2 minus 1. */
6888 if (TREE_CODE (arg0) == BIT_AND_EXPR
6889 && TREE_CODE (arg1) == BIT_AND_EXPR
6890 && operand_equal_p (TREE_OPERAND (arg0, 0),
6891 TREE_OPERAND (arg1, 0), 0))
6893 tree mask0 = TREE_OPERAND (arg0, 1);
6894 tree mask1 = TREE_OPERAND (arg1, 1);
6895 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6897 if (operand_equal_p (tem, mask1, 0))
6899 tem = fold (build2 (BIT_XOR_EXPR, type,
6900 TREE_OPERAND (arg0, 0), mask1));
6901 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6906 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6907 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6908 return non_lvalue (fold_convert (type, arg0));
6910 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6911 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6912 (-ARG1 + ARG0) reduces to -ARG1. */
6913 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6914 return negate_expr (fold_convert (type, arg1));
6916 /* Fold &x - &x. This can happen from &x.foo - &x.
6917 This is unsafe for certain floats even in non-IEEE formats.
6918 In IEEE, it is unsafe because it does wrong for NaNs.
6919 Also note that operand_equal_p is always false if an operand
6922 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6923 && operand_equal_p (arg0, arg1, 0))
6924 return fold_convert (type, integer_zero_node);
6926 /* A - B -> A + (-B) if B is easily negatable. */
6927 if (!wins && negate_expr_p (arg1)
6928 && (FLOAT_TYPE_P (type)
6929 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6930 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6932 if (TREE_CODE (arg0) == MULT_EXPR
6933 && TREE_CODE (arg1) == MULT_EXPR
6934 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6936 /* (A * C) - (B * C) -> (A-B) * C. */
6937 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6938 TREE_OPERAND (arg1, 1), 0))
6939 return fold (build2 (MULT_EXPR, type,
6940 fold (build2 (MINUS_EXPR, type,
6941 TREE_OPERAND (arg0, 0),
6942 TREE_OPERAND (arg1, 0))),
6943 TREE_OPERAND (arg0, 1)));
6944 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6945 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6946 TREE_OPERAND (arg1, 0), 0))
6947 return fold (build2 (MULT_EXPR, type,
6948 TREE_OPERAND (arg0, 0),
6949 fold (build2 (MINUS_EXPR, type,
6950 TREE_OPERAND (arg0, 1),
6951 TREE_OPERAND (arg1, 1)))));
6957 /* (-A) * (-B) -> A * B */
6958 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6959 return fold (build2 (MULT_EXPR, type,
6960 TREE_OPERAND (arg0, 0),
6961 negate_expr (arg1)));
6962 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6963 return fold (build2 (MULT_EXPR, type,
6965 TREE_OPERAND (arg1, 0)));
6967 if (! FLOAT_TYPE_P (type))
6969 if (integer_zerop (arg1))
6970 return omit_one_operand (type, arg1, arg0);
6971 if (integer_onep (arg1))
6972 return non_lvalue (fold_convert (type, arg0));
6974 /* (a * (1 << b)) is (a << b) */
6975 if (TREE_CODE (arg1) == LSHIFT_EXPR
6976 && integer_onep (TREE_OPERAND (arg1, 0)))
6977 return fold (build2 (LSHIFT_EXPR, type, arg0,
6978 TREE_OPERAND (arg1, 1)));
6979 if (TREE_CODE (arg0) == LSHIFT_EXPR
6980 && integer_onep (TREE_OPERAND (arg0, 0)))
6981 return fold (build2 (LSHIFT_EXPR, type, arg1,
6982 TREE_OPERAND (arg0, 1)));
6984 if (TREE_CODE (arg1) == INTEGER_CST
6985 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6986 fold_convert (type, arg1),
6988 return fold_convert (type, tem);
6993 /* Maybe fold x * 0 to 0. The expressions aren't the same
6994 when x is NaN, since x * 0 is also NaN. Nor are they the
6995 same in modes with signed zeros, since multiplying a
6996 negative value by 0 gives -0, not +0. */
6997 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6998 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6999 && real_zerop (arg1))
7000 return omit_one_operand (type, arg1, arg0);
7001 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7002 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7003 && real_onep (arg1))
7004 return non_lvalue (fold_convert (type, arg0));
7006 /* Transform x * -1.0 into -x. */
7007 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7008 && real_minus_onep (arg1))
7009 return fold_convert (type, negate_expr (arg0));
7011 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7012 if (flag_unsafe_math_optimizations
7013 && TREE_CODE (arg0) == RDIV_EXPR
7014 && TREE_CODE (arg1) == REAL_CST
7015 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7017 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7020 return fold (build2 (RDIV_EXPR, type, tem,
7021 TREE_OPERAND (arg0, 1)));
7024 if (flag_unsafe_math_optimizations)
7026 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7027 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7029 /* Optimizations of root(...)*root(...). */
7030 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7032 tree rootfn, arg, arglist;
7033 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7034 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7036 /* Optimize sqrt(x)*sqrt(x) as x. */
7037 if (BUILTIN_SQRT_P (fcode0)
7038 && operand_equal_p (arg00, arg10, 0)
7039 && ! HONOR_SNANS (TYPE_MODE (type)))
7042 /* Optimize root(x)*root(y) as root(x*y). */
7043 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7044 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7045 arglist = build_tree_list (NULL_TREE, arg);
7046 return build_function_call_expr (rootfn, arglist);
7049 /* Optimize expN(x)*expN(y) as expN(x+y). */
7050 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7052 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7053 tree arg = build2 (PLUS_EXPR, type,
7054 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7055 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7056 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7057 return build_function_call_expr (expfn, arglist);
7060 /* Optimizations of pow(...)*pow(...). */
7061 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7062 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7063 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7065 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7066 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7068 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7069 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7072 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7073 if (operand_equal_p (arg01, arg11, 0))
7075 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7076 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7077 tree arglist = tree_cons (NULL_TREE, fold (arg),
7078 build_tree_list (NULL_TREE,
7080 return build_function_call_expr (powfn, arglist);
7083 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7084 if (operand_equal_p (arg00, arg10, 0))
7086 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7087 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7088 tree arglist = tree_cons (NULL_TREE, arg00,
7089 build_tree_list (NULL_TREE,
7091 return build_function_call_expr (powfn, arglist);
7095 /* Optimize tan(x)*cos(x) as sin(x). */
7096 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7097 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7098 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7099 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7100 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7101 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7102 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7103 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7105 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7107 if (sinfn != NULL_TREE)
7108 return build_function_call_expr (sinfn,
7109 TREE_OPERAND (arg0, 1));
7112 /* Optimize x*pow(x,c) as pow(x,c+1). */
7113 if (fcode1 == BUILT_IN_POW
7114 || fcode1 == BUILT_IN_POWF
7115 || fcode1 == BUILT_IN_POWL)
7117 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7118 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7120 if (TREE_CODE (arg11) == REAL_CST
7121 && ! TREE_CONSTANT_OVERFLOW (arg11)
7122 && operand_equal_p (arg0, arg10, 0))
7124 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7128 c = TREE_REAL_CST (arg11);
7129 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7130 arg = build_real (type, c);
7131 arglist = build_tree_list (NULL_TREE, arg);
7132 arglist = tree_cons (NULL_TREE, arg0, arglist);
7133 return build_function_call_expr (powfn, arglist);
7137 /* Optimize pow(x,c)*x as pow(x,c+1). */
7138 if (fcode0 == BUILT_IN_POW
7139 || fcode0 == BUILT_IN_POWF
7140 || fcode0 == BUILT_IN_POWL)
7142 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7143 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7145 if (TREE_CODE (arg01) == REAL_CST
7146 && ! TREE_CONSTANT_OVERFLOW (arg01)
7147 && operand_equal_p (arg1, arg00, 0))
7149 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7153 c = TREE_REAL_CST (arg01);
7154 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7155 arg = build_real (type, c);
7156 arglist = build_tree_list (NULL_TREE, arg);
7157 arglist = tree_cons (NULL_TREE, arg1, arglist);
7158 return build_function_call_expr (powfn, arglist);
7162 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7164 && operand_equal_p (arg0, arg1, 0))
7166 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7170 tree arg = build_real (type, dconst2);
7171 tree arglist = build_tree_list (NULL_TREE, arg);
7172 arglist = tree_cons (NULL_TREE, arg0, arglist);
7173 return build_function_call_expr (powfn, arglist);
7182 if (integer_all_onesp (arg1))
7183 return omit_one_operand (type, arg1, arg0);
7184 if (integer_zerop (arg1))
7185 return non_lvalue (fold_convert (type, arg0));
7186 if (operand_equal_p (arg0, arg1, 0))
7187 return non_lvalue (fold_convert (type, arg0));
7188 t1 = distribute_bit_expr (code, type, arg0, arg1);
7189 if (t1 != NULL_TREE)
7192 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7194 This results in more efficient code for machines without a NAND
7195 instruction. Combine will canonicalize to the first form
7196 which will allow use of NAND instructions provided by the
7197 backend if they exist. */
7198 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7199 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7201 return fold (build1 (BIT_NOT_EXPR, type,
7202 build2 (BIT_AND_EXPR, type,
7203 TREE_OPERAND (arg0, 0),
7204 TREE_OPERAND (arg1, 0))));
7207 /* See if this can be simplified into a rotate first. If that
7208 is unsuccessful continue in the association code. */
7212 if (integer_zerop (arg1))
7213 return non_lvalue (fold_convert (type, arg0));
7214 if (integer_all_onesp (arg1))
7215 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7216 if (operand_equal_p (arg0, arg1, 0))
7217 return omit_one_operand (type, integer_zero_node, arg0);
7219 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7220 with a constant, and the two constants have no bits in common,
7221 we should treat this as a BIT_IOR_EXPR since this may produce more
7223 if (TREE_CODE (arg0) == BIT_AND_EXPR
7224 && TREE_CODE (arg1) == BIT_AND_EXPR
7225 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7226 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7227 && integer_zerop (const_binop (BIT_AND_EXPR,
7228 TREE_OPERAND (arg0, 1),
7229 TREE_OPERAND (arg1, 1), 0)))
7231 code = BIT_IOR_EXPR;
7235 /* See if this can be simplified into a rotate first. If that
7236 is unsuccessful continue in the association code. */
7240 if (integer_all_onesp (arg1))
7241 return non_lvalue (fold_convert (type, arg0));
7242 if (integer_zerop (arg1))
7243 return omit_one_operand (type, arg1, arg0);
7244 if (operand_equal_p (arg0, arg1, 0))
7245 return non_lvalue (fold_convert (type, arg0));
7246 t1 = distribute_bit_expr (code, type, arg0, arg1);
7247 if (t1 != NULL_TREE)
7249 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7250 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7251 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7254 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7256 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7257 && (~TREE_INT_CST_LOW (arg1)
7258 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7259 return fold_convert (type, TREE_OPERAND (arg0, 0));
7262 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7264 This results in more efficient code for machines without a NOR
7265 instruction. Combine will canonicalize to the first form
7266 which will allow use of NOR instructions provided by the
7267 backend if they exist. */
7268 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7269 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7271 return fold (build1 (BIT_NOT_EXPR, type,
7272 build2 (BIT_IOR_EXPR, type,
7273 TREE_OPERAND (arg0, 0),
7274 TREE_OPERAND (arg1, 0))));
7280 /* Don't touch a floating-point divide by zero unless the mode
7281 of the constant can represent infinity. */
7282 if (TREE_CODE (arg1) == REAL_CST
7283 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7284 && real_zerop (arg1))
7287 /* (-A) / (-B) -> A / B */
7288 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7289 return fold (build2 (RDIV_EXPR, type,
7290 TREE_OPERAND (arg0, 0),
7291 negate_expr (arg1)));
7292 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7293 return fold (build2 (RDIV_EXPR, type,
7295 TREE_OPERAND (arg1, 0)));
7297 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7298 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7299 && real_onep (arg1))
7300 return non_lvalue (fold_convert (type, arg0));
7302 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7303 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7304 && real_minus_onep (arg1))
7305 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7307 /* If ARG1 is a constant, we can convert this to a multiply by the
7308 reciprocal. This does not have the same rounding properties,
7309 so only do this if -funsafe-math-optimizations. We can actually
7310 always safely do it if ARG1 is a power of two, but it's hard to
7311 tell if it is or not in a portable manner. */
7312 if (TREE_CODE (arg1) == REAL_CST)
7314 if (flag_unsafe_math_optimizations
7315 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7317 return fold (build2 (MULT_EXPR, type, arg0, tem));
7318 /* Find the reciprocal if optimizing and the result is exact. */
7322 r = TREE_REAL_CST (arg1);
7323 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7325 tem = build_real (type, r);
7326 return fold (build2 (MULT_EXPR, type, arg0, tem));
7330 /* Convert A/B/C to A/(B*C). */
7331 if (flag_unsafe_math_optimizations
7332 && TREE_CODE (arg0) == RDIV_EXPR)
7333 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7334 fold (build2 (MULT_EXPR, type,
7335 TREE_OPERAND (arg0, 1), arg1))));
7337 /* Convert A/(B/C) to (A/B)*C. */
7338 if (flag_unsafe_math_optimizations
7339 && TREE_CODE (arg1) == RDIV_EXPR)
7340 return fold (build2 (MULT_EXPR, type,
7341 fold (build2 (RDIV_EXPR, type, arg0,
7342 TREE_OPERAND (arg1, 0))),
7343 TREE_OPERAND (arg1, 1)));
7345 /* Convert C1/(X*C2) into (C1/C2)/X. */
7346 if (flag_unsafe_math_optimizations
7347 && TREE_CODE (arg1) == MULT_EXPR
7348 && TREE_CODE (arg0) == REAL_CST
7349 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7351 tree tem = const_binop (RDIV_EXPR, arg0,
7352 TREE_OPERAND (arg1, 1), 0);
7354 return fold (build2 (RDIV_EXPR, type, tem,
7355 TREE_OPERAND (arg1, 0)));
7358 if (flag_unsafe_math_optimizations)
7360 enum built_in_function fcode = builtin_mathfn_code (arg1);
7361 /* Optimize x/expN(y) into x*expN(-y). */
7362 if (BUILTIN_EXPONENT_P (fcode))
7364 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7365 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7366 tree arglist = build_tree_list (NULL_TREE,
7367 fold_convert (type, arg));
7368 arg1 = build_function_call_expr (expfn, arglist);
7369 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7372 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7373 if (fcode == BUILT_IN_POW
7374 || fcode == BUILT_IN_POWF
7375 || fcode == BUILT_IN_POWL)
7377 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7378 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7379 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7380 tree neg11 = fold_convert (type, negate_expr (arg11));
7381 tree arglist = tree_cons(NULL_TREE, arg10,
7382 build_tree_list (NULL_TREE, neg11));
7383 arg1 = build_function_call_expr (powfn, arglist);
7384 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7388 if (flag_unsafe_math_optimizations)
7390 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7391 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7393 /* Optimize sin(x)/cos(x) as tan(x). */
7394 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7395 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7396 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7397 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7398 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7400 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7402 if (tanfn != NULL_TREE)
7403 return build_function_call_expr (tanfn,
7404 TREE_OPERAND (arg0, 1));
7407 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7408 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7409 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7410 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7411 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7412 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7414 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7416 if (tanfn != NULL_TREE)
7418 tree tmp = TREE_OPERAND (arg0, 1);
7419 tmp = build_function_call_expr (tanfn, tmp);
7420 return fold (build2 (RDIV_EXPR, type,
7421 build_real (type, dconst1), tmp));
7425 /* Optimize pow(x,c)/x as pow(x,c-1). */
7426 if (fcode0 == BUILT_IN_POW
7427 || fcode0 == BUILT_IN_POWF
7428 || fcode0 == BUILT_IN_POWL)
7430 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7431 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7432 if (TREE_CODE (arg01) == REAL_CST
7433 && ! TREE_CONSTANT_OVERFLOW (arg01)
7434 && operand_equal_p (arg1, arg00, 0))
7436 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7440 c = TREE_REAL_CST (arg01);
7441 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7442 arg = build_real (type, c);
7443 arglist = build_tree_list (NULL_TREE, arg);
7444 arglist = tree_cons (NULL_TREE, arg1, arglist);
7445 return build_function_call_expr (powfn, arglist);
7451 case TRUNC_DIV_EXPR:
7452 case ROUND_DIV_EXPR:
7453 case FLOOR_DIV_EXPR:
7455 case EXACT_DIV_EXPR:
7456 if (integer_onep (arg1))
7457 return non_lvalue (fold_convert (type, arg0));
7458 if (integer_zerop (arg1))
7461 if (!TYPE_UNSIGNED (type)
7462 && TREE_CODE (arg1) == INTEGER_CST
7463 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7464 && TREE_INT_CST_HIGH (arg1) == -1)
7465 return fold_convert (type, negate_expr (arg0));
7467 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7468 operation, EXACT_DIV_EXPR.
7470 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7471 At one time others generated faster code, it's not clear if they do
7472 after the last round to changes to the DIV code in expmed.c. */
7473 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7474 && multiple_of_p (type, arg0, arg1))
7475 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7477 if (TREE_CODE (arg1) == INTEGER_CST
7478 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7480 return fold_convert (type, tem);
7485 case FLOOR_MOD_EXPR:
7486 case ROUND_MOD_EXPR:
7487 case TRUNC_MOD_EXPR:
7488 if (integer_onep (arg1))
7489 return omit_one_operand (type, integer_zero_node, arg0);
7490 if (integer_zerop (arg1))
7492 /* X % -1 is zero. */
7493 if (!TYPE_UNSIGNED (type)
7494 && TREE_CODE (arg1) == INTEGER_CST
7495 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7496 && TREE_INT_CST_HIGH (arg1) == -1)
7497 return omit_one_operand (type, integer_zero_node, arg0);
7499 if (TREE_CODE (arg1) == INTEGER_CST
7500 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7502 return fold_convert (type, tem);
7508 if (integer_all_onesp (arg0))
7509 return omit_one_operand (type, arg0, arg1);
7513 /* Optimize -1 >> x for arithmetic right shifts. */
7514 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7515 return omit_one_operand (type, arg0, arg1);
7516 /* ... fall through ... */
7520 if (integer_zerop (arg1))
7521 return non_lvalue (fold_convert (type, arg0));
7522 if (integer_zerop (arg0))
7523 return omit_one_operand (type, arg0, arg1);
7525 /* Since negative shift count is not well-defined,
7526 don't try to compute it in the compiler. */
7527 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7529 /* Rewrite an LROTATE_EXPR by a constant into an
7530 RROTATE_EXPR by a new constant. */
7531 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7533 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
7534 tem = fold_convert (TREE_TYPE (arg1), tem);
7535 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7536 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7539 /* If we have a rotate of a bit operation with the rotate count and
7540 the second operand of the bit operation both constant,
7541 permute the two operations. */
7542 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7543 && (TREE_CODE (arg0) == BIT_AND_EXPR
7544 || TREE_CODE (arg0) == BIT_IOR_EXPR
7545 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7546 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7547 return fold (build2 (TREE_CODE (arg0), type,
7548 fold (build2 (code, type,
7549 TREE_OPERAND (arg0, 0), arg1)),
7550 fold (build2 (code, type,
7551 TREE_OPERAND (arg0, 1), arg1))));
7553 /* Two consecutive rotates adding up to the width of the mode can
7555 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7556 && TREE_CODE (arg0) == RROTATE_EXPR
7557 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7558 && TREE_INT_CST_HIGH (arg1) == 0
7559 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7560 && ((TREE_INT_CST_LOW (arg1)
7561 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7562 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7563 return TREE_OPERAND (arg0, 0);
7568 if (operand_equal_p (arg0, arg1, 0))
7569 return omit_one_operand (type, arg0, arg1);
7570 if (INTEGRAL_TYPE_P (type)
7571 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7572 return omit_one_operand (type, arg1, arg0);
7576 if (operand_equal_p (arg0, arg1, 0))
7577 return omit_one_operand (type, arg0, arg1);
7578 if (INTEGRAL_TYPE_P (type)
7579 && TYPE_MAX_VALUE (type)
7580 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7581 return omit_one_operand (type, arg1, arg0);
7584 case TRUTH_NOT_EXPR:
7585 /* The argument to invert_truthvalue must have Boolean type. */
7586 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7587 arg0 = fold_convert (boolean_type_node, arg0);
7589 /* Note that the operand of this must be an int
7590 and its values must be 0 or 1.
7591 ("true" is a fixed value perhaps depending on the language,
7592 but we don't handle values other than 1 correctly yet.) */
7593 tem = invert_truthvalue (arg0);
7594 /* Avoid infinite recursion. */
7595 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7597 tem = fold_single_bit_test (code, arg0, arg1, type);
7602 return fold_convert (type, tem);
7604 case TRUTH_ANDIF_EXPR:
7605 /* Note that the operands of this must be ints
7606 and their values must be 0 or 1.
7607 ("true" is a fixed value perhaps depending on the language.) */
7608 /* If first arg is constant zero, return it. */
7609 if (integer_zerop (arg0))
7610 return fold_convert (type, arg0);
7611 case TRUTH_AND_EXPR:
7612 /* If either arg is constant true, drop it. */
7613 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7614 return non_lvalue (fold_convert (type, arg1));
7615 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7616 /* Preserve sequence points. */
7617 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7618 return non_lvalue (fold_convert (type, arg0));
7619 /* If second arg is constant zero, result is zero, but first arg
7620 must be evaluated. */
7621 if (integer_zerop (arg1))
7622 return omit_one_operand (type, arg1, arg0);
7623 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7624 case will be handled here. */
7625 if (integer_zerop (arg0))
7626 return omit_one_operand (type, arg0, arg1);
7629 /* We only do these simplifications if we are optimizing. */
7633 /* Check for things like (A || B) && (A || C). We can convert this
7634 to A || (B && C). Note that either operator can be any of the four
7635 truth and/or operations and the transformation will still be
7636 valid. Also note that we only care about order for the
7637 ANDIF and ORIF operators. If B contains side effects, this
7638 might change the truth-value of A. */
7639 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7640 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7641 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7642 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7643 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7644 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7646 tree a00 = TREE_OPERAND (arg0, 0);
7647 tree a01 = TREE_OPERAND (arg0, 1);
7648 tree a10 = TREE_OPERAND (arg1, 0);
7649 tree a11 = TREE_OPERAND (arg1, 1);
7650 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7651 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7652 && (code == TRUTH_AND_EXPR
7653 || code == TRUTH_OR_EXPR));
7655 if (operand_equal_p (a00, a10, 0))
7656 return fold (build2 (TREE_CODE (arg0), type, a00,
7657 fold (build2 (code, type, a01, a11))));
7658 else if (commutative && operand_equal_p (a00, a11, 0))
7659 return fold (build2 (TREE_CODE (arg0), type, a00,
7660 fold (build2 (code, type, a01, a10))));
7661 else if (commutative && operand_equal_p (a01, a10, 0))
7662 return fold (build2 (TREE_CODE (arg0), type, a01,
7663 fold (build2 (code, type, a00, a11))));
 7665 /* This case is tricky because we must either have commutative
7666 operators or else A10 must not have side-effects. */
7668 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7669 && operand_equal_p (a01, a11, 0))
7670 return fold (build2 (TREE_CODE (arg0), type,
7671 fold (build2 (code, type, a00, a10)),
7675 /* See if we can build a range comparison. */
7676 if (0 != (tem = fold_range_test (t)))
7679 /* Check for the possibility of merging component references. If our
7680 lhs is another similar operation, try to merge its rhs with our
7681 rhs. Then try to merge our lhs and rhs. */
7682 if (TREE_CODE (arg0) == code
7683 && 0 != (tem = fold_truthop (code, type,
7684 TREE_OPERAND (arg0, 1), arg1)))
7685 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7687 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7692 case TRUTH_ORIF_EXPR:
7693 /* Note that the operands of this must be ints
7694 and their values must be 0 or true.
7695 ("true" is a fixed value perhaps depending on the language.) */
7696 /* If first arg is constant true, return it. */
7697 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7698 return fold_convert (type, arg0);
7700 /* If either arg is constant zero, drop it. */
7701 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7702 return non_lvalue (fold_convert (type, arg1));
7703 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7704 /* Preserve sequence points. */
7705 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7706 return non_lvalue (fold_convert (type, arg0));
7707 /* If second arg is constant true, result is true, but we must
7708 evaluate first arg. */
7709 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7710 return omit_one_operand (type, arg1, arg0);
7711 /* Likewise for first arg, but note this only occurs here for
7713 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7714 return omit_one_operand (type, arg0, arg1);
7717 case TRUTH_XOR_EXPR:
7718 /* If either arg is constant zero, drop it. */
7719 if (integer_zerop (arg0))
7720 return non_lvalue (fold_convert (type, arg1));
7721 if (integer_zerop (arg1))
7722 return non_lvalue (fold_convert (type, arg0));
7723 /* If either arg is constant true, this is a logical inversion. */
7724 if (integer_onep (arg0))
7725 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7726 if (integer_onep (arg1))
7727 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7728 /* Identical arguments cancel to zero. */
7729 if (operand_equal_p (arg0, arg1, 0))
7730 return omit_one_operand (type, integer_zero_node, arg0);
7739 /* If one arg is a real or integer constant, put it last. */
7740 if (tree_swap_operands_p (arg0, arg1, true))
7741 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7743 /* If this is an equality comparison of the address of a non-weak
7744 object against zero, then we know the result. */
7745 if ((code == EQ_EXPR || code == NE_EXPR)
7746 && TREE_CODE (arg0) == ADDR_EXPR
7747 && DECL_P (TREE_OPERAND (arg0, 0))
7748 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7749 && integer_zerop (arg1))
7750 return constant_boolean_node (code != EQ_EXPR, type);
7752 /* If this is an equality comparison of the address of two non-weak,
7753 unaliased symbols neither of which are extern (since we do not
7754 have access to attributes for externs), then we know the result. */
7755 if ((code == EQ_EXPR || code == NE_EXPR)
7756 && TREE_CODE (arg0) == ADDR_EXPR
7757 && DECL_P (TREE_OPERAND (arg0, 0))
7758 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7759 && ! lookup_attribute ("alias",
7760 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7761 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7762 && TREE_CODE (arg1) == ADDR_EXPR
7763 && DECL_P (TREE_OPERAND (arg1, 0))
7764 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7765 && ! lookup_attribute ("alias",
7766 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7767 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7768 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7769 ? code == EQ_EXPR : code != EQ_EXPR,
7772 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7774 tree targ0 = strip_float_extensions (arg0);
7775 tree targ1 = strip_float_extensions (arg1);
7776 tree newtype = TREE_TYPE (targ0);
7778 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7779 newtype = TREE_TYPE (targ1);
7781 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7782 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7783 return fold (build2 (code, type, fold_convert (newtype, targ0),
7784 fold_convert (newtype, targ1)));
7786 /* (-a) CMP (-b) -> b CMP a */
7787 if (TREE_CODE (arg0) == NEGATE_EXPR
7788 && TREE_CODE (arg1) == NEGATE_EXPR)
7789 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7790 TREE_OPERAND (arg0, 0)));
7792 if (TREE_CODE (arg1) == REAL_CST)
7794 REAL_VALUE_TYPE cst;
7795 cst = TREE_REAL_CST (arg1);
7797 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7798 if (TREE_CODE (arg0) == NEGATE_EXPR)
7800 fold (build2 (swap_tree_comparison (code), type,
7801 TREE_OPERAND (arg0, 0),
7802 build_real (TREE_TYPE (arg1),
7803 REAL_VALUE_NEGATE (cst))));
7805 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7806 /* a CMP (-0) -> a CMP 0 */
7807 if (REAL_VALUE_MINUS_ZERO (cst))
7808 return fold (build2 (code, type, arg0,
7809 build_real (TREE_TYPE (arg1), dconst0)));
7811 /* x != NaN is always true, other ops are always false. */
7812 if (REAL_VALUE_ISNAN (cst)
7813 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7815 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7816 return omit_one_operand (type, tem, arg0);
7819 /* Fold comparisons against infinity. */
7820 if (REAL_VALUE_ISINF (cst))
7822 tem = fold_inf_compare (code, type, arg0, arg1);
7823 if (tem != NULL_TREE)
7828 /* If this is a comparison of a real constant with a PLUS_EXPR
7829 or a MINUS_EXPR of a real constant, we can convert it into a
7830 comparison with a revised real constant as long as no overflow
7831 occurs when unsafe_math_optimizations are enabled. */
7832 if (flag_unsafe_math_optimizations
7833 && TREE_CODE (arg1) == REAL_CST
7834 && (TREE_CODE (arg0) == PLUS_EXPR
7835 || TREE_CODE (arg0) == MINUS_EXPR)
7836 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7837 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7838 ? MINUS_EXPR : PLUS_EXPR,
7839 arg1, TREE_OPERAND (arg0, 1), 0))
7840 && ! TREE_CONSTANT_OVERFLOW (tem))
7841 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7843 /* Likewise, we can simplify a comparison of a real constant with
7844 a MINUS_EXPR whose first operand is also a real constant, i.e.
7845 (c1 - x) < c2 becomes x > c1-c2. */
7846 if (flag_unsafe_math_optimizations
7847 && TREE_CODE (arg1) == REAL_CST
7848 && TREE_CODE (arg0) == MINUS_EXPR
7849 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7850 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7852 && ! TREE_CONSTANT_OVERFLOW (tem))
7853 return fold (build2 (swap_tree_comparison (code), type,
7854 TREE_OPERAND (arg0, 1), tem));
7856 /* Fold comparisons against built-in math functions. */
7857 if (TREE_CODE (arg1) == REAL_CST
7858 && flag_unsafe_math_optimizations
7859 && ! flag_errno_math)
7861 enum built_in_function fcode = builtin_mathfn_code (arg0);
7863 if (fcode != END_BUILTINS)
7865 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7866 if (tem != NULL_TREE)
7872 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7873 if (TREE_CONSTANT (arg1)
7874 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7875 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7876 /* This optimization is invalid for ordered comparisons
7877 if CONST+INCR overflows or if foo+incr might overflow.
7878 This optimization is invalid for floating point due to rounding.
7879 For pointer types we assume overflow doesn't happen. */
7880 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7881 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7882 && (code == EQ_EXPR || code == NE_EXPR))))
7884 tree varop, newconst;
7886 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7888 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7889 arg1, TREE_OPERAND (arg0, 1)));
7890 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7891 TREE_OPERAND (arg0, 0),
7892 TREE_OPERAND (arg0, 1));
7896 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7897 arg1, TREE_OPERAND (arg0, 1)));
7898 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7899 TREE_OPERAND (arg0, 0),
7900 TREE_OPERAND (arg0, 1));
7904 /* If VAROP is a reference to a bitfield, we must mask
7905 the constant by the width of the field. */
7906 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7907 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
7908 && host_integerp (DECL_SIZE (TREE_OPERAND
7909 (TREE_OPERAND (varop, 0), 1)), 1))
7911 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7912 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
7913 tree folded_compare, shift;
7915 /* First check whether the comparison would come out
7916 always the same. If we don't do that we would
7917 change the meaning with the masking. */
7918 folded_compare = fold (build2 (code, type,
7919 TREE_OPERAND (varop, 0), arg1));
7920 if (integer_zerop (folded_compare)
7921 || integer_onep (folded_compare))
7922 return omit_one_operand (type, folded_compare, varop);
7924 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
7926 shift = fold_convert (TREE_TYPE (varop), shift);
7927 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7929 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7933 return fold (build2 (code, type, varop, newconst));
7936 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7937 This transformation affects the cases which are handled in later
7938 optimizations involving comparisons with non-negative constants. */
7939 if (TREE_CODE (arg1) == INTEGER_CST
7940 && TREE_CODE (arg0) != INTEGER_CST
7941 && tree_int_cst_sgn (arg1) > 0)
7946 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7947 return fold (build2 (GT_EXPR, type, arg0, arg1));
7950 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7951 return fold (build2 (LE_EXPR, type, arg0, arg1));
7958 /* Comparisons with the highest or lowest possible integer of
7959 the specified size will have known values.
7961 This is quite similar to fold_relational_hi_lo; however, my
7962 attempts to share the code have been nothing but trouble.
7963 I give up for now. */
7965 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7967 if (TREE_CODE (arg1) == INTEGER_CST
7968 && ! TREE_CONSTANT_OVERFLOW (arg1)
7969 && width <= HOST_BITS_PER_WIDE_INT
7970 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7971 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7973 unsigned HOST_WIDE_INT signed_max;
7974 unsigned HOST_WIDE_INT max, min;
7976 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7978 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7980 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7986 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7989 if (TREE_INT_CST_HIGH (arg1) == 0
7990 && TREE_INT_CST_LOW (arg1) == max)
7994 return omit_one_operand (type, integer_zero_node, arg0);
7997 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8000 return omit_one_operand (type, integer_one_node, arg0);
8003 return fold (build2 (NE_EXPR, type, arg0, arg1));
8005 /* The GE_EXPR and LT_EXPR cases above are not normally
8006 reached because of previous transformations. */
8011 else if (TREE_INT_CST_HIGH (arg1) == 0
8012 && TREE_INT_CST_LOW (arg1) == max - 1)
8016 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8017 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8019 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8020 return fold (build2 (NE_EXPR, type, arg0, arg1));
8024 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8025 && TREE_INT_CST_LOW (arg1) == min)
8029 return omit_one_operand (type, integer_zero_node, arg0);
8032 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8035 return omit_one_operand (type, integer_one_node, arg0);
8038 return fold (build2 (NE_EXPR, type, arg0, arg1));
8043 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8044 && TREE_INT_CST_LOW (arg1) == min + 1)
8048 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8049 return fold (build2 (NE_EXPR, type, arg0, arg1));
8051 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8052 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8057 else if (!in_gimple_form
8058 && TREE_INT_CST_HIGH (arg1) == 0
8059 && TREE_INT_CST_LOW (arg1) == signed_max
8060 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8061 /* signed_type does not work on pointer types. */
8062 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8064 /* The following case also applies to X < signed_max+1
8065 and X >= signed_max+1 because previous transformations. */
8066 if (code == LE_EXPR || code == GT_EXPR)
8069 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8070 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8072 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8073 type, fold_convert (st0, arg0),
8074 fold_convert (st1, integer_zero_node)));
8080 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8081 a MINUS_EXPR of a constant, we can convert it into a comparison with
8082 a revised constant as long as no overflow occurs. */
8083 if ((code == EQ_EXPR || code == NE_EXPR)
8084 && TREE_CODE (arg1) == INTEGER_CST
8085 && (TREE_CODE (arg0) == PLUS_EXPR
8086 || TREE_CODE (arg0) == MINUS_EXPR)
8087 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8088 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8089 ? MINUS_EXPR : PLUS_EXPR,
8090 arg1, TREE_OPERAND (arg0, 1), 0))
8091 && ! TREE_CONSTANT_OVERFLOW (tem))
8092 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8094 /* Similarly for a NEGATE_EXPR. */
8095 else if ((code == EQ_EXPR || code == NE_EXPR)
8096 && TREE_CODE (arg0) == NEGATE_EXPR
8097 && TREE_CODE (arg1) == INTEGER_CST
8098 && 0 != (tem = negate_expr (arg1))
8099 && TREE_CODE (tem) == INTEGER_CST
8100 && ! TREE_CONSTANT_OVERFLOW (tem))
8101 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8103 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8104 for !=. Don't do this for ordered comparisons due to overflow. */
8105 else if ((code == NE_EXPR || code == EQ_EXPR)
8106 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8107 return fold (build2 (code, type,
8108 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8110 /* If we are widening one operand of an integer comparison,
8111 see if the other operand is similarly being widened. Perhaps we
8112 can do the comparison in the narrower type. */
8113 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8114 && TREE_CODE (arg0) == NOP_EXPR
8115 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
8116 && (code == EQ_EXPR || code == NE_EXPR
8117 || TYPE_UNSIGNED (TREE_TYPE (arg0))
8118 == TYPE_UNSIGNED (TREE_TYPE (tem)))
8119 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
8120 && (TREE_TYPE (t1) == TREE_TYPE (tem)
8121 || (TREE_CODE (t1) == INTEGER_CST
8122 && int_fits_type_p (t1, TREE_TYPE (tem)))))
8123 return fold (build2 (code, type, tem,
8124 fold_convert (TREE_TYPE (tem), t1)));
8126 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8127 constant, we can simplify it. */
8128 else if (TREE_CODE (arg1) == INTEGER_CST
8129 && (TREE_CODE (arg0) == MIN_EXPR
8130 || TREE_CODE (arg0) == MAX_EXPR)
8131 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8132 return optimize_minmax_comparison (t);
8134 /* If we are comparing an ABS_EXPR with a constant, we can
8135 convert all the cases into explicit comparisons, but they may
8136 well not be faster than doing the ABS and one comparison.
8137 But ABS (X) <= C is a range comparison, which becomes a subtraction
8138 and a comparison, and is probably faster. */
8139 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8140 && TREE_CODE (arg0) == ABS_EXPR
8141 && ! TREE_SIDE_EFFECTS (arg0)
8142 && (0 != (tem = negate_expr (arg1)))
8143 && TREE_CODE (tem) == INTEGER_CST
8144 && ! TREE_CONSTANT_OVERFLOW (tem))
8145 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8146 build2 (GE_EXPR, type,
8147 TREE_OPERAND (arg0, 0), tem),
8148 build2 (LE_EXPR, type,
8149 TREE_OPERAND (arg0, 0), arg1)));
8151 /* If this is an EQ or NE comparison with zero and ARG0 is
8152 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8153 two operations, but the latter can be done in one less insn
8154 on machines that have only two-operand insns or on which a
8155 constant cannot be the first operand. */
8156 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8157 && TREE_CODE (arg0) == BIT_AND_EXPR)
8159 tree arg00 = TREE_OPERAND (arg0, 0);
8160 tree arg01 = TREE_OPERAND (arg0, 1);
8161 if (TREE_CODE (arg00) == LSHIFT_EXPR
8162 && integer_onep (TREE_OPERAND (arg00, 0)))
8164 fold (build2 (code, type,
8165 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8166 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8167 arg01, TREE_OPERAND (arg00, 1)),
8168 fold_convert (TREE_TYPE (arg0),
8171 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8172 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8174 fold (build2 (code, type,
8175 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8176 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8177 arg00, TREE_OPERAND (arg01, 1)),
8178 fold_convert (TREE_TYPE (arg0),
8183 /* If this is an NE or EQ comparison of zero against the result of a
8184 signed MOD operation whose second operand is a power of 2, make
8185 the MOD operation unsigned since it is simpler and equivalent. */
8186 if ((code == NE_EXPR || code == EQ_EXPR)
8187 && integer_zerop (arg1)
8188 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8189 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8190 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8191 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8192 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8193 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8195 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8196 tree newmod = build2 (TREE_CODE (arg0), newtype,
8197 fold_convert (newtype,
8198 TREE_OPERAND (arg0, 0)),
8199 fold_convert (newtype,
8200 TREE_OPERAND (arg0, 1)));
8202 return build2 (code, type, newmod, fold_convert (newtype, arg1));
8205 /* If this is an NE comparison of zero with an AND of one, remove the
8206 comparison since the AND will give the correct value. */
8207 if (code == NE_EXPR && integer_zerop (arg1)
8208 && TREE_CODE (arg0) == BIT_AND_EXPR
8209 && integer_onep (TREE_OPERAND (arg0, 1)))
8210 return fold_convert (type, arg0);
8212 /* If we have (A & C) == C where C is a power of 2, convert this into
8213 (A & C) != 0. Similarly for NE_EXPR. */
8214 if ((code == EQ_EXPR || code == NE_EXPR)
8215 && TREE_CODE (arg0) == BIT_AND_EXPR
8216 && integer_pow2p (TREE_OPERAND (arg0, 1))
8217 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8218 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8219 arg0, integer_zero_node));
8221 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8222 2, then fold the expression into shifts and logical operations. */
8223 tem = fold_single_bit_test (code, arg0, arg1, type);
8227 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8228 Similarly for NE_EXPR. */
8229 if ((code == EQ_EXPR || code == NE_EXPR)
8230 && TREE_CODE (arg0) == BIT_AND_EXPR
8231 && TREE_CODE (arg1) == INTEGER_CST
8232 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8235 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8236 arg1, build1 (BIT_NOT_EXPR,
8237 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8238 TREE_OPERAND (arg0, 1))));
8239 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8240 if (integer_nonzerop (dandnotc))
8241 return omit_one_operand (type, rslt, arg0);
8244 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8245 Similarly for NE_EXPR. */
8246 if ((code == EQ_EXPR || code == NE_EXPR)
8247 && TREE_CODE (arg0) == BIT_IOR_EXPR
8248 && TREE_CODE (arg1) == INTEGER_CST
8249 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8252 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8253 TREE_OPERAND (arg0, 1),
8254 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
8255 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8256 if (integer_nonzerop (candnotd))
8257 return omit_one_operand (type, rslt, arg0);
8260 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8261 and similarly for >= into !=. */
8262 if ((code == LT_EXPR || code == GE_EXPR)
8263 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8264 && TREE_CODE (arg1) == LSHIFT_EXPR
8265 && integer_onep (TREE_OPERAND (arg1, 0)))
8266 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8267 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8268 TREE_OPERAND (arg1, 1)),
8269 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8271 else if ((code == LT_EXPR || code == GE_EXPR)
8272 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8273 && (TREE_CODE (arg1) == NOP_EXPR
8274 || TREE_CODE (arg1) == CONVERT_EXPR)
8275 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8276 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8278 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8279 fold_convert (TREE_TYPE (arg0),
8280 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8281 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8283 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8285 /* Simplify comparison of something with itself. (For IEEE
8286 floating-point, we can only do some of these simplifications.) */
8287 if (operand_equal_p (arg0, arg1, 0))
8292 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8293 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8294 return constant_boolean_node (1, type);
8299 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8300 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8301 return constant_boolean_node (1, type);
8302 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8305 /* For NE, we can only do this simplification if integer
8306 or we don't honor IEEE floating point NaNs. */
8307 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8308 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8310 /* ... fall through ... */
8313 return constant_boolean_node (0, type);
8319 /* If we are comparing an expression that just has comparisons
8320 of two integer values, arithmetic expressions of those comparisons,
8321 and constants, we can simplify it. There are only three cases
8322 to check: the two values can either be equal, the first can be
8323 greater, or the second can be greater. Fold the expression for
8324 those three values. Since each value must be 0 or 1, we have
8325 eight possibilities, each of which corresponds to the constant 0
8326 or 1 or one of the six possible comparisons.
8328 This handles common cases like (a > b) == 0 but also handles
8329 expressions like ((x > y) - (y > x)) > 0, which supposedly
8330 occur in macroized code. */
8332 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8334 tree cval1 = 0, cval2 = 0;
8337 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8338 /* Don't handle degenerate cases here; they should already
8339 have been handled anyway. */
8340 && cval1 != 0 && cval2 != 0
8341 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8342 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8343 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8344 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8345 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8346 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8347 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8349 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8350 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8352 /* We can't just pass T to eval_subst in case cval1 or cval2
8353 was the same as ARG1. */
8356 = fold (build2 (code, type,
8357 eval_subst (arg0, cval1, maxval,
8361 = fold (build2 (code, type,
8362 eval_subst (arg0, cval1, maxval,
8366 = fold (build2 (code, type,
8367 eval_subst (arg0, cval1, minval,
8371 /* All three of these results should be 0 or 1. Confirm they
8372 are. Then use those values to select the proper code
8375 if ((integer_zerop (high_result)
8376 || integer_onep (high_result))
8377 && (integer_zerop (equal_result)
8378 || integer_onep (equal_result))
8379 && (integer_zerop (low_result)
8380 || integer_onep (low_result)))
8382 /* Make a 3-bit mask with the high-order bit being the
8383 value for `>', the next for '=', and the low for '<'. */
8384 switch ((integer_onep (high_result) * 4)
8385 + (integer_onep (equal_result) * 2)
8386 + integer_onep (low_result))
8390 return omit_one_operand (type, integer_zero_node, arg0);
8411 return omit_one_operand (type, integer_one_node, arg0);
8414 tem = build2 (code, type, cval1, cval2);
8416 return save_expr (tem);
8423 /* If this is a comparison of a field, we may be able to simplify it. */
8424 if (((TREE_CODE (arg0) == COMPONENT_REF
8425 && lang_hooks.can_use_bit_fields_p ())
8426 || TREE_CODE (arg0) == BIT_FIELD_REF)
8427 && (code == EQ_EXPR || code == NE_EXPR)
8428 /* Handle the constant case even without -O
8429 to make sure the warnings are given. */
8430 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8432 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8437 /* If this is a comparison of complex values and either or both sides
8438 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8439 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8440 This may prevent needless evaluations. */
8441 if ((code == EQ_EXPR || code == NE_EXPR)
8442 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8443 && (TREE_CODE (arg0) == COMPLEX_EXPR
8444 || TREE_CODE (arg1) == COMPLEX_EXPR
8445 || TREE_CODE (arg0) == COMPLEX_CST
8446 || TREE_CODE (arg1) == COMPLEX_CST))
8448 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8449 tree real0, imag0, real1, imag1;
8451 arg0 = save_expr (arg0);
8452 arg1 = save_expr (arg1);
8453 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8454 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8455 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8456 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8458 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8461 fold (build2 (code, type, real0, real1)),
8462 fold (build2 (code, type, imag0, imag1))));
8465 /* Optimize comparisons of strlen vs zero to a compare of the
8466 first character of the string vs zero. To wit,
8467 strlen(ptr) == 0 => *ptr == 0
8468 strlen(ptr) != 0 => *ptr != 0
8469 Other cases should reduce to one of these two (or a constant)
8470 due to the return value of strlen being unsigned. */
8471 if ((code == EQ_EXPR || code == NE_EXPR)
8472 && integer_zerop (arg1)
8473 && TREE_CODE (arg0) == CALL_EXPR)
8475 tree fndecl = get_callee_fndecl (arg0);
8479 && DECL_BUILT_IN (fndecl)
8480 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8481 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8482 && (arglist = TREE_OPERAND (arg0, 1))
8483 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8484 && ! TREE_CHAIN (arglist))
8485 return fold (build2 (code, type,
8486 build1 (INDIRECT_REF, char_type_node,
8487 TREE_VALUE(arglist)),
8488 integer_zero_node));
8491 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8492 into a single range test. */
8493 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8494 && TREE_CODE (arg1) == INTEGER_CST
8495 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8496 && !integer_zerop (TREE_OPERAND (arg0, 1))
8497 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8498 && !TREE_OVERFLOW (arg1))
8500 t1 = fold_div_compare (code, type, arg0, arg1);
8501 if (t1 != NULL_TREE)
8505 /* Both ARG0 and ARG1 are known to be constants at this point. */
8506 t1 = fold_relational_const (code, type, arg0, arg1);
8507 return (t1 == NULL_TREE ? t : t1);
8509 case UNORDERED_EXPR:
8517 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8519 t1 = fold_relational_const (code, type, arg0, arg1);
8520 if (t1 != NULL_TREE)
8524 /* If the first operand is NaN, the result is constant. */
8525 if (TREE_CODE (arg0) == REAL_CST
8526 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8527 && (code != LTGT_EXPR || ! flag_trapping_math))
8529 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8532 return omit_one_operand (type, t1, arg1);
8535 /* If the second operand is NaN, the result is constant. */
8536 if (TREE_CODE (arg1) == REAL_CST
8537 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
8538 && (code != LTGT_EXPR || ! flag_trapping_math))
8540 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8543 return omit_one_operand (type, t1, arg0);
8546 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8548 tree targ0 = strip_float_extensions (arg0);
8549 tree targ1 = strip_float_extensions (arg1);
8550 tree newtype = TREE_TYPE (targ0);
8552 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8553 newtype = TREE_TYPE (targ1);
8555 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8556 return fold (build2 (code, type, fold_convert (newtype, targ0),
8557 fold_convert (newtype, targ1)));
8563 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8564 so all simple results must be passed through pedantic_non_lvalue. */
8565 if (TREE_CODE (arg0) == INTEGER_CST)
8567 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8568 /* Only optimize constant conditions when the selected branch
8569 has the same type as the COND_EXPR. This avoids optimizing
8570 away "c ? x : throw", where the throw has a void type. */
8571 if (! VOID_TYPE_P (TREE_TYPE (tem))
8572 || VOID_TYPE_P (type))
8573 return pedantic_non_lvalue (tem);
8576 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8577 return pedantic_omit_one_operand (type, arg1, arg0);
8579 /* If we have A op B ? A : C, we may be able to convert this to a
8580 simpler expression, depending on the operation and the values
8581 of B and C. Signed zeros prevent all of these transformations,
8582 for reasons given above each one.
8584 Also try swapping the arguments and inverting the conditional. */
8585 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8586 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8587 arg1, TREE_OPERAND (arg0, 1))
8588 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8590 tem = fold_cond_expr_with_comparison (type, arg0,
8591 TREE_OPERAND (t, 2));
8596 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8597 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8598 TREE_OPERAND (t, 2),
8599 TREE_OPERAND (arg0, 1))
8600 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
8602 tem = invert_truthvalue (arg0);
8603 if (TREE_CODE_CLASS (TREE_CODE (tem)) == '<')
8605 tem = fold_cond_expr_with_comparison (type, tem, arg1);
8611 /* If the second operand is simpler than the third, swap them
8612 since that produces better jump optimization results. */
8613 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8614 TREE_OPERAND (t, 2), false))
8616 /* See if this can be inverted. If it can't, possibly because
8617 it was a floating-point inequality comparison, don't do
8619 tem = invert_truthvalue (arg0);
8621 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8622 return fold (build3 (code, type, tem,
8623 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8626 /* Convert A ? 1 : 0 to simply A. */
8627 if (integer_onep (TREE_OPERAND (t, 1))
8628 && integer_zerop (TREE_OPERAND (t, 2))
8629 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8630 call to fold will try to move the conversion inside
8631 a COND, which will recurse. In that case, the COND_EXPR
8632 is probably the best choice, so leave it alone. */
8633 && type == TREE_TYPE (arg0))
8634 return pedantic_non_lvalue (arg0);
8636 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8637 over COND_EXPR in cases such as floating point comparisons. */
8638 if (integer_zerop (TREE_OPERAND (t, 1))
8639 && integer_onep (TREE_OPERAND (t, 2))
8640 && truth_value_p (TREE_CODE (arg0)))
8641 return pedantic_non_lvalue (fold_convert (type,
8642 invert_truthvalue (arg0)));
8644 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
8645 if (TREE_CODE (arg0) == LT_EXPR
8646 && integer_zerop (TREE_OPERAND (arg0, 1))
8647 && integer_zerop (TREE_OPERAND (t, 2))
8648 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
8649 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
8650 TREE_TYPE (tem), tem, arg1)));
8652 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
8653 already handled above. */
8654 if (TREE_CODE (arg0) == BIT_AND_EXPR
8655 && integer_onep (TREE_OPERAND (arg0, 1))
8656 && integer_zerop (TREE_OPERAND (t, 2))
8657 && integer_pow2p (arg1))
8659 tree tem = TREE_OPERAND (arg0, 0);
8661 if (TREE_CODE (tem) == RSHIFT_EXPR
8662 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
8663 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
8664 return fold (build2 (BIT_AND_EXPR, type,
8665 TREE_OPERAND (tem, 0), arg1));
8668 /* A & N ? N : 0 is simply A & N if N is a power of two. This
8669 is probably obsolete because the first operand should be a
8670 truth value (that's why we have the two cases above), but let's
8671 leave it in until we can confirm this for all front-ends. */
8672 if (integer_zerop (TREE_OPERAND (t, 2))
8673 && TREE_CODE (arg0) == NE_EXPR
8674 && integer_zerop (TREE_OPERAND (arg0, 1))
8675 && integer_pow2p (arg1)
8676 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8677 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8678 arg1, OEP_ONLY_CONST))
8679 return pedantic_non_lvalue (fold_convert (type,
8680 TREE_OPERAND (arg0, 0)));
8682 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8683 if (integer_zerop (TREE_OPERAND (t, 2))
8684 && truth_value_p (TREE_CODE (arg0))
8685 && truth_value_p (TREE_CODE (arg1)))
8686 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
8688 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8689 if (integer_onep (TREE_OPERAND (t, 2))
8690 && truth_value_p (TREE_CODE (arg0))
8691 && truth_value_p (TREE_CODE (arg1)))
8693 /* Only perform transformation if ARG0 is easily inverted. */
8694 tem = invert_truthvalue (arg0);
8695 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8696 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
8699 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
8700 if (integer_zerop (arg1)
8701 && truth_value_p (TREE_CODE (arg0))
8702 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8704 /* Only perform transformation if ARG0 is easily inverted. */
8705 tem = invert_truthvalue (arg0);
8706 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8707 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
8708 TREE_OPERAND (t, 2)));
8711 /* Convert A ? 1 : B into A || B if A and B are truth values. */
8712 if (integer_onep (arg1)
8713 && truth_value_p (TREE_CODE (arg0))
8714 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8715 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
8716 TREE_OPERAND (t, 2)));
8721 /* When pedantic, a compound expression can be neither an lvalue
8722 nor an integer constant expression. */
8723 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8725 /* Don't let (0, 0) be null pointer constant. */
8726 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8727 : fold_convert (type, arg1);
8728 return pedantic_non_lvalue (tem);
8732 return build_complex (type, arg0, arg1);
8736 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8738 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8739 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8740 TREE_OPERAND (arg0, 1));
8741 else if (TREE_CODE (arg0) == COMPLEX_CST)
8742 return TREE_REALPART (arg0);
8743 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8744 return fold (build2 (TREE_CODE (arg0), type,
8745 fold (build1 (REALPART_EXPR, type,
8746 TREE_OPERAND (arg0, 0))),
8747 fold (build1 (REALPART_EXPR, type,
8748 TREE_OPERAND (arg0, 1)))));
8752 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8753 return fold_convert (type, integer_zero_node);
8754 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8755 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8756 TREE_OPERAND (arg0, 0));
8757 else if (TREE_CODE (arg0) == COMPLEX_CST)
8758 return TREE_IMAGPART (arg0);
8759 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8760 return fold (build2 (TREE_CODE (arg0), type,
8761 fold (build1 (IMAGPART_EXPR, type,
8762 TREE_OPERAND (arg0, 0))),
8763 fold (build1 (IMAGPART_EXPR, type,
8764 TREE_OPERAND (arg0, 1)))));
8767 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8769 case CLEANUP_POINT_EXPR:
8770 if (! has_cleanups (arg0))
8771 return TREE_OPERAND (t, 0);
8774 enum tree_code code0 = TREE_CODE (arg0);
8775 int kind0 = TREE_CODE_CLASS (code0);
8776 tree arg00 = TREE_OPERAND (arg0, 0);
8779 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8780 return fold (build1 (code0, type,
8781 fold (build1 (CLEANUP_POINT_EXPR,
8782 TREE_TYPE (arg00), arg00))));
8784 if (kind0 == '<' || kind0 == '2'
8785 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8786 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8787 || code0 == TRUTH_XOR_EXPR)
8789 arg01 = TREE_OPERAND (arg0, 1);
8791 if (TREE_CONSTANT (arg00)
8792 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8793 && ! has_cleanups (arg00)))
8794 return fold (build2 (code0, type, arg00,
8795 fold (build1 (CLEANUP_POINT_EXPR,
8796 TREE_TYPE (arg01), arg01))));
8798 if (TREE_CONSTANT (arg01))
8799 return fold (build2 (code0, type,
8800 fold (build1 (CLEANUP_POINT_EXPR,
8801 TREE_TYPE (arg00), arg00)),
8809 /* Check for a built-in function. */
8810 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8811 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8813 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8815 tree tmp = fold_builtin (t);
8823 } /* switch (code) */
8826 #ifdef ENABLE_FOLD_CHECKING
/* Forward declarations for the --enable-checking=fold machinery defined
   below.  */
8829 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8830 static void fold_check_failed (tree, tree);
8831 void print_fold_checksum (tree);
8833 /* When --enable-checking=fold, compute a digest of expr before
8834 and after actual fold call to see if fold did not accidentally
8835 change original expr. */
/* NOTE(review): this listing is sampled -- the wrapper's signature line
   and several declarations/braces between the visible lines are missing
   from view; verify against the full source before editing.  */
8842 unsigned char checksum_before[16], checksum_after[16];
8845 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8846 md5_init_ctx (&ctx);
/* Digest of EXPR before folding.  */
8847 fold_checksum_tree (expr, &ctx, ht);
8848 md5_finish_ctx (&ctx, checksum_before);
/* Perform the actual fold via the renamed worker.  */
8851 ret = fold_1 (expr);
8853 md5_init_ctx (&ctx);
/* Digest of the same EXPR after folding; fold must not mutate its
   input in place.  */
8854 fold_checksum_tree (expr, &ctx, ht);
8855 md5_finish_ctx (&ctx, checksum_after);
/* Any difference between the two digests means fold modified EXPR.  */
8858 if (memcmp (checksum_before, checksum_after, 16))
8859 fold_check_failed (expr, ret);
/* Debug aid: print the md5 digest of EXPR's tree (as computed by
   fold_checksum_tree) to stderr as 32 hex characters plus newline.  */
8865 print_fold_checksum (tree expr)
8868 unsigned char checksum[16], cnt;
8871 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8872 md5_init_ctx (&ctx);
8873 fold_checksum_tree (expr, &ctx, ht);
8874 md5_finish_ctx (&ctx, checksum);
/* 16 md5 bytes, two hex digits each.  */
8876 for (cnt = 0; cnt < 16; ++cnt)
8877 fprintf (stderr, "%02x", checksum[cnt]);
8878 putc ('\n', stderr);
/* Report that fold modified its input tree in place; aborts compilation
   with an internal compiler error.  Both parameters are currently unused
   beyond diagnostics (hence ATTRIBUTE_UNUSED).  */
8882 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8884 internal_error ("fold check: original tree changed by fold");
/* Fold EXPR's bytes and those of all trees reachable from it into the md5
   context CTX, using hash table HT to avoid revisiting shared nodes.
   Fields that fold is allowed to modify (SAVE_EXPR_NOPLACEHOLDER,
   DECL_ASSEMBLER_NAME, TYPE_POINTER_TO/TYPE_REFERENCE_TO) are masked out
   by checksumming a scrubbed copy in BUF instead of the node itself.
   NOTE(review): sampled listing -- braces, some declarations, case labels
   and early returns are missing between the visible lines.  */
8888 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8891 enum tree_code code;
8892 char buf[sizeof (struct tree_decl)];
/* Compile-time sanity check: BUF must be large enough to hold a copy of
   any node variant scrubbed below.  */
8895 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8896 > sizeof (struct tree_decl)
8897 || sizeof (struct tree_type) > sizeof (struct tree_decl))
/* Visit each node at most once (trees share subexpressions).  */
8901 slot = htab_find_slot (ht, expr, INSERT);
8905 code = TREE_CODE (expr);
8906 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8908 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8909 memcpy (buf, expr, tree_size (expr));
8911 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8913 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8915 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8916 memcpy (buf, expr, tree_size (expr));
8918 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8920 else if (TREE_CODE_CLASS (code) == 't'
8921 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8923 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8924 memcpy (buf, expr, tree_size (expr));
8926 TYPE_POINTER_TO (expr) = NULL;
8927 TYPE_REFERENCE_TO (expr) = NULL;
/* Checksum the node's raw bytes, then recurse into referenced trees.  */
8929 md5_process_bytes (expr, tree_size (expr), ctx);
8930 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8931 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8932 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8933 len = TREE_CODE_LENGTH (code);
/* Per-class traversal of node-specific fields.  */
8934 switch (TREE_CODE_CLASS (code))
8940 md5_process_bytes (TREE_STRING_POINTER (expr),
8941 TREE_STRING_LENGTH (expr), ctx);
8944 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8945 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8948 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8958 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8959 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8962 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8963 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
/* Some expression codes have fewer meaningful operands than
   TREE_CODE_LENGTH reports; clamp LEN for those.  */
8972 case SAVE_EXPR: len = 2; break;
8973 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8974 case RTL_EXPR: len = 0; break;
8975 case WITH_CLEANUP_EXPR: len = 2; break;
8984 for (i = 0; i < len; ++i)
8985 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
/* Declaration nodes: walk every tree-valued DECL_* field.  */
8988 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8989 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8990 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8991 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8992 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8993 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8994 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8995 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8996 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8997 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8998 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
/* Type nodes: walk every tree-valued TYPE_* field.  */
9001 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9002 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9003 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9004 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9005 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9006 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9007 if (INTEGRAL_TYPE_P (expr)
9008 || SCALAR_FLOAT_TYPE_P (expr))
9010 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9011 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9013 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9014 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9015 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9024 /* Perform constant folding and related simplification of initializer
9025 expression EXPR. This behaves identically to "fold" but ignores
9026 potential run-time traps and exceptions that fold must preserve. */
/* NOTE(review): sampled listing -- the return type line, result
   declaration, flag_trapv clearing, and final return are not visible.  */
9029 fold_initializer (tree expr)
/* Save the trap-related flags so they can be restored afterwards.  */
9031 int saved_signaling_nans = flag_signaling_nans;
9032 int saved_trapping_math = flag_trapping_math;
9033 int saved_trapv = flag_trapv;
/* Disable trap preservation: an initializer is evaluated at compile
   time, so run-time trap semantics need not be kept.  */
9036 flag_signaling_nans = 0;
9037 flag_trapping_math = 0;
9040 result = fold (expr);
/* Restore the caller's flag settings.  */
9042 flag_signaling_nans = saved_signaling_nans;
9043 flag_trapping_math = saved_trapping_math;
9044 flag_trapv = saved_trapv;
9049 /* Determine if first argument is a multiple of second argument. Return 0 if
9050 it is not, or we cannot easily determined it to be.
9052 An example of the sort of thing we care about (at this point; this routine
9053 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9054 fold cases do now) is discovering that
9056 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9062 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9064 This code also handles discovering that
9066 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9068 is a multiple of 8 so we don't have to worry about dealing with a
9071 Note that we *look* inside a SAVE_EXPR only to determine how it was
9072 calculated; it is not safe for fold to do much of anything else with the
9073 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9074 at run time. For example, the latter example above *cannot* be implemented
9075 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9076 evaluation time of the original SAVE_EXPR is not necessarily the same at
9077 the time the new expression is evaluated. The only optimization of this
9078 sort that would be valid is changing
9080 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9084 SAVE_EXPR (I) * SAVE_EXPR (J)
9086 (where the same SAVE_EXPR (J) is used in the original and the
9087 transformed version). */
/* NOTE(review): sampled listing -- case labels, braces and some
   declarations between the visible lines are missing from view.  */
9090 multiple_of_p (tree type, tree top, tree bottom)
/* TOP == BOTTOM is trivially a multiple.  */
9092 if (operand_equal_p (top, bottom, 0))
/* Only integer types are handled.  */
9095 if (TREE_CODE (type) != INTEGER_TYPE)
9098 switch (TREE_CODE (top))
/* Multiplication: a multiple in either factor suffices.  */
9101 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9102 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom))
9106 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9107 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Left shift by a constant: rewrite as multiplication by 1<<N and
   recurse, guarding against shift counts that overflow.  */
9110 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9114 op1 = TREE_OPERAND (top, 1);
9115 /* const_binop may not detect overflow correctly,
9116 so check for it explicitly here. */
9117 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9118 > TREE_INT_CST_LOW (op1)
9119 && TREE_INT_CST_HIGH (op1) == 0
9120 && 0 != (t1 = fold_convert (type,
9121 const_binop (LSHIFT_EXPR,
9124 && ! TREE_OVERFLOW (t1))
9125 return multiple_of_p (type, t1, bottom);
9130 /* Can't handle conversions from non-integral or wider integral type. */
9131 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9132 || (TYPE_PRECISION (type)
9133 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9136 /* .. fall through ... */
/* SAVE_EXPR / narrowing conversion: look through to the operand.  */
9139 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Constant case: reject non-constant BOTTOM, and negative values in
   unsigned types, then test TOP % BOTTOM == 0 directly.  */
9142 if (TREE_CODE (bottom) != INTEGER_CST
9143 || (TYPE_UNSIGNED (type)
9144 && (tree_int_cst_sgn (top) < 0
9145 || tree_int_cst_sgn (bottom) < 0)))
9147 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9155 /* Return true if `t' is known to be non-negative. */
/* NOTE(review): sampled listing -- many case labels, braces and
   declarations between the visible lines are missing from view.  */
9158 tree_expr_nonnegative_p (tree t)
9160 switch (TREE_CODE (t))
/* Integer constant: sign is directly testable.  */
9166 return tree_int_cst_sgn (t) >= 0;
/* Real constant: nonnegative iff its sign bit is clear.  */
9169 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* Addition: for floats, both operands nonnegative suffices.  */
9172 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9173 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9174 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9176 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9177 both unsigned and at least 2 bits shorter than the result. */
9178 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9179 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9180 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9182 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9183 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9184 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9185 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9187 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9188 TYPE_PRECISION (inner2)) + 1;
9189 return prec < TYPE_PRECISION (TREE_TYPE (t));
/* Multiplication.  */
9195 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9197 /* x * x for floating point x is always non-negative. */
9198 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9200 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9201 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9204 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9205 both unsigned and their total bits is shorter than the result. */
9206 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9207 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9208 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9210 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9211 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9212 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9213 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9214 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9215 < TYPE_PRECISION (TREE_TYPE (t));
/* Division: nonnegative when both operands are.  */
9219 case TRUNC_DIV_EXPR:
9221 case FLOOR_DIV_EXPR:
9222 case ROUND_DIV_EXPR:
9223 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9224 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Modulus: sign follows the dividend.  */
9226 case TRUNC_MOD_EXPR:
9228 case FLOOR_MOD_EXPR:
9229 case ROUND_MOD_EXPR:
9230 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9233 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9234 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9237 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9238 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9241 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9242 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Conversions: examine inner and outer types.  */
9246 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9247 tree outer_type = TREE_TYPE (t);
9249 if (TREE_CODE (outer_type) == REAL_TYPE)
9251 if (TREE_CODE (inner_type) == REAL_TYPE)
9252 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9253 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9255 if (TYPE_UNSIGNED (inner_type))
9257 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9260 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9262 if (TREE_CODE (inner_type) == REAL_TYPE)
9263 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
9264 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9265 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9266 && TYPE_UNSIGNED (inner_type);
/* Conditional: both arms must be nonnegative.  */
9272 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9273 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9275 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9277 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9278 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9280 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9281 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9283 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9285 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9287 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9288 case NON_LVALUE_EXPR:
9289 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9291 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
/* RTL_EXPR: delegate to the RTL-level predicate.  */
9293 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
/* TARGET_EXPR: reason about the value stored into the slot.  */
9297 tree temp = TARGET_EXPR_SLOT (t);
9298 t = TARGET_EXPR_INITIAL (t);
9300 /* If the initializer is non-void, then it's a normal expression
9301 that will be assigned to the slot. */
9302 if (!VOID_TYPE_P (t))
9303 return tree_expr_nonnegative_p (t);
9305 /* Otherwise, the initializer sets the slot in some way. One common
9306 way is an assignment statement at the end of the initializer. */
9309 if (TREE_CODE (t) == BIND_EXPR)
9310 t = expr_last (BIND_EXPR_BODY (t));
9311 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9312 || TREE_CODE (t) == TRY_CATCH_EXPR)
9313 t = expr_last (TREE_OPERAND (t, 0));
9314 else if (TREE_CODE (t) == STATEMENT_LIST)
9319 if (TREE_CODE (t) == MODIFY_EXPR
9320 && TREE_OPERAND (t, 0) == temp)
9321 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* CALL_EXPR: recognize built-in math functions with known sign.  */
9328 tree fndecl = get_callee_fndecl (t);
9329 tree arglist = TREE_OPERAND (t, 1);
9331 && DECL_BUILT_IN (fndecl)
9332 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9333 switch (DECL_FUNCTION_CODE (fndecl))
9335 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9336 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9337 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9338 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
/* Builtins whose result is always nonnegative.  */
9340 CASE_BUILTIN_F (BUILT_IN_ACOS)
9341 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9342 CASE_BUILTIN_F (BUILT_IN_CABS)
9343 CASE_BUILTIN_F (BUILT_IN_COSH)
9344 CASE_BUILTIN_F (BUILT_IN_ERFC)
9345 CASE_BUILTIN_F (BUILT_IN_EXP)
9346 CASE_BUILTIN_F (BUILT_IN_EXP10)
9347 CASE_BUILTIN_F (BUILT_IN_EXP2)
9348 CASE_BUILTIN_F (BUILT_IN_FABS)
9349 CASE_BUILTIN_F (BUILT_IN_FDIM)
9350 CASE_BUILTIN_F (BUILT_IN_FREXP)
9351 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9352 CASE_BUILTIN_F (BUILT_IN_POW10)
9353 CASE_BUILTIN_I (BUILT_IN_FFS)
9354 CASE_BUILTIN_I (BUILT_IN_PARITY)
9355 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9359 CASE_BUILTIN_F (BUILT_IN_SQRT)
9360 /* sqrt(-0.0) is -0.0. */
9361 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9363 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
/* Builtins whose sign follows the first argument.  */
9365 CASE_BUILTIN_F (BUILT_IN_ASINH)
9366 CASE_BUILTIN_F (BUILT_IN_ATAN)
9367 CASE_BUILTIN_F (BUILT_IN_ATANH)
9368 CASE_BUILTIN_F (BUILT_IN_CBRT)
9369 CASE_BUILTIN_F (BUILT_IN_CEIL)
9370 CASE_BUILTIN_F (BUILT_IN_ERF)
9371 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9372 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9373 CASE_BUILTIN_F (BUILT_IN_FMOD)
9374 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9375 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9376 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9377 CASE_BUILTIN_F (BUILT_IN_LRINT)
9378 CASE_BUILTIN_F (BUILT_IN_LROUND)
9379 CASE_BUILTIN_F (BUILT_IN_MODF)
9380 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9381 CASE_BUILTIN_F (BUILT_IN_POW)
9382 CASE_BUILTIN_F (BUILT_IN_RINT)
9383 CASE_BUILTIN_F (BUILT_IN_ROUND)
9384 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9385 CASE_BUILTIN_F (BUILT_IN_SINH)
9386 CASE_BUILTIN_F (BUILT_IN_TANH)
9387 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9388 /* True if the 1st argument is nonnegative. */
9389 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9391 CASE_BUILTIN_F (BUILT_IN_FMAX)
9392 /* True if the 1st OR 2nd arguments are nonnegative. */
9393 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9394 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9396 CASE_BUILTIN_F (BUILT_IN_FMIN)
9397 /* True if the 1st AND 2nd arguments are nonnegative. */
9398 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9399 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9401 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9402 /* True if the 2nd argument is nonnegative. */
9403 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9407 #undef CASE_BUILTIN_F
9408 #undef CASE_BUILTIN_I
9412 /* ... fall through ... */
9415 if (truth_value_p (TREE_CODE (t)))
9416 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9420 /* We don't know sign of `t', so be conservative and return false. */
9424 /* Return true when T is an address and is known to be nonzero.
9425 For floating point we further ensure that T is not denormal.
9426 Similar logic is present in nonzero_address in rtlanal.h */
/* NOTE(review): sampled listing -- case labels, braces and some
   returns between the visible lines are missing from view.  */
9429 tree_expr_nonzero_p (tree t)
9431 tree type = TREE_TYPE (t);
9433 /* Doing something useful for floating point would need more work. */
9434 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9437 switch (TREE_CODE (t))
/* Negation preserves nonzero-ness when signed overflow is undefined.  */
9440 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9441 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* Integer constant: directly testable.  */
9444 return !integer_zerop (t);
/* Addition.  */
9447 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9449 /* With the presence of negative values it is hard
9450 to say something. */
9451 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9452 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9454 /* One of operands must be positive and the other non-negative. */
9455 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9456 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Multiplication: both factors nonzero, when overflow is undefined.  */
9461 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9463 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9464 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Conversions: safe only when not narrowing.  */
9470 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9471 tree outer_type = TREE_TYPE (t);
9473 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9474 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
/* Addresses.  */
9479 /* Weak declarations may link to NULL. */
9480 if (DECL_P (TREE_OPERAND (t, 0)))
9481 return !DECL_WEAK (TREE_OPERAND (t, 0));
9482 /* Constants and all other cases are never weak. */
/* Conditional: both arms must be nonzero.  */
9486 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9487 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9490 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9491 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9494 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9496 /* When both operands are nonzero, then MAX must be too. */
9497 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9500 /* MAX where operand 0 is positive is positive. */
9501 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9503 /* MAX where operand 1 is positive is positive. */
9504 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9505 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
/* COMPOUND_EXPR and friends: value comes from operand 1.  */
9512 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9515 case NON_LVALUE_EXPR:
9516 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* Bitwise OR: nonzero if either operand is.  */
9519 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9520 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9528 /* Return true when `r' is known to be non-negative.
9529 Only handles constants at the moment. */
/* NOTE(review): sampled listing -- the signature's case labels, braces
   and final returns are partly missing from view.  */
9532 rtl_expr_nonnegative_p (rtx r)
9534 switch (GET_CODE (r))
/* CONST_INT: sign directly testable.  */
9537 return INTVAL (r) >= 0;
/* CONST_DOUBLE in VOIDmode is a wide integer; check its high word.  */
9540 if (GET_MODE (r) == VOIDmode)
9541 return CONST_DOUBLE_HIGH (r) >= 0;
/* CONST_VECTOR: nonnegative iff every element is.  */
9549 units = CONST_VECTOR_NUNITS (r);
9551 for (i = 0; i < units; ++i)
9553 elt = CONST_VECTOR_ELT (r, i);
9554 if (!rtl_expr_nonnegative_p (elt))
9563 /* These are always nonnegative. */
9572 /* See if we are applying CODE, a relational to the highest or lowest
9573 possible integer of TYPE. If so, then the result is a compile
/* NOTE(review): sampled listing -- the remainder of this header comment,
   parameter list, braces, and several case labels are missing from view.
   Presumably *code_p/*op0_p/*op1_p are in-out parameters rewritten in
   place -- confirm against the full source.  */
9577 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9582 enum tree_code code = *code_p;
9583 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
/* Only single-word integer/pointer constants without overflow.  */
9585 if (TREE_CODE (op1) == INTEGER_CST
9586 && ! TREE_CONSTANT_OVERFLOW (op1)
9587 && width <= HOST_BITS_PER_WIDE_INT
9588 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9589 || POINTER_TYPE_P (TREE_TYPE (op1))))
9591 unsigned HOST_WIDE_INT signed_max;
9592 unsigned HOST_WIDE_INT max, min;
/* Extremes of the operand's type at WIDTH bits.  */
9594 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9596 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9598 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9604 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
/* Comparison against the maximum value: some relations are constant.  */
9607 if (TREE_INT_CST_HIGH (op1) == 0
9608 && TREE_INT_CST_LOW (op1) == max)
9612 return omit_one_operand (type, integer_zero_node, op0);
9618 return omit_one_operand (type, integer_one_node, op0);
9624 /* The GE_EXPR and LT_EXPR cases above are not normally
9625 reached because of previous transformations. */
/* Comparison against max-1: canonicalize by adding one to OP1.  */
9630 else if (TREE_INT_CST_HIGH (op1) == 0
9631 && TREE_INT_CST_LOW (op1) == max - 1)
9636 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9640 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
/* Comparison against the minimum value.  */
9645 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9646 && TREE_INT_CST_LOW (op1) == min)
9650 return omit_one_operand (type, integer_zero_node, op0);
9657 return omit_one_operand (type, integer_one_node, op0);
/* Comparison against min+1: canonicalize by subtracting one.  */
9666 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9667 && TREE_INT_CST_LOW (op1) == min + 1)
9672 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9676 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
/* Unsigned compare against signed_max: becomes a sign test.  */
9682 else if (TREE_INT_CST_HIGH (op1) == 0
9683 && TREE_INT_CST_LOW (op1) == signed_max
9684 && TYPE_UNSIGNED (TREE_TYPE (op1))
9685 /* signed_type does not work on pointer types. */
9686 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9688 /* The following case also applies to X < signed_max+1
9689 and X >= signed_max+1 because previous transformations. */
9690 if (code == LE_EXPR || code == GT_EXPR)
9692 tree st0, st1, exp, retval;
9693 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9694 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
9696 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9698 fold_convert (st0, op0),
9699 fold_convert (st1, integer_zero_node));
9702 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9704 TREE_OPERAND (exp, 0),
9705 TREE_OPERAND (exp, 1));
9707 /* If we are in gimple form, then returning EXP would create
9708 non-gimple expressions. Clearing it is safe and insures
9709 we do not allow a non-gimple expression to escape. */
9713 return (retval ? retval : exp);
9722 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9723 attempt to fold the expression to a constant without modifying TYPE,
9726 If the expression could be simplified to a constant, then return
9727 the constant. If the expression would not be simplified to a
9728 constant, then return NULL_TREE.
9730 Note this is primarily designed to be called after gimplification
9731 of the tree structures and when at least one operand is a constant.
9732 As a result of those simplifying assumptions this routine is far
9733 simpler than the generic fold routine. */
/* NOTE(review): sampled listing -- case labels, braces, declarations
   and several returns between the visible lines are missing from view.  */
9736 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9744 /* If this is a commutative operation, and ARG0 is a constant, move it
9745 to ARG1 to reduce the number of tests below. */
9746 if (commutative_tree_code (code)
9747 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9754 /* If either operand is a complex type, extract its real component. */
9755 if (TREE_CODE (op0) == COMPLEX_CST)
9756 subop0 = TREE_REALPART (op0);
9760 if (TREE_CODE (op1) == COMPLEX_CST)
9761 subop1 = TREE_REALPART (op1);
9765 /* Note if either argument is not a real or integer constant.
9766 With a few exceptions, simplification is limited to cases
9767 where both arguments are constants. */
9768 if ((TREE_CODE (subop0) != INTEGER_CST
9769 && TREE_CODE (subop0) != REAL_CST)
9770 || (TREE_CODE (subop1) != INTEGER_CST
9771 && TREE_CODE (subop1) != REAL_CST))
9777 /* (plus (address) (const_int)) is a constant. */
9778 if (TREE_CODE (op0) == PLUS_EXPR
9779 && TREE_CODE (op1) == INTEGER_CST
9780 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9781 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9782 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9784 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9786 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9787 const_binop (PLUS_EXPR, op1,
9788 TREE_OPERAND (op0, 1), 0));
9796 /* Both arguments are constants. Simplify. */
9797 tem = const_binop (code, op0, op1, 0);
9798 if (tem != NULL_TREE)
9800 /* The return value should always have the same type as
9801 the original expression. */
9802 if (TREE_TYPE (tem) != type)
9803 tem = fold_convert (type, tem);
9810 /* Fold &x - &x. This can happen from &x.foo - &x.
9811 This is unsafe for certain floats even in non-IEEE formats.
9812 In IEEE, it is unsafe because it does wrong for NaNs.
9813 Also note that operand_equal_p is always false if an
9814 operand is volatile. */
9815 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9816 return fold_convert (type, integer_zero_node);
9822 /* Special case multiplication or bitwise AND where one argument
9824 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9825 return omit_one_operand (type, op1, op0);
/* Floating multiply by zero only folds when NaNs and signed zeros
   cannot change the result.  */
9827 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9828 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9829 && real_zerop (op1))
9830 return omit_one_operand (type, op1, op0);
9835 /* Special case when we know the result will be all ones. */
9836 if (integer_all_onesp (op1))
9837 return omit_one_operand (type, op1, op0);
/* Division and modulus cases.  */
9841 case TRUNC_DIV_EXPR:
9842 case ROUND_DIV_EXPR:
9843 case FLOOR_DIV_EXPR:
9845 case EXACT_DIV_EXPR:
9846 case TRUNC_MOD_EXPR:
9847 case ROUND_MOD_EXPR:
9848 case FLOOR_MOD_EXPR:
9851 /* Division by zero is undefined. */
9852 if (integer_zerop (op1))
9855 if (TREE_CODE (op1) == REAL_CST
9856 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9857 && real_zerop (op1))
/* MIN against the type's minimum / MAX against the type's maximum
   are constant.  */
9863 if (INTEGRAL_TYPE_P (type)
9864 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9865 return omit_one_operand (type, op1, op0);
9870 if (INTEGRAL_TYPE_P (type)
9871 && TYPE_MAX_VALUE (type)
9872 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9873 return omit_one_operand (type, op1, op0);
/* Shift cases.  */
9878 /* Optimize -1 >> x for arithmetic right shifts. */
9879 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
9880 return omit_one_operand (type, op0, op1);
9881 /* ... fall through ... */
9884 if (integer_zerop (op0))
9885 return omit_one_operand (type, op0, op1);
9887 /* Since negative shift count is not well-defined, don't
9888 try to compute it in the compiler. */
9889 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
/* Rotate cases.  */
9896 /* -1 rotated either direction by any amount is still -1. */
9897 if (integer_all_onesp (op0))
9898 return omit_one_operand (type, op0, op1);
9900 /* 0 rotated either direction by any amount is still zero. */
9901 if (integer_zerop (op0))
9902 return omit_one_operand (type, op0, op1);
/* COMPLEX_EXPR of two constants is a complex constant.  */
9908 return build_complex (type, op0, op1);
/* Comparison cases.  */
9917 /* If one arg is a real or integer constant, put it last. */
9918 if ((TREE_CODE (op0) == INTEGER_CST
9919 && TREE_CODE (op1) != INTEGER_CST)
9920 || (TREE_CODE (op0) == REAL_CST
9921 && TREE_CODE (op0) != REAL_CST))
9928 code = swap_tree_comparison (code);
9931 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9932 This transformation affects the cases which are handled in later
9933 optimizations involving comparisons with non-negative constants. */
9934 if (TREE_CODE (op1) == INTEGER_CST
9935 && TREE_CODE (op0) != INTEGER_CST
9936 && tree_int_cst_sgn (op1) > 0)
9942 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9947 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
/* Try the high/low-extreme simplification helper.  */
9955 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
9962 case UNORDERED_EXPR:
9972 return fold_relational_const (code, type, op0, op1);
9975 /* This could probably be handled. */
9978 case TRUTH_AND_EXPR:
9979 /* If second arg is constant zero, result is zero, but first arg
9980 must be evaluated. */
9981 if (integer_zerop (op1))
9982 return omit_one_operand (type, op1, op0);
9983 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
9984 case will be handled here. */
9985 if (integer_zerop (op0))
9986 return omit_one_operand (type, op0, op1);
9987 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9988 return constant_boolean_node (true, type);
9992 /* If second arg is constant true, result is true, but we must
9993 evaluate first arg. */
9994 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
9995 return omit_one_operand (type, op1, op0);
9996 /* Likewise for first arg, but note this only occurs here for
9998 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
9999 return omit_one_operand (type, op0, op1);
10000 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10001 return constant_boolean_node (false, type);
10004 case TRUTH_XOR_EXPR:
10005 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10007 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10008 return constant_boolean_node (x, type);
10017 /* Given the components of a unary expression CODE, TYPE and OP0,
10018 attempt to fold the expression to a constant without modifying
10021 If the expression could be simplified to a constant, then return
10022 the constant. If the expression would not be simplified to a
10023 constant, then return NULL_TREE.
10025 Note this is primarily designed to be called after gimplification
10026 of the tree structures and when op0 is a constant. As a result
10027 of those simplifying assumptions this routine is far simpler than
10028 the generic fold routine. */
/* NOTE(review): sampled listing -- case labels, braces and some
   declarations between the visible lines are missing from view.  */
10031 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
10034 /* Make sure we have a suitable constant argument. */
10035 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
/* For complex constants, inspect the real part only.  */
10039 if (TREE_CODE (op0) == COMPLEX_CST)
10040 subop = TREE_REALPART (op0);
10044 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
/* Conversions and fix-to-integer: delegate to fold_convert_const.  */
10053 case FIX_TRUNC_EXPR:
10054 case FIX_FLOOR_EXPR:
10055 case FIX_CEIL_EXPR:
10056 return fold_convert_const (code, type, op0);
/* Negation of a numeric constant.  */
10059 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10060 return fold_negate_const (op0, type);
/* Absolute value of a numeric constant.  */
10065 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10066 return fold_abs_const (op0, type);
/* Bitwise NOT of an integer constant.  */
10071 if (TREE_CODE (op0) == INTEGER_CST)
10072 return fold_not_const (op0, type);
10076 case REALPART_EXPR:
10077 if (TREE_CODE (op0) == COMPLEX_CST)
10078 return TREE_REALPART (op0);
10082 case IMAGPART_EXPR:
10083 if (TREE_CODE (op0) == COMPLEX_CST)
10084 return TREE_IMAGPART (op0);
/* Complex conjugate of a complex constant.  */
10089 if (TREE_CODE (op0) == COMPLEX_CST
10090 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10091 return build_complex (type, TREE_REALPART (op0),
10092 negate_expr (TREE_IMAGPART (op0)));
10100 /* If EXP represents referencing an element in a constant string
10101 (either via pointer arithmetic or array indexing), return the
10102 tree representing the value accessed, otherwise return NULL. */
10105 fold_read_from_constant_string (tree exp)
/* NOTE(review): the excerpt elides the return type, braces, and the
   declarations of `string' and `index' used below.  */
10107 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10109 tree exp1 = TREE_OPERAND (exp, 0);
/* For *p, string_constant extracts both the STRING_CST and the byte
   offset from the pointer expression; for ARRAY_REF the index comes
   from operand 1 below.  */
10113 if (TREE_CODE (exp) == INDIRECT_REF)
10114 string = string_constant (exp1, &index);
10117 tree low_bound = array_ref_low_bound (exp);
10118 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10120 /* Optimize the special-case of a zero lower bound.
10122 We convert the low_bound to sizetype to avoid some problems
10123 with constant folding. (E.g. suppose the lower bound is 1,
10124 and its mode is QI. Without the conversion, (ARRAY
10125 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10126 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
10127 if (! integer_zerop (low_bound))
10128 index = size_diffop (index, fold_convert (sizetype, low_bound));
/* Fold to the character only when the object really is a STRING_CST,
   the index is a constant within the string length, and the element
   type is a one-byte integer mode.  */
10134 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10135 && TREE_CODE (string) == STRING_CST
10136 && TREE_CODE (index) == INTEGER_CST
10137 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10138 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10140 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10141 return fold_convert (TREE_TYPE (exp),
10142 build_int_2 ((TREE_STRING_POINTER (string)
10143 [TREE_INT_CST_LOW (index)]), 0));
10148 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10149 an integer constant or real constant.
10151 TYPE is the type of the result. */
10154 fold_negate_const (tree arg0, tree type)
10156 tree t = NULL_TREE;
10158 if (TREE_CODE (arg0) == INTEGER_CST)
10160 unsigned HOST_WIDE_INT low;
10161 HOST_WIDE_INT high;
/* Negate the two-word integer value; neg_double returns a flag when
   the negation overflowed (e.g. negating the minimum value).  */
10162 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10163 TREE_INT_CST_HIGH (arg0),
10165 t = build_int_2 (low, high);
10166 TREE_TYPE (t) = type;
/* Propagate ARG0's overflow flag and record new signed overflow after
   forcing the result to fit TYPE; unsigned negation never overflows.  */
10168 = (TREE_OVERFLOW (arg0)
10169 | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
10170 TREE_CONSTANT_OVERFLOW (t)
10171 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
10173 else if (TREE_CODE (arg0) == REAL_CST)
10174 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10175 #ifdef ENABLE_CHECKING
10183 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10184 an integer constant or real constant.
10186 TYPE is the type of the result. */
10189 fold_abs_const (tree arg0, tree type)
10191 tree t = NULL_TREE;
10193 if (TREE_CODE (arg0) == INTEGER_CST)
10195 /* If the value is unsigned, then the absolute value is
10196 the same as the ordinary value. */
10197 if (TYPE_UNSIGNED (type))
10199 /* Similarly, if the value is non-negative. */
10200 else if (INT_CST_LT (integer_minus_one_node, arg0))
10202 /* If the value is negative, then the absolute value is
10206 unsigned HOST_WIDE_INT low;
10207 HOST_WIDE_INT high;
/* Negative case: abs(x) == -x, computed as a two-word negation just
   as in fold_negate_const.  */
10208 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10209 TREE_INT_CST_HIGH (arg0),
10211 t = build_int_2 (low, high);
10212 TREE_TYPE (t) = type;
/* Carry ARG0's overflow flag forward, OR-ing in any overflow from the
   negation / refitting to TYPE.  */
10214 = (TREE_OVERFLOW (arg0)
10215 | force_fit_type (t, overflow));
10216 TREE_CONSTANT_OVERFLOW (t)
10217 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
10221 else if (TREE_CODE (arg0) == REAL_CST)
10223 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10224 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10228 #ifdef ENABLE_CHECKING
10236 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10237 constant. TYPE is the type of the result. */
10240 fold_not_const (tree arg0, tree type)
10242 tree t = NULL_TREE;
10244 if (TREE_CODE (arg0) == INTEGER_CST)
/* One's-complement both halves of the two-word value, then force the
   result to fit TYPE.  Bitwise NOT cannot itself overflow, so ARG0's
   overflow flags are simply copied rather than OR-ed with a new one.  */
10246 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
10247 ~ TREE_INT_CST_HIGH (arg0));
10248 TREE_TYPE (t) = type;
10249 force_fit_type (t, 0);
10250 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
10251 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
10253 #ifdef ENABLE_CHECKING
10261 /* Given CODE, a relational operator, the target type, TYPE and two
10262 constant operands OP0 and OP1, return the result of the
10263 relational operation. If the result is not a compile time
10264 constant, then return NULL_TREE. */
10267 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10269 int result, invert;
10271 /* From here on, the only cases we handle are when the result is
10272 known to be a constant. */
10274 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10276 /* Handle the cases where either operand is a NaN. */
10277 if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
10278 || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
/* NOTE(review): the per-code NaN switch is mostly elided here; the
   visible lines show unordered codes folding to a constant, and the
   fold being abandoned under flag_trapping_math (the comparison must
   be kept so the trap can fire at run time) -- confirm against the
   full source.  */
10288 case UNORDERED_EXPR:
10302 if (flag_trapping_math)
10311 return constant_boolean_node (result, type);
10314 /* From here on we're sure there are no NaNs. */
10318 return constant_boolean_node (true, type);
10320 case UNORDERED_EXPR:
10321 return constant_boolean_node (false, type);
10347 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10349 To compute GT, swap the arguments and do LT.
10350 To compute GE, do LT and invert the result.
10351 To compute LE, swap the arguments, do LT and invert the result.
10352 To compute NE, do EQ and invert the result.
10354 Therefore, the code below must handle only EQ and LT. */
10356 if (code == LE_EXPR || code == GT_EXPR)
10361 code = swap_tree_comparison (code);
10364 /* Note that it is safe to invert for real values here because we
10365 have already handled the one case that it matters. */
10368 if (code == NE_EXPR || code == GE_EXPR)
/* Canonicalize NE->EQ and GE->LT; `invert' (set in elided lines)
   records that the final boolean must be flipped.  */
10371 code = invert_tree_comparison (code, false);
10374 /* Compute a result for LT or EQ if args permit;
10375 Otherwise return T. */
10376 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10378 if (code == EQ_EXPR)
10379 result = tree_int_cst_equal (op0, op1);
/* Pick the signed or unsigned two-word comparison macro based on the
   signedness of the operand type, not of TYPE.  */
10380 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10381 result = INT_CST_LT_UNSIGNED (op0, op1);
10383 result = INT_CST_LT (op0, op1);
/* EQ against literal zero of an expression known nonzero; the branch
   body is elided in this excerpt.  */
10386 else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
10387 && integer_zerop (op1) && tree_expr_nonzero_p (op0))
10390 /* Two real constants can be compared explicitly. */
10391 else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10393 if (code == EQ_EXPR)
10394 result = REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
10395 TREE_REAL_CST (op1));
10397 result = REAL_VALUES_LESS (TREE_REAL_CST (op0),
10398 TREE_REAL_CST (op1));
10405 return constant_boolean_node (result, type);
10408 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10409 avoid confusing the gimplify process. */
10412 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10414 if (TREE_CODE (t) == INDIRECT_REF)
/* &*p -> p, adding a NOP_EXPR cast when the pointer type differs
   from the requested PTRTYPE.  */
10416 t = TREE_OPERAND (t, 0);
10417 if (TREE_TYPE (t) != ptrtype)
10418 t = build1 (NOP_EXPR, ptrtype, t);
/* Otherwise strip component references down to the innermost base
   object and mark it addressable before building the ADDR_EXPR.  */
10424 while (handled_component_p (base)
10425 || TREE_CODE (base) == REALPART_EXPR
10426 || TREE_CODE (base) == IMAGPART_EXPR)
10427 base = TREE_OPERAND (base, 0);
10429 TREE_ADDRESSABLE (base) = 1;
10431 t = build1 (ADDR_EXPR, ptrtype, t);
/* Convenience wrapper: take the address of T using the natural
   pointer-to-TREE_TYPE(T) pointer type.  */
10438 build_fold_addr_expr (tree t)
10440 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10443 /* Builds an expression for an indirection through T, simplifying some
10447 build_fold_indirect_ref (tree t)
10449 tree type = TREE_TYPE (TREE_TYPE (t));
10454 if (TREE_CODE (sub) == ADDR_EXPR)
10456 tree op = TREE_OPERAND (sub, 0);
10457 tree optype = TREE_TYPE (op);
/* *&p -> p when the types are compatible (the return statement for
   this branch is elided in this excerpt).  */
10459 if (lang_hooks.types_compatible_p (type, optype))
10461 /* *(foo *)&fooarray => fooarray[0] */
10462 else if (TREE_CODE (optype) == ARRAY_TYPE
10463 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10464 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
10467 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10468 subtype = TREE_TYPE (sub);
10469 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10470 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10472 sub = build_fold_indirect_ref (sub);
10473 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
/* No simplification applied: fall back to a plain INDIRECT_REF.  */
10476 return build1 (INDIRECT_REF, type, t);
10479 #include "gt-fold-const.h"