/* Fold a constant sub-tree into a single node for the C compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
   @@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
47 #include "coretypes.h"
58 #include "langhooks.h"
/* The following constants represent a bit-based encoding of GCC's
62 comparison operators. This encoding simplifies transformations
63 on relational comparison operators, such as AND and OR. */
64 enum comparison_code {
83 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
84 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
85 static bool negate_mathfn_p (enum built_in_function);
86 static bool negate_expr_p (tree);
87 static tree negate_expr (tree);
88 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
89 static tree associate_trees (tree, tree, enum tree_code, tree);
90 static tree const_binop (enum tree_code, tree, tree, int);
91 static hashval_t size_htab_hash (const void *);
92 static int size_htab_eq (const void *, const void *);
93 static tree fold_convert_const (enum tree_code, tree, tree);
94 static enum tree_code invert_tree_comparison (enum tree_code, bool);
95 static enum tree_code swap_tree_comparison (enum tree_code);
96 static enum comparison_code comparison_to_compcode (enum tree_code);
97 static enum tree_code compcode_to_comparison (enum comparison_code);
98 static tree combine_comparisons (enum tree_code, enum tree_code,
99 enum tree_code, tree, tree, tree);
100 static int truth_value_p (enum tree_code);
101 static int operand_equal_for_comparison_p (tree, tree, tree);
102 static int twoval_comparison_p (tree, tree *, tree *, int *);
103 static tree eval_subst (tree, tree, tree, tree, tree);
104 static tree pedantic_omit_one_operand (tree, tree, tree);
105 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
106 static tree make_bit_field_ref (tree, tree, int, int, int);
107 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
108 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
109 enum machine_mode *, int *, int *,
111 static int all_ones_mask_p (tree, int);
112 static tree sign_bit_p (tree, tree);
113 static int simple_operand_p (tree);
114 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
115 static tree make_range (tree, int *, tree *, tree *);
116 static tree build_range_check (tree, tree, int, tree, tree);
117 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
119 static tree fold_range_test (tree);
120 static tree unextend (tree, int, int, tree);
121 static tree fold_truthop (enum tree_code, tree, tree, tree);
122 static tree optimize_minmax_comparison (tree);
123 static tree extract_muldiv (tree, tree, enum tree_code, tree);
124 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
125 static int multiple_of_p (tree, tree, tree);
126 static tree constant_boolean_node (int, tree);
127 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
129 static bool fold_real_zero_addition_p (tree, tree, int);
130 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
132 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
133 static tree fold_div_compare (enum tree_code, tree, tree, tree);
134 static bool reorder_operands_p (tree, tree);
135 static bool tree_swap_operands_p (tree, tree, bool);
137 static tree fold_negate_const (tree, tree);
138 static tree fold_not_const (tree, tree);
139 static tree fold_relational_const (enum tree_code, tree, tree, tree);
140 static tree fold_relational_hi_lo (enum tree_code *, const tree,
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
151 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
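/* A worked example of the macro above (hypothetical values, assuming a
   64-bit HOST_WIDE_INT; the names are illustrative only):

     HOST_WIDE_INT a = (HOST_WIDE_INT) 1 << 62;
     HOST_WIDE_INT b = a;
     HOST_WIDE_INT sum = (HOST_WIDE_INT)
       ((unsigned HOST_WIDE_INT) a + (unsigned HOST_WIDE_INT) b);

   A and B are both 2^62, so SUM wraps to the most negative value.  A and
   B agree in sign, so ~(a ^ b) has its sign bit set; A and SUM disagree,
   so (a ^ sum) has its sign bit set too; their AND is negative, and
   OVERFLOW_SUM_SIGN (a, b, sum) is therefore nonzero.  */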
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of each word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
160 #define HIGHPART(x) \
161 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
162 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
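/* For illustration (assuming a 64-bit HOST_WIDE_INT, so BASE == 2^32):

     LOWPART (0x123456789abcdef0)  == 0x9abcdef0
     HIGHPART (0x123456789abcdef0) == 0x12345678

   and for any unsigned x, x == LOWPART (x) + HIGHPART (x) * BASE.  */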
164 /* Unpack a two-word integer into 4 words.
165 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
166 WORDS points to the array of HOST_WIDE_INTs. */
static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
171 words[0] = LOWPART (low);
172 words[1] = HIGHPART (low);
173 words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}
177 /* Pack an array of 4 words into a two-word integer.
178 WORDS points to the array of words.
179 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
185 *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
189 /* Make the integer constant T valid for its type by setting to 0 or 1 all
190 the bits in the constant that don't belong in the type.
192 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */
int
force_fit_type (tree t, int overflow)
{
199 unsigned HOST_WIDE_INT low;
203 if (TREE_CODE (t) == REAL_CST)
205 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
206 Consider doing it via real_convert now. */
210 else if (TREE_CODE (t) != INTEGER_CST)
213 low = TREE_INT_CST_LOW (t);
214 high = TREE_INT_CST_HIGH (t);
216 if (POINTER_TYPE_P (TREE_TYPE (t))
217 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
220 prec = TYPE_PRECISION (TREE_TYPE (t));
222 /* First clear all bits that are beyond the type's precision. */
224 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
226 else if (prec > HOST_BITS_PER_WIDE_INT)
227 TREE_INT_CST_HIGH (t)
228 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
231 TREE_INT_CST_HIGH (t) = 0;
232 if (prec < HOST_BITS_PER_WIDE_INT)
233 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
238 if (TYPE_UNSIGNED (TREE_TYPE (t))
239 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
240 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
243 /* If the value's sign bit is set, extend the sign. */
244 if (prec != 2 * HOST_BITS_PER_WIDE_INT
245 && (prec > HOST_BITS_PER_WIDE_INT
	  ? 0 != (TREE_INT_CST_HIGH (t)
		  & ((unsigned HOST_WIDE_INT) 1
		     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
249 : 0 != (TREE_INT_CST_LOW (t)
250 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
252 /* Value is negative:
253 set to 1 all the bits that are outside this type's precision. */
254 if (prec > HOST_BITS_PER_WIDE_INT)
255 TREE_INT_CST_HIGH (t)
256 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
259 TREE_INT_CST_HIGH (t) = -1;
260 if (prec < HOST_BITS_PER_WIDE_INT)
261 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
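/* A small example of the function above (using a hypothetical 8-bit
   signed type purely for illustration): a constant whose low word holds
   0x1ff first has the bits beyond the 8-bit precision cleared, leaving
   0xff; the sign bit of that result is set, so the value is sign-extended
   to -1.  Because the stored bits changed, force_fit_type returns nonzero
   to report the overflow.  */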
271 /* Add two doubleword integers with doubleword result.
272 Each argument is given as two `HOST_WIDE_INT' pieces.
273 One argument is L1 and H1; the other, L2 and H2.
274 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l, *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
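/* Usage sketch for add_double (hypothetical values, assuming a 64-bit
   HOST_WIDE_INT): adding 1 to a doubleword whose low word is all ones
   carries into the high word.

     unsigned HOST_WIDE_INT lo;
     HOST_WIDE_INT hi;
     add_double (~(unsigned HOST_WIDE_INT) 0, 0, 1, 0, &lo, &hi);

   leaves lo == 0 and hi == 1: L wraps to zero and (l < l1) supplies the
   carry.  The result is well inside the signed doubleword range, so the
   return value is 0.  */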
292 /* Negate a doubleword integer with doubleword result.
293 Return nonzero if the operation overflows, assuming it's signed.
294 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
295 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
299 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
305 return (*hv & h1) < 0;
315 /* Multiply two doubleword integers with doubleword result.
316 Return nonzero if the operation overflows, assuming it's signed.
317 Each argument is given as two `HOST_WIDE_INT' pieces.
318 One argument is L1 and H1; the other, L2 and H2.
319 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
323 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
324 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
326 HOST_WIDE_INT arg1[4];
327 HOST_WIDE_INT arg2[4];
328 HOST_WIDE_INT prod[4 * 2];
329 unsigned HOST_WIDE_INT carry;
331 unsigned HOST_WIDE_INT toplow, neglow;
332 HOST_WIDE_INT tophigh, neghigh;
334 encode (arg1, l1, h1);
335 encode (arg2, l2, h2);
337 memset (prod, 0, sizeof prod);
339 for (i = 0; i < 4; i++)
342 for (j = 0; j < 4; j++)
345 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
346 carry += arg1[i] * arg2[j];
347 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
349 prod[k] = LOWPART (carry);
350 carry = HIGHPART (carry);
355 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
357 /* Check for overflow by calculating the top half of the answer in full;
358 it should agree with the low half's sign bit. */
359 decode (prod + 4, &toplow, &tophigh);
362 neg_double (l2, h2, &neglow, &neghigh);
363 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
367 neg_double (l1, h1, &neglow, &neghigh);
368 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
370 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
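/* Overflow sketch for mul_double (hypothetical values, assuming a 64-bit
   HOST_WIDE_INT, i.e. a 128-bit signed doubleword): squaring 2^64,
   passed as (l1, h1) == (0, 1) for both operands, produces 2^128, which
   does not fit.  The low 128 bits of the product wrap to zero, so *LV
   and *HV are set to 0, while the discarded top half decoded above is
   nonzero, and the function returns nonzero to signal the overflow.  */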
373 /* Shift the doubleword integer in L1, H1 left by COUNT places
374 keeping only PREC bits of result.
375 Shift right if COUNT is negative.
376 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
377 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
381 HOST_WIDE_INT count, unsigned int prec,
382 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
384 unsigned HOST_WIDE_INT signmask;
388 rshift_double (l1, h1, -count, prec, lv, hv, arith);
392 if (SHIFT_COUNT_TRUNCATED)
395 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
397 /* Shifting by the host word size is undefined according to the
398 ANSI standard, so we must handle this as a special case. */
402 else if (count >= HOST_BITS_PER_WIDE_INT)
404 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
409 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
410 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
414 /* Sign extend all bits that are beyond the precision. */
416 signmask = -((prec > HOST_BITS_PER_WIDE_INT
417 ? ((unsigned HOST_WIDE_INT) *hv
418 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
419 : (*lv >> (prec - 1))) & 1);
421 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
423 else if (prec >= HOST_BITS_PER_WIDE_INT)
425 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
426 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
431 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
432 *lv |= signmask << prec;
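/* Example for lshift_double (hypothetical values, assuming a 64-bit
   HOST_WIDE_INT and PREC == 128): shifting the doubleword 1 left by 100
   takes the count >= HOST_BITS_PER_WIDE_INT branch above and yields

     *lv == 0,  *hv == (HOST_WIDE_INT) 1 << 36,

   i.e. the bit moves entirely into the high word.  */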
436 /* Shift the doubleword integer in L1, H1 right by COUNT places
437 keeping only PREC bits of result. COUNT must be positive.
438 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
439 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
443 HOST_WIDE_INT count, unsigned int prec,
444 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
447 unsigned HOST_WIDE_INT signmask;
450 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
453 if (SHIFT_COUNT_TRUNCATED)
456 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
458 /* Shifting by the host word size is undefined according to the
459 ANSI standard, so we must handle this as a special case. */
463 else if (count >= HOST_BITS_PER_WIDE_INT)
466 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
470 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
472 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
475 /* Zero / sign extend all bits that are beyond the precision. */
477 if (count >= (HOST_WIDE_INT)prec)
482 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
484 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
486 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
487 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
492 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
493 *lv |= signmask << (prec - count);
497 /* Rotate the doubleword integer in L1, H1 left by COUNT places
498 keeping only PREC bits of result.
499 Rotate right if COUNT is negative.
500 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
503 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
504 HOST_WIDE_INT count, unsigned int prec,
505 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
507 unsigned HOST_WIDE_INT s1l, s2l;
508 HOST_WIDE_INT s1h, s2h;
514 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
/* Rotate the doubleword integer in L1, H1 right by COUNT places
521 keeping only PREC bits of result. COUNT must be positive.
522 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
525 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
526 HOST_WIDE_INT count, unsigned int prec,
527 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
529 unsigned HOST_WIDE_INT s1l, s2l;
530 HOST_WIDE_INT s1h, s2h;
536 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
542 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
543 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
544 CODE is a tree code for a kind of division, one of
545 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
547 It controls how the quotient is rounded to an integer.
548 Return nonzero if the operation overflows.
549 UNS nonzero says do unsigned division. */
int
div_and_round_double (enum tree_code code, int uns,
553 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
554 HOST_WIDE_INT hnum_orig,
555 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
556 HOST_WIDE_INT hden_orig,
557 unsigned HOST_WIDE_INT *lquo,
558 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
562 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
563 HOST_WIDE_INT den[4], quo[4];
565 unsigned HOST_WIDE_INT work;
566 unsigned HOST_WIDE_INT carry = 0;
567 unsigned HOST_WIDE_INT lnum = lnum_orig;
568 HOST_WIDE_INT hnum = hnum_orig;
569 unsigned HOST_WIDE_INT lden = lden_orig;
570 HOST_WIDE_INT hden = hden_orig;
573 if (hden == 0 && lden == 0)
574 overflow = 1, lden = 1;
576 /* Calculate quotient sign and convert operands to unsigned. */
582 /* (minimum integer) / (-1) is the only overflow case. */
583 if (neg_double (lnum, hnum, &lnum, &hnum)
584 && ((HOST_WIDE_INT) lden & hden) == -1)
590 neg_double (lden, hden, &lden, &hden);
594 if (hnum == 0 && hden == 0)
595 { /* single precision */
597 /* This unsigned division rounds toward zero. */
603 { /* trivial case: dividend < divisor */
604 /* hden != 0 already checked. */
611 memset (quo, 0, sizeof quo);
  memset (num, 0, sizeof num);	/* to zero the 5th (scaling) element */
614 memset (den, 0, sizeof den);
616 encode (num, lnum, hnum);
617 encode (den, lden, hden);
619 /* Special code for when the divisor < BASE. */
620 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
622 /* hnum != 0 already checked. */
623 for (i = 4 - 1; i >= 0; i--)
625 work = num[i] + carry * BASE;
626 quo[i] = work / lden;
632 /* Full double precision division,
633 with thanks to Don Knuth's "Seminumerical Algorithms". */
634 int num_hi_sig, den_hi_sig;
635 unsigned HOST_WIDE_INT quo_est, scale;
637 /* Find the highest nonzero divisor digit. */
638 for (i = 4 - 1;; i--)
      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */
648 scale = BASE / (den[den_hi_sig] + 1);
650 { /* scale divisor and dividend */
652 for (i = 0; i <= 4 - 1; i++)
654 work = (num[i] * scale) + carry;
655 num[i] = LOWPART (work);
656 carry = HIGHPART (work);
661 for (i = 0; i <= 4 - 1; i++)
663 work = (den[i] * scale) + carry;
664 den[i] = LOWPART (work);
665 carry = HIGHPART (work);
666 if (den[i] != 0) den_hi_sig = i;
673 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order divisor digit.
	     quo_est is never low and is at most 2 too high.  */
678 unsigned HOST_WIDE_INT tmp;
680 num_hi_sig = i + den_hi_sig + 1;
681 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
682 if (num[num_hi_sig] != den[den_hi_sig])
683 quo_est = work / den[den_hi_sig];
	  /* Refine quo_est so it's usually correct, and at most one too high.  */
688 tmp = work - quo_est * den[den_hi_sig];
690 && (den[den_hi_sig - 1] * quo_est
691 > (tmp * BASE + num[num_hi_sig - 2])))
694 /* Try QUO_EST as the quotient digit, by multiplying the
695 divisor by QUO_EST and subtracting from the remaining dividend.
696 Keep in mind that QUO_EST is the I - 1st digit. */
699 for (j = 0; j <= den_hi_sig; j++)
701 work = quo_est * den[j] + carry;
702 carry = HIGHPART (work);
703 work = num[i + j] - LOWPART (work);
704 num[i + j] = LOWPART (work);
705 carry += HIGHPART (work) != 0;
708 /* If quo_est was high by one, then num[i] went negative and
709 we need to correct things. */
710 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
713 carry = 0; /* add divisor back in */
714 for (j = 0; j <= den_hi_sig; j++)
716 work = num[i + j] + den[j] + carry;
717 carry = HIGHPART (work);
718 num[i + j] = LOWPART (work);
721 num [num_hi_sig] += carry;
724 /* Store the quotient digit. */
729 decode (quo, lquo, hquo);
732 /* If result is negative, make it so. */
734 neg_double (*lquo, *hquo, lquo, hquo);
736 /* Compute trial remainder: rem = num - (quo * den) */
737 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
738 neg_double (*lrem, *hrem, lrem, hrem);
739 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
744 case TRUNC_MOD_EXPR: /* round toward zero */
745 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
749 case FLOOR_MOD_EXPR: /* round toward negative infinity */
750 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
753 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
761 case CEIL_MOD_EXPR: /* round toward positive infinity */
762 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
764 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
772 case ROUND_MOD_EXPR: /* round to closest integer */
774 unsigned HOST_WIDE_INT labs_rem = *lrem;
775 HOST_WIDE_INT habs_rem = *hrem;
776 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
777 HOST_WIDE_INT habs_den = hden, htwice;
779 /* Get absolute values. */
781 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
783 neg_double (lden, hden, &labs_den, &habs_den);
785 /* If (2 * abs (lrem) >= abs (lden)) */
786 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
787 labs_rem, habs_rem, <wice, &htwice);
789 if (((unsigned HOST_WIDE_INT) habs_den
790 < (unsigned HOST_WIDE_INT) htwice)
791 || (((unsigned HOST_WIDE_INT) habs_den
792 == (unsigned HOST_WIDE_INT) htwice)
793 && (labs_den < ltwice)))
797 add_double (*lquo, *hquo,
798 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
801 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
813 /* Compute true remainder: rem = num - (quo * den) */
814 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
815 neg_double (*lrem, *hrem, lrem, hrem);
816 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
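/* A rounding example for the switch above (signed division of -7 by 2;
   the values are purely illustrative):

     TRUNC_DIV_EXPR  quo == -3, rem == -1  (round toward zero)
     FLOOR_DIV_EXPR  quo == -4, rem ==  1  (round toward negative infinity)
     CEIL_DIV_EXPR   quo == -3, rem == -1  (round toward positive infinity)
     ROUND_DIV_EXPR  quo == -4, rem ==  1  (2*|rem| >= |den|, so the
					    halfway case rounds away from
					    zero)

   In every case num == quo * den + rem still holds.  */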
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */
824 negate_mathfn_p (enum built_in_function code)
848 /* Determine whether an expression T can be cheaply negated using
849 the function negate_expr. */
852 negate_expr_p (tree t)
854 unsigned HOST_WIDE_INT val;
861 type = TREE_TYPE (t);
864 switch (TREE_CODE (t))
867 if (TYPE_UNSIGNED (type) || ! flag_trapv)
870 /* Check that -CST will not overflow type. */
871 prec = TYPE_PRECISION (type);
872 if (prec > HOST_BITS_PER_WIDE_INT)
874 if (TREE_INT_CST_LOW (t) != 0)
876 prec -= HOST_BITS_PER_WIDE_INT;
877 val = TREE_INT_CST_HIGH (t);
880 val = TREE_INT_CST_LOW (t);
881 if (prec < HOST_BITS_PER_WIDE_INT)
882 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
883 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
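/* For instance, with a hypothetical 32-bit type: prec == 32, so the test
   above rejects only the value 0x80000000, the most negative integer,
   whose negation is not representable; every other constant may be
   negated without overflow.  */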
890 return negate_expr_p (TREE_REALPART (t))
891 && negate_expr_p (TREE_IMAGPART (t));
894 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
896 /* -(A + B) -> (-B) - A. */
897 if (negate_expr_p (TREE_OPERAND (t, 1))
898 && reorder_operands_p (TREE_OPERAND (t, 0),
899 TREE_OPERAND (t, 1)))
901 /* -(A + B) -> (-A) - B. */
902 return negate_expr_p (TREE_OPERAND (t, 0));
905 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
906 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
907 && reorder_operands_p (TREE_OPERAND (t, 0),
908 TREE_OPERAND (t, 1));
911 if (TYPE_UNSIGNED (TREE_TYPE (t)))
917 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
918 return negate_expr_p (TREE_OPERAND (t, 1))
919 || negate_expr_p (TREE_OPERAND (t, 0));
923 /* Negate -((double)float) as (double)(-float). */
924 if (TREE_CODE (type) == REAL_TYPE)
926 tree tem = strip_float_extensions (t);
928 return negate_expr_p (tem);
933 /* Negate -f(x) as f(-x). */
934 if (negate_mathfn_p (builtin_mathfn_code (t)))
935 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
939 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
940 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
942 tree op1 = TREE_OPERAND (t, 1);
943 if (TREE_INT_CST_HIGH (op1) == 0
944 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
945 == TREE_INT_CST_LOW (op1))
956 /* Given T, an expression, return the negation of T. Allow for T to be
957 null, in which case return null. */
968 type = TREE_TYPE (t);
971 switch (TREE_CODE (t))
974 tem = fold_negate_const (t, type);
975 if (! TREE_OVERFLOW (tem)
976 || TYPE_UNSIGNED (type)
982 tem = fold_negate_const (t, type);
983 /* Two's complement FP formats, such as c4x, may overflow. */
984 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
985 return fold_convert (type, tem);
990 tree rpart = negate_expr (TREE_REALPART (t));
991 tree ipart = negate_expr (TREE_IMAGPART (t));
993 if ((TREE_CODE (rpart) == REAL_CST
994 && TREE_CODE (ipart) == REAL_CST)
995 || (TREE_CODE (rpart) == INTEGER_CST
996 && TREE_CODE (ipart) == INTEGER_CST))
997 return build_complex (type, rpart, ipart);
1002 return fold_convert (type, TREE_OPERAND (t, 0));
1005 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1007 /* -(A + B) -> (-B) - A. */
1008 if (negate_expr_p (TREE_OPERAND (t, 1))
1009 && reorder_operands_p (TREE_OPERAND (t, 0),
1010 TREE_OPERAND (t, 1)))
1012 tem = negate_expr (TREE_OPERAND (t, 1));
1013 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1014 tem, TREE_OPERAND (t, 0)));
1015 return fold_convert (type, tem);
1018 /* -(A + B) -> (-A) - B. */
1019 if (negate_expr_p (TREE_OPERAND (t, 0)))
1021 tem = negate_expr (TREE_OPERAND (t, 0));
1022 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1023 tem, TREE_OPERAND (t, 1)));
1024 return fold_convert (type, tem);
1030 /* - (A - B) -> B - A */
1031 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1032 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1033 return fold_convert (type,
1034 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1035 TREE_OPERAND (t, 1),
1036 TREE_OPERAND (t, 0))));
1040 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1046 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1048 tem = TREE_OPERAND (t, 1);
1049 if (negate_expr_p (tem))
1050 return fold_convert (type,
1051 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1052 TREE_OPERAND (t, 0),
1053 negate_expr (tem))));
1054 tem = TREE_OPERAND (t, 0);
1055 if (negate_expr_p (tem))
1056 return fold_convert (type,
1057 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1059 TREE_OPERAND (t, 1))));
1064 /* Convert -((double)float) into (double)(-float). */
1065 if (TREE_CODE (type) == REAL_TYPE)
1067 tem = strip_float_extensions (t);
1068 if (tem != t && negate_expr_p (tem))
1069 return fold_convert (type, negate_expr (tem));
1074 /* Negate -f(x) as f(-x). */
1075 if (negate_mathfn_p (builtin_mathfn_code (t))
1076 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1078 tree fndecl, arg, arglist;
1080 fndecl = get_callee_fndecl (t);
1081 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1082 arglist = build_tree_list (NULL_TREE, arg);
1083 return build_function_call_expr (fndecl, arglist);
1088 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1089 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1091 tree op1 = TREE_OPERAND (t, 1);
1092 if (TREE_INT_CST_HIGH (op1) == 0
1093 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1094 == TREE_INT_CST_LOW (op1))
1096 tree ntype = TYPE_UNSIGNED (type)
1097 ? lang_hooks.types.signed_type (type)
1098 : lang_hooks.types.unsigned_type (type);
1099 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1100 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1101 return fold_convert (type, temp);
1110 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1111 return fold_convert (type, tem);
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
1116 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1117 commutative arithmetic operation. Store the constant part into *CONP,
1118 the literal in *LITP and return the variable part. If a part isn't
1119 present, set it to null. If the tree does not decompose in this way,
1120 return the entire tree as the variable part and the other parts as null.
   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except when it is a
   literal, for which we use *MINUS_LITP instead.
1126 If NEGATE_P is true, we are negating all of IN, again except a literal
1127 for which we use *MINUS_LITP instead.
1129 If IN is itself a literal or constant, return it as appropriate.
1131 Note that we do not guarantee that any of the three values will be the
1132 same type as IN, but they will have the same signedness and mode. */
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
1144 /* Strip any conversions that don't change the machine mode or signedness. */
1145 STRIP_SIGN_NOPS (in);
1147 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1149 else if (TREE_CODE (in) == code
1150 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1151 /* We can associate addition and subtraction together (even
1152 though the C standard doesn't say so) for integers because
1153 the value is not affected. For reals, the value might be
1154 affected, so we can't. */
1155 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1156 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1158 tree op0 = TREE_OPERAND (in, 0);
1159 tree op1 = TREE_OPERAND (in, 1);
1160 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1161 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1163 /* First see if either of the operands is a literal, then a constant. */
1164 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1165 *litp = op0, op0 = 0;
1166 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1167 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1169 if (op0 != 0 && TREE_CONSTANT (op0))
1170 *conp = op0, op0 = 0;
1171 else if (op1 != 0 && TREE_CONSTANT (op1))
1172 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1174 /* If we haven't dealt with either operand, this is not a case we can
1175 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1176 if (op0 != 0 && op1 != 0)
1181 var = op1, neg_var_p = neg1_p;
1183 /* Now do any needed negations. */
1185 *minus_litp = *litp, *litp = 0;
1187 *conp = negate_expr (*conp);
1189 var = negate_expr (var);
1191 else if (TREE_CONSTANT (in))
1199 *minus_litp = *litp, *litp = 0;
1200 else if (*minus_litp)
1201 *litp = *minus_litp, *minus_litp = 0;
1202 *conp = negate_expr (*conp);
1203 var = negate_expr (var);
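/* A decomposition sketch for split_tree (hypothetical operands): splitting
   the tree for X - 3 with CODE == PLUS_EXPR returns X as the variable
   part and, because the literal was subtracted, stores 3 in *MINUS_LITP
   rather than in *LITP; *CONP is left null.  */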
1209 /* Re-associate trees split by the above function. T1 and T2 are either
1210 expressions to associate or null. Return the new expression, if any. If
1211 we build an operation, do it in TYPE and with CODE. */
1214 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1221 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1222 try to fold this since we will have infinite recursion. But do
1223 deal with any NEGATE_EXPRs. */
1224 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1225 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1227 if (code == PLUS_EXPR)
1229 if (TREE_CODE (t1) == NEGATE_EXPR)
1230 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1231 fold_convert (type, TREE_OPERAND (t1, 0)));
1232 else if (TREE_CODE (t2) == NEGATE_EXPR)
1233 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1234 fold_convert (type, TREE_OPERAND (t2, 0)));
1236 return build2 (code, type, fold_convert (type, t1),
1237 fold_convert (type, t2));
1240 return fold (build2 (code, type, fold_convert (type, t1),
1241 fold_convert (type, t2)));
1244 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1245 to produce a new constant.
1247 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
1252 unsigned HOST_WIDE_INT int1l, int2l;
1253 HOST_WIDE_INT int1h, int2h;
1254 unsigned HOST_WIDE_INT low;
1256 unsigned HOST_WIDE_INT garbagel;
1257 HOST_WIDE_INT garbageh;
1259 tree type = TREE_TYPE (arg1);
1260 int uns = TYPE_UNSIGNED (type);
1262 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1264 int no_overflow = 0;
1266 int1l = TREE_INT_CST_LOW (arg1);
1267 int1h = TREE_INT_CST_HIGH (arg1);
1268 int2l = TREE_INT_CST_LOW (arg2);
1269 int2h = TREE_INT_CST_HIGH (arg2);
1274 low = int1l | int2l, hi = int1h | int2h;
1278 low = int1l ^ int2l, hi = int1h ^ int2h;
1282 low = int1l & int2l, hi = int1h & int2h;
1288 /* It's unclear from the C standard whether shifts can overflow.
1289 The following code ignores overflow; perhaps a C standard
1290 interpretation ruling is needed. */
1291 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1299 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1304 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1308 neg_double (int2l, int2h, &low, &hi);
1309 add_double (int1l, int1h, low, hi, &low, &hi);
1310 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1314 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1317 case TRUNC_DIV_EXPR:
1318 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1319 case EXACT_DIV_EXPR:
1320 /* This is a shortcut for a common special case. */
1321 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1322 && ! TREE_CONSTANT_OVERFLOW (arg1)
1323 && ! TREE_CONSTANT_OVERFLOW (arg2)
1324 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1326 if (code == CEIL_DIV_EXPR)
1329 low = int1l / int2l, hi = 0;
1333 /* ... fall through ... */
1335 case ROUND_DIV_EXPR:
1336 if (int2h == 0 && int2l == 1)
1338 low = int1l, hi = int1h;
1341 if (int1l == int2l && int1h == int2h
1342 && ! (int1l == 0 && int1h == 0))
1347 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1348 &low, &hi, &garbagel, &garbageh);
1351 case TRUNC_MOD_EXPR:
1352 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1353 /* This is a shortcut for a common special case. */
1354 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1355 && ! TREE_CONSTANT_OVERFLOW (arg1)
1356 && ! TREE_CONSTANT_OVERFLOW (arg2)
1357 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1359 if (code == CEIL_MOD_EXPR)
1361 low = int1l % int2l, hi = 0;
1365 /* ... fall through ... */
1367 case ROUND_MOD_EXPR:
1368 overflow = div_and_round_double (code, uns,
1369 int1l, int1h, int2l, int2h,
1370 &garbagel, &garbageh, &low, &hi);
1376 low = (((unsigned HOST_WIDE_INT) int1h
1377 < (unsigned HOST_WIDE_INT) int2h)
1378 || (((unsigned HOST_WIDE_INT) int1h
1379 == (unsigned HOST_WIDE_INT) int2h)
1382 low = (int1h < int2h
1383 || (int1h == int2h && int1l < int2l));
1385 if (low == (code == MIN_EXPR))
1386 low = int1l, hi = int1h;
1388 low = int2l, hi = int2h;
  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     its results.  */
  if (is_sizetype
1399 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1400 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1401 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1402 return size_int_type_wide (low, type);
1405 t = build_int_2 (low, hi);
1406 TREE_TYPE (t) = TREE_TYPE (arg1);
1411 ? (!uns || is_sizetype) && overflow
1412 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1414 | TREE_OVERFLOW (arg1)
1415 | TREE_OVERFLOW (arg2));
1417 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1418 So check if force_fit_type truncated the value. */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
1421 && (TREE_INT_CST_HIGH (t) != hi
1422 || TREE_INT_CST_LOW (t) != low))
1423 TREE_OVERFLOW (t) = 1;
1425 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1426 | TREE_CONSTANT_OVERFLOW (arg1)
1427 | TREE_CONSTANT_OVERFLOW (arg2));
1431 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1432 constant. We assume ARG1 and ARG2 have the same data type, or at least
1433 are the same kind of constant and the same machine mode.
1435 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
1443 if (TREE_CODE (arg1) == INTEGER_CST)
1444 return int_const_binop (code, arg1, arg2, notrunc);
1446 if (TREE_CODE (arg1) == REAL_CST)
1448 enum machine_mode mode;
1451 REAL_VALUE_TYPE value;
1454 d1 = TREE_REAL_CST (arg1);
1455 d2 = TREE_REAL_CST (arg2);
1457 type = TREE_TYPE (arg1);
1458 mode = TYPE_MODE (type);
1460 /* Don't perform operation if we honor signaling NaNs and
1461 either operand is a NaN. */
1462 if (HONOR_SNANS (mode)
1463 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1466 /* Don't perform operation if it would raise a division
1467 by zero exception. */
1468 if (code == RDIV_EXPR
1469 && REAL_VALUES_EQUAL (d2, dconst0)
1470 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1473 /* If either operand is a NaN, just return it. Otherwise, set up
1474 for floating-point trap; we return an overflow. */
1475 if (REAL_VALUE_ISNAN (d1))
1477 else if (REAL_VALUE_ISNAN (d2))
1480 REAL_ARITHMETIC (value, code, d1, d2);
1482 t = build_real (type, real_value_truncate (mode, value));
1485 = (force_fit_type (t, 0)
1486 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1487 TREE_CONSTANT_OVERFLOW (t)
1489 | TREE_CONSTANT_OVERFLOW (arg1)
1490 | TREE_CONSTANT_OVERFLOW (arg2);
1493 if (TREE_CODE (arg1) == COMPLEX_CST)
1495 tree type = TREE_TYPE (arg1);
1496 tree r1 = TREE_REALPART (arg1);
1497 tree i1 = TREE_IMAGPART (arg1);
1498 tree r2 = TREE_REALPART (arg2);
1499 tree i2 = TREE_IMAGPART (arg2);
1505 t = build_complex (type,
1506 const_binop (PLUS_EXPR, r1, r2, notrunc),
1507 const_binop (PLUS_EXPR, i1, i2, notrunc));
1511 t = build_complex (type,
1512 const_binop (MINUS_EXPR, r1, r2, notrunc),
1513 const_binop (MINUS_EXPR, i1, i2, notrunc));
1517 t = build_complex (type,
1518 const_binop (MINUS_EXPR,
1519 const_binop (MULT_EXPR,
1521 const_binop (MULT_EXPR,
1524 const_binop (PLUS_EXPR,
1525 const_binop (MULT_EXPR,
1527 const_binop (MULT_EXPR,
1535 = const_binop (PLUS_EXPR,
1536 const_binop (MULT_EXPR, r2, r2, notrunc),
1537 const_binop (MULT_EXPR, i2, i2, notrunc),
1540 t = build_complex (type,
1542 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1543 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1544 const_binop (PLUS_EXPR,
1545 const_binop (MULT_EXPR, r1, r2,
1547 const_binop (MULT_EXPR, i1, i2,
1550 magsquared, notrunc),
1552 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1553 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1554 const_binop (MINUS_EXPR,
1555 const_binop (MULT_EXPR, i1, r2,
1557 const_binop (MULT_EXPR, r1, i2,
1560 magsquared, notrunc));
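/* The division above uses the textbook identity

     (r1 + i1*i) / (r2 + i2*i)
       = ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2),

   where each component division is TRUNC_DIV_EXPR for integral parts
   and RDIV_EXPR otherwise.  */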
1572 /* These are the hash table functions for the hash table of INTEGER_CST
1573 nodes of a sizetype. */
/* Return the hash code for X, an INTEGER_CST.  */
1578 size_htab_hash (const void *x)
1582 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1583 ^ htab_hash_pointer (TREE_TYPE (t))
1584 ^ (TREE_OVERFLOW (t) << 20));
/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, which is also an INTEGER_CST tree node.  */
1591 size_htab_eq (const void *x, const void *y)
1596 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1597 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1598 && TREE_TYPE (xt) == TREE_TYPE (yt)
1599 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
/* Return an INTEGER_CST whose low-order HOST_BITS_PER_WIDE_INT bits are
   given by NUMBER and whose type is the sizetype represented by KIND.  */
1606 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1608 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1611 /* Likewise, but the desired type is specified explicitly. */
1613 static GTY (()) tree new_const;
1614 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1618 size_int_type_wide (HOST_WIDE_INT number, tree type)
1624 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1625 new_const = make_node (INTEGER_CST);
1628 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1629 hash table, we return the value from the hash table. Otherwise, we
1630 place that in the hash table and make a new node for the next time. */
1631 TREE_INT_CST_LOW (new_const) = number;
1632 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1633 TREE_TYPE (new_const) = type;
1634 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1635 = force_fit_type (new_const, 0);
1637 slot = htab_find_slot (size_htab, new_const, INSERT);
1643 new_const = make_node (INTEGER_CST);
1647 return (tree) *slot;
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a size type.
   If the operands are constant, so is the result.  */
tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
1658 tree type = TREE_TYPE (arg0);
1660 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1661 || type != TREE_TYPE (arg1))
1664 /* Handle the special case of two integer constants faster. */
1665 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1667 /* And some specific cases even faster than that. */
1668 if (code == PLUS_EXPR && integer_zerop (arg0))
1670 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1671 && integer_zerop (arg1))
1673 else if (code == MULT_EXPR && integer_onep (arg0))
1676 /* Handle general case of two integer constants. */
1677 return int_const_binop (code, arg0, arg1, 0);
1680 if (arg0 == error_mark_node || arg1 == error_mark_node)
1681 return error_mark_node;
1683 return fold (build2 (code, type, arg0, arg1));
1686 /* Given two values, either both of sizetype or both of bitsizetype,
1687 compute the difference between the two values. Return the value
   in a signed type corresponding to the type of the operands.  */
1691 size_diffop (tree arg0, tree arg1)
1693 tree type = TREE_TYPE (arg0);
1696 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1697 || type != TREE_TYPE (arg1))
1700 /* If the type is already signed, just do the simple thing. */
1701 if (!TYPE_UNSIGNED (type))
1702 return size_binop (MINUS_EXPR, arg0, arg1);
1704 ctype = (type == bitsizetype || type == ubitsizetype
1705 ? sbitsizetype : ssizetype);
1707 /* If either operand is not a constant, do the conversions to the signed
1708 type and subtract. The hardware will do the right thing with any
1709 overflow in the subtraction. */
1710 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1711 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1712 fold_convert (ctype, arg1));
1714 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1715 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1716 overflow) and negate (which can't either). Special-case a result
1717 of zero while we're here. */
1718 if (tree_int_cst_equal (arg0, arg1))
1719 return fold_convert (ctype, integer_zero_node);
1720 else if (tree_int_cst_lt (arg1, arg0))
1721 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
		     fold_convert (ctype, size_binop (MINUS_EXPR,
						      arg1, arg0)));
}
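/* A behavior sketch for size_diffop (hypothetical constants): with
   ARG0 == 4 and ARG1 == 7 in sizetype, ARG1 is the larger, so the code
   above computes 7 - 4 == 3 in the unsigned type, converts it to
   ssizetype, and subtracts it from zero, yielding -3 without ever
   forming an out-of-range unsigned difference.  */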
1729 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1730 type TYPE. If no simplification can be done return NULL_TREE. */
static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;
1738 if (TREE_TYPE (arg1) == type)
1741 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1743 if (TREE_CODE (arg1) == INTEGER_CST)
1745 /* If we would build a constant wider than GCC supports,
1746 leave the conversion unfolded. */
1747 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1750 /* If we are trying to make a sizetype for a small integer, use
1751 size_int to pick up cached types to reduce duplicate nodes. */
1752 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1753 && !TREE_CONSTANT_OVERFLOW (arg1)
1754 && compare_tree_int (arg1, 10000) < 0)
1755 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1757 /* Given an integer constant, make new constant with new type,
1758 appropriately sign-extended or truncated. */
1759 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1760 TREE_INT_CST_HIGH (arg1));
1761 TREE_TYPE (t) = type;
1762 /* Indicate an overflow if (1) ARG1 already overflowed,
1763 or (2) force_fit_type indicates an overflow.
1764 Tell force_fit_type that an overflow has already occurred
1765 if ARG1 is a too-large unsigned value and T is signed.
1766 But don't indicate an overflow if converting a pointer. */
1768 = ((force_fit_type (t,
1769 (TREE_INT_CST_HIGH (arg1) < 0
1770 && (TYPE_UNSIGNED (type)
1771 < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
1772 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1773 || TREE_OVERFLOW (arg1));
1774 TREE_CONSTANT_OVERFLOW (t)
1775 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1778 else if (TREE_CODE (arg1) == REAL_CST)
1780 /* The following code implements the floating point to integer
1781 conversion rules required by the Java Language Specification,
1782 that IEEE NaNs are mapped to zero and values that overflow
1783 the target precision saturate, i.e. values greater than
1784 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1785 are mapped to INT_MIN. These semantics are allowed by the
1786 C and C++ standards that simply state that the behavior of
1787 FP-to-integer conversion is unspecified upon overflow. */
1789 HOST_WIDE_INT high, low;
1792 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1796 case FIX_TRUNC_EXPR:
1797 real_trunc (&r, VOIDmode, &x);
1801 real_ceil (&r, VOIDmode, &x);
1804 case FIX_FLOOR_EXPR:
1805 real_floor (&r, VOIDmode, &x);
1808 case FIX_ROUND_EXPR:
1809 real_round (&r, VOIDmode, &x);
1816 /* If R is NaN, return zero and show we have an overflow. */
1817 if (REAL_VALUE_ISNAN (r))
	  /* See if R is less than the lower bound or greater than the
	     upper bound.  */
1829 tree lt = TYPE_MIN_VALUE (type);
1830 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1831 if (REAL_VALUES_LESS (r, l))
1834 high = TREE_INT_CST_HIGH (lt);
1835 low = TREE_INT_CST_LOW (lt);
1841 tree ut = TYPE_MAX_VALUE (type);
1844 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1845 if (REAL_VALUES_LESS (u, r))
1848 high = TREE_INT_CST_HIGH (ut);
1849 low = TREE_INT_CST_LOW (ut);
1855 REAL_VALUE_TO_INT (&low, &high, r);
1857 t = build_int_2 (low, high);
1858 TREE_TYPE (t) = type;
1860 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1861 TREE_CONSTANT_OVERFLOW (t)
1862 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1866 else if (TREE_CODE (type) == REAL_TYPE)
1868 if (TREE_CODE (arg1) == INTEGER_CST)
1869 return build_real_from_int_cst (type, arg1);
1870 if (TREE_CODE (arg1) == REAL_CST)
1872 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1874 /* We make a copy of ARG1 so that we don't modify an
1875 existing constant tree. */
1876 t = copy_node (arg1);
1877 TREE_TYPE (t) = type;
1881 t = build_real (type,
1882 real_value_truncate (TYPE_MODE (type),
1883 TREE_REAL_CST (arg1)));
1886 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1887 TREE_CONSTANT_OVERFLOW (t)
1888 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1895 /* Convert expression ARG to type TYPE. Used by the middle-end for
1896 simple conversions in preference to calling the front-end's convert. */
1899 fold_convert (tree type, tree arg)
1901 tree orig = TREE_TYPE (arg);
1907 if (TREE_CODE (arg) == ERROR_MARK
1908 || TREE_CODE (type) == ERROR_MARK
1909 || TREE_CODE (orig) == ERROR_MARK)
1910 return error_mark_node;
1912 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1913 return fold (build1 (NOP_EXPR, type, arg));
1915 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1916 || TREE_CODE (type) == OFFSET_TYPE)
1918 if (TREE_CODE (arg) == INTEGER_CST)
1920 tem = fold_convert_const (NOP_EXPR, type, arg);
1921 if (tem != NULL_TREE)
1924 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1925 || TREE_CODE (orig) == OFFSET_TYPE)
1926 return fold (build1 (NOP_EXPR, type, arg));
1927 if (TREE_CODE (orig) == COMPLEX_TYPE)
1929 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1930 return fold_convert (type, tem);
1932 if (TREE_CODE (orig) == VECTOR_TYPE
1933 && GET_MODE_SIZE (TYPE_MODE (type))
1934 == GET_MODE_SIZE (TYPE_MODE (orig)))
1935 return fold (build1 (NOP_EXPR, type, arg));
1937 else if (TREE_CODE (type) == REAL_TYPE)
1939 if (TREE_CODE (arg) == INTEGER_CST)
1941 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1942 if (tem != NULL_TREE)
1945 else if (TREE_CODE (arg) == REAL_CST)
1947 tem = fold_convert_const (NOP_EXPR, type, arg);
1948 if (tem != NULL_TREE)
1952 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1953 return fold (build1 (FLOAT_EXPR, type, arg));
1954 if (TREE_CODE (orig) == REAL_TYPE)
1955 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1957 if (TREE_CODE (orig) == COMPLEX_TYPE)
1959 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1960 return fold_convert (type, tem);
1963 else if (TREE_CODE (type) == COMPLEX_TYPE)
1965 if (INTEGRAL_TYPE_P (orig)
1966 || POINTER_TYPE_P (orig)
1967 || TREE_CODE (orig) == REAL_TYPE)
1968 return build2 (COMPLEX_EXPR, type,
1969 fold_convert (TREE_TYPE (type), arg),
1970 fold_convert (TREE_TYPE (type), integer_zero_node));
1971 if (TREE_CODE (orig) == COMPLEX_TYPE)
1975 if (TREE_CODE (arg) == COMPLEX_EXPR)
1977 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1978 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1979 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1982 arg = save_expr (arg);
1983 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1984 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1985 rpart = fold_convert (TREE_TYPE (type), rpart);
1986 ipart = fold_convert (TREE_TYPE (type), ipart);
1987 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1990 else if (TREE_CODE (type) == VECTOR_TYPE)
1992 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1993 && GET_MODE_SIZE (TYPE_MODE (type))
1994 == GET_MODE_SIZE (TYPE_MODE (orig)))
1995 return fold (build1 (NOP_EXPR, type, arg));
1996 if (TREE_CODE (orig) == VECTOR_TYPE
1997 && GET_MODE_SIZE (TYPE_MODE (type))
1998 == GET_MODE_SIZE (TYPE_MODE (orig)))
1999 return fold (build1 (NOP_EXPR, type, arg));
2001 else if (VOID_TYPE_P (type))
2002 return fold (build1 (CONVERT_EXPR, type, arg));
2006 /* Return an expr equal to X but certainly not valid as an lvalue. */
2011 /* We only need to wrap lvalue tree codes. */
2012 switch (TREE_CODE (x))
2026 case ARRAY_RANGE_REF:
2031 case PREINCREMENT_EXPR:
2032 case PREDECREMENT_EXPR:
2035 case TRY_CATCH_EXPR:
2036 case WITH_CLEANUP_EXPR:
2048 /* Assume the worst for front-end tree codes. */
2049 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2053 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2056 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2057 Zero means allow extended lvalues. */
2059 int pedantic_lvalues;
2061 /* When pedantic, return an expr equal to X but certainly not valid as a
2062 pedantic lvalue. Otherwise, return X. */
2065 pedantic_non_lvalue (tree x)
2067 if (pedantic_lvalues)
2068 return non_lvalue (x);
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we take a HONOR_NANS
   flag rather than a machine mode: if reversing the comparison is unsafe,
   return ERROR_MARK.  */
2078 static enum tree_code
2079 invert_tree_comparison (enum tree_code code, bool honor_nans)
2081 if (honor_nans && flag_trapping_math)
2091 return honor_nans ? UNLE_EXPR : LE_EXPR;
2093 return honor_nans ? UNLT_EXPR : LT_EXPR;
2095 return honor_nans ? UNGE_EXPR : GE_EXPR;
2097 return honor_nans ? UNGT_EXPR : GT_EXPR;
2111 return UNORDERED_EXPR;
2112 case UNORDERED_EXPR:
2113 return ORDERED_EXPR;
2119 /* Similar, but return the comparison that results if the operands are
2120 swapped. This is safe for floating-point. */
2122 static enum tree_code
2123 swap_tree_comparison (enum tree_code code)
2144 /* Convert a comparison tree code from an enum tree_code representation
2145 into a compcode bit-based encoding. This function is the inverse of
2146 compcode_to_comparison. */
2148 static enum comparison_code
2149 comparison_to_compcode (enum tree_code code)
2166 return COMPCODE_ORD;
2167 case UNORDERED_EXPR:
2168 return COMPCODE_UNORD;
2170 return COMPCODE_UNLT;
2172 return COMPCODE_UNEQ;
2174 return COMPCODE_UNLE;
2176 return COMPCODE_UNGT;
2178 return COMPCODE_LTGT;
2180 return COMPCODE_UNGE;
2186 /* Convert a compcode bit-based encoding of a comparison operator back
2187 to GCC's enum tree_code representation. This function is the
2188 inverse of comparison_to_compcode. */
2190 static enum tree_code
2191 compcode_to_comparison (enum comparison_code code)
2208 return ORDERED_EXPR;
2209 case COMPCODE_UNORD:
2210 return UNORDERED_EXPR;
2228 /* Return a tree for the comparison which is the combination of
2229 doing the AND or OR (depending on CODE) of the two operations LCODE
2230 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2231 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2232 if this makes the transformation invalid. */
2235 combine_comparisons (enum tree_code code, enum tree_code lcode,
2236 enum tree_code rcode, tree truth_type,
2237 tree ll_arg, tree lr_arg)
2239 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2240 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2241 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2242 enum comparison_code compcode;
2246 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2247 compcode = lcompcode & rcompcode;
2250 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2251 compcode = lcompcode | rcompcode;
2260 /* Eliminate unordered comparisons, as well as LTGT and ORD
2261 which are not used unless the mode has NaNs. */
2262 compcode &= ~COMPCODE_UNORD;
2263 if (compcode == COMPCODE_LTGT)
2264 compcode = COMPCODE_NE;
2265 else if (compcode == COMPCODE_ORD)
2266 compcode = COMPCODE_TRUE;
2268 else if (flag_trapping_math)
2270 /* Check that the original operation and the optimized ones will trap
2271 under the same condition. */
2272 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2273 && (lcompcode != COMPCODE_EQ)
2274 && (lcompcode != COMPCODE_ORD);
2275 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2276 && (rcompcode != COMPCODE_EQ)
2277 && (rcompcode != COMPCODE_ORD);
2278 bool trap = (compcode & COMPCODE_UNORD) == 0
2279 && (compcode != COMPCODE_EQ)
2280 && (compcode != COMPCODE_ORD);
2282 /* In a short-circuited boolean expression the LHS might be
2283 such that the RHS, if evaluated, will never trap. For
2284 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2285 if neither x nor y is NaN. (This is a mixed blessing: for
2286 example, the expression above will never trap, hence
2287 optimizing it to x < y would be invalid). */
2288 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2289 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2292 /* If the comparison was short-circuited, and only the RHS
2293 trapped, we may now generate a spurious trap. */
2295 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2298 /* If we changed the conditions that cause a trap, we lose. */
2299 if ((ltrap || rtrap) != trap)
2303 if (compcode == COMPCODE_TRUE)
2304 return constant_boolean_node (true, truth_type);
2305 else if (compcode == COMPCODE_FALSE)
2306 return constant_boolean_node (false, truth_type);
2308 return fold (build2 (compcode_to_comparison (compcode),
2309 truth_type, ll_arg, lr_arg));
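/* Because each basic outcome (LT, EQ, GT, UNORD) occupies its own bit in
   the compcode encoding, the AND/OR above is simple set arithmetic.  For
   example, combining (x < y) || (x == y) ORs COMPCODE_LT with
   COMPCODE_EQ, giving COMPCODE_LE, which converts back to LE_EXPR.  */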
2312 /* Return nonzero if CODE is a tree code that represents a truth value. */
2315 truth_value_p (enum tree_code code)
2317 return (TREE_CODE_CLASS (code) == '<'
2318 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2319 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2320 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2323 /* Return nonzero if two operands (typically of the same tree node)
2324 are necessarily equal. If either argument has side-effects this
2325 function returns zero. FLAGS modifies behavior as follows:
2327 If OEP_ONLY_CONST is set, only return nonzero for constants.
2328 This function tests whether the operands are indistinguishable;
2329 it does not test whether they are equal using C's == operation.
2330 The distinction is important for IEEE floating point, because
2331 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2332 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2334 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2335 even though it may hold multiple values during a function.
2336 This is because a GCC tree node guarantees that nothing else is
2337 executed between the evaluation of its "operands" (which may often
2338 be evaluated in arbitrary order). Hence if the operands themselves
2339 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2340 same value in each operand/subexpression. Hence a zero value for
2341 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2342 If comparing arbitrary expression trees, such as from different
2343 statements, ONLY_CONST must usually be nonzero.
2345 If OEP_PURE_SAME is set, then pure functions with identical arguments
2346 are considered the same. It is used when the caller has other ways
2347 to ensure that global memory is unchanged in between. */
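/* Illustrative behavior: operand_equal_p (x, x, 0) is 1 for a
   side-effect-free VAR_DECL x, while two calls F () compare unequal
   because calls may have side effects; with OEP_PURE_SAME, two calls to
   the same pure F with matching arguments do compare equal.  */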
2350 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2352 /* If either is ERROR_MARK, they aren't equal. */
2353 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2356 /* If both types don't have the same signedness, then we can't consider
2357 them equal. We must check this before the STRIP_NOPS calls
2358 because they may change the signedness of the arguments. */
2359 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2365 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2366 /* This is needed for conversions and for COMPONENT_REF.
2367 Might as well play it safe and always test this. */
2368 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2369 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2370 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2373 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2374 We don't care about side effects in that case because the SAVE_EXPR
2375 takes care of that for us. In all other cases, two expressions are
2376 equal if they have no side effects. If we have two identical
2377 expressions with side effects that should be treated the same due
2378 to the only side effects being identical SAVE_EXPR's, that will
2379 be detected in the recursive calls below. */
2380 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2381 && (TREE_CODE (arg0) == SAVE_EXPR
2382 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2385 /* Next handle constant cases, those for which we can return 1 even
2386 if ONLY_CONST is set. */
2387 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2388 switch (TREE_CODE (arg0))
2391 return (! TREE_CONSTANT_OVERFLOW (arg0)
2392 && ! TREE_CONSTANT_OVERFLOW (arg1)
2393 && tree_int_cst_equal (arg0, arg1));
2396 return (! TREE_CONSTANT_OVERFLOW (arg0)
2397 && ! TREE_CONSTANT_OVERFLOW (arg1)
2398 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2399 TREE_REAL_CST (arg1)));
2405 if (TREE_CONSTANT_OVERFLOW (arg0)
2406 || TREE_CONSTANT_OVERFLOW (arg1))
2409 v1 = TREE_VECTOR_CST_ELTS (arg0);
2410 v2 = TREE_VECTOR_CST_ELTS (arg1);
2413 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2416 v1 = TREE_CHAIN (v1);
2417 v2 = TREE_CHAIN (v2);
2424 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2426 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2430 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2431 && ! memcmp (TREE_STRING_POINTER (arg0),
2432 TREE_STRING_POINTER (arg1),
2433 TREE_STRING_LENGTH (arg0)));
2436 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2442 if (flags & OEP_ONLY_CONST)
2445 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2448 /* Two conversions are equal only if signedness and modes match. */
2449 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2450 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2451 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2454 return operand_equal_p (TREE_OPERAND (arg0, 0),
2455 TREE_OPERAND (arg1, 0), flags);
2459 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2460 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2464 /* For commutative ops, allow the other order. */
2465 return (commutative_tree_code (TREE_CODE (arg0))
2466 && operand_equal_p (TREE_OPERAND (arg0, 0),
2467 TREE_OPERAND (arg1, 1), flags)
2468 && operand_equal_p (TREE_OPERAND (arg0, 1),
2469 TREE_OPERAND (arg1, 0), flags));
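/* E.g., this accepts A + B against B + A, but not A - B against B - A,
   since MINUS_EXPR is not a commutative tree code.  */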
2472 /* If either of the pointer (or reference) expressions we are
2473 dereferencing contains a side effect, they cannot be equal.
2474 if (TREE_SIDE_EFFECTS (arg0)
2475 || TREE_SIDE_EFFECTS (arg1))
2478 switch (TREE_CODE (arg0))
2481 return operand_equal_p (TREE_OPERAND (arg0, 0),
2482 TREE_OPERAND (arg1, 0), flags);
2486 case ARRAY_RANGE_REF:
2487 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2488 TREE_OPERAND (arg1, 0), flags)
2489 && operand_equal_p (TREE_OPERAND (arg0, 1),
2490 TREE_OPERAND (arg1, 1), flags));
2493 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2494 TREE_OPERAND (arg1, 0), flags)
2495 && operand_equal_p (TREE_OPERAND (arg0, 1),
2496 TREE_OPERAND (arg1, 1), flags)
2497 && operand_equal_p (TREE_OPERAND (arg0, 2),
2498 TREE_OPERAND (arg1, 2), flags));
2504 switch (TREE_CODE (arg0))
2507 case TRUTH_NOT_EXPR:
2508 return operand_equal_p (TREE_OPERAND (arg0, 0),
2509 TREE_OPERAND (arg1, 0), flags);
2512 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2515 /* If the CALL_EXPRs call different functions, then they
2516 clearly can not be equal. */
2517 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2518 TREE_OPERAND (arg1, 0), flags))
2522 unsigned int cef = call_expr_flags (arg0);
2523 if (flags & OEP_PURE_SAME)
2524 cef &= ECF_CONST | ECF_PURE;
2531 /* Now see if all the arguments are the same. operand_equal_p
2532 does not handle TREE_LIST, so we walk the operands here
2533 feeding them to operand_equal_p. */
2534 arg0 = TREE_OPERAND (arg0, 1);
2535 arg1 = TREE_OPERAND (arg1, 1);
2536 while (arg0 && arg1)
2538 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2542 arg0 = TREE_CHAIN (arg0);
2543 arg1 = TREE_CHAIN (arg1);
2546 /* If we get here and both argument lists are exhausted
2547 then the CALL_EXPRs are equal. */
2548 return ! (arg0 || arg1);
2555 /* Consider __builtin_sqrt equal to sqrt. */
2556 return (TREE_CODE (arg0) == FUNCTION_DECL
2557 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2558 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2559 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2566 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2567 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2569 When in doubt, return 0. */
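/* Illustrative case: if ARG1 is (int) S for some short S compared
   against a small constant, shorten_compare may have rewritten the test
   to compare S directly, so ARG0 is the narrowed operand; the exact
   trees depend on the front end.  */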
2572 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2574 int unsignedp1, unsignedpo;
2575 tree primarg0, primarg1, primother;
2576 unsigned int correct_width;
2578 if (operand_equal_p (arg0, arg1, 0))
2581 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2582 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2585 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2586 and see if the inner values are the same. This removes any
2587 signedness comparison, which doesn't matter here. */
2588 primarg0 = arg0, primarg1 = arg1;
2589 STRIP_NOPS (primarg0);
2590 STRIP_NOPS (primarg1);
2591 if (operand_equal_p (primarg0, primarg1, 0))
2594 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2595 actual comparison operand, ARG0.
2597 First throw away any conversions to wider types
2598 already present in the operands. */
2600 primarg1 = get_narrower (arg1, &unsignedp1);
2601 primother = get_narrower (other, &unsignedpo);
2603 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2604 if (unsignedp1 == unsignedpo
2605 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2606 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2608 tree type = TREE_TYPE (arg0);
2610 /* Make sure the shorter operand is extended the right way
2611 to match the longer operand. */
2612 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2613 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2615 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2622 /* See if ARG is an expression that is either a comparison or is performing
2623 arithmetic on comparisons. The comparisons must only be comparing
2624 two different values, which will be stored in *CVAL1 and *CVAL2; if
2625 they are nonzero it means that some operands have already been found.
2626 No variables may be used anywhere else in the expression except in the
2627 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2628 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2630 If this is true, return 1. Otherwise, return zero. */
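/* For example (illustrative), (X < Y) | (X == Y) qualifies, with *CVAL1
   becoming X and *CVAL2 becoming Y, whereas (X < Y) + Z does not, since
   Z is used outside a comparison.  */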
2633 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2635 enum tree_code code = TREE_CODE (arg);
2636 char class = TREE_CODE_CLASS (code);
2638 /* We can handle some of the 'e' cases here. */
2639 if (class == 'e' && code == TRUTH_NOT_EXPR)
2641 else if (class == 'e'
2642 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2643 || code == COMPOUND_EXPR))
2646 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2647 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2649 /* If we've already found a CVAL1 or CVAL2, this expression is
2650 too complex to handle. */
2651 if (*cval1 || *cval2)
2661 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2664 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2665 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2666 cval1, cval2, save_p));
2672 if (code == COND_EXPR)
2673 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2674 cval1, cval2, save_p)
2675 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2676 cval1, cval2, save_p)
2677 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2678 cval1, cval2, save_p));
2682 /* First see if we can handle the first operand, then the second. For
2683 the second operand, we know *CVAL1 can't be zero. It must be that
2684 one side of the comparison is each of the values; test for the
2685 case where this isn't true by failing if the two operands are the same. */
2688 if (operand_equal_p (TREE_OPERAND (arg, 0),
2689 TREE_OPERAND (arg, 1), 0))
2693 *cval1 = TREE_OPERAND (arg, 0);
2694 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2696 else if (*cval2 == 0)
2697 *cval2 = TREE_OPERAND (arg, 0);
2698 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2703 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2705 else if (*cval2 == 0)
2706 *cval2 = TREE_OPERAND (arg, 1);
2707 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2719 /* ARG is a tree that is known to contain just arithmetic operations and
2720 comparisons. Evaluate the operations in the tree substituting NEW0 for
2721 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
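/* For example (illustrative), eval_subst on (X < Y) && (X == Y) with
   OLD0 = X, NEW0 = A, OLD1 = Y, NEW1 = B rebuilds the folded form of
   (A < B) && (A == B).  */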
2725 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2727 tree type = TREE_TYPE (arg);
2728 enum tree_code code = TREE_CODE (arg);
2729 char class = TREE_CODE_CLASS (code);
2731 /* We can handle some of the 'e' cases here. */
2732 if (class == 'e' && code == TRUTH_NOT_EXPR)
2734 else if (class == 'e'
2735 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2741 return fold (build1 (code, type,
2742 eval_subst (TREE_OPERAND (arg, 0),
2743 old0, new0, old1, new1)));
2746 return fold (build2 (code, type,
2747 eval_subst (TREE_OPERAND (arg, 0),
2748 old0, new0, old1, new1),
2749 eval_subst (TREE_OPERAND (arg, 1),
2750 old0, new0, old1, new1)));
2756 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2759 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2762 return fold (build3 (code, type,
2763 eval_subst (TREE_OPERAND (arg, 0),
2764 old0, new0, old1, new1),
2765 eval_subst (TREE_OPERAND (arg, 1),
2766 old0, new0, old1, new1),
2767 eval_subst (TREE_OPERAND (arg, 2),
2768 old0, new0, old1, new1)));
2772 /* Fall through - ??? */
2776 tree arg0 = TREE_OPERAND (arg, 0);
2777 tree arg1 = TREE_OPERAND (arg, 1);
2779 /* We need to check both for exact equality and tree equality. The
2780 former will be true if the operand has a side-effect. In that
2781 case, we know the operand occurred exactly once. */
2783 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2785 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2788 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2790 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2793 return fold (build2 (code, type, arg0, arg1));
2801 /* Return a tree for the case when the result of an expression is RESULT
2802 converted to TYPE and OMITTED was previously an operand of the expression
2803 but is now not needed (e.g., we folded OMITTED * 0).
2805 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2806 the conversion of RESULT to TYPE. */
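/* E.g., when folding F () * 0 the result is COMPOUND_EXPR (F (), 0), so
   the call is still evaluated for its side effects, whereas X * 0 for a
   side-effect-free X simply becomes the constant 0 of TYPE.  */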
2809 omit_one_operand (tree type, tree result, tree omitted)
2811 tree t = fold_convert (type, result);
2813 if (TREE_SIDE_EFFECTS (omitted))
2814 return build2 (COMPOUND_EXPR, type, omitted, t);
2816 return non_lvalue (t);
2819 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2822 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2824 tree t = fold_convert (type, result);
2826 if (TREE_SIDE_EFFECTS (omitted))
2827 return build2 (COMPOUND_EXPR, type, omitted, t);
2829 return pedantic_non_lvalue (t);
2832 /* Return a simplified tree node for the truth-negation of ARG. This
2833 never alters ARG itself. We assume that ARG is an operation that
2834 returns a truth value (0 or 1).
2836 FIXME: one would think we would fold the result, but it causes
2837 problems with the dominator optimizer. */
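/* Illustrative results: inverting A && B yields !A || !B (De Morgan),
   and inverting A < B yields A >= B when the operands cannot be NaN;
   with trapping math and floating-point operands we instead wrap the
   comparison in a TRUTH_NOT_EXPR, as the code below shows.  */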
2839 invert_truthvalue (tree arg)
2841 tree type = TREE_TYPE (arg);
2842 enum tree_code code = TREE_CODE (arg);
2844 if (code == ERROR_MARK)
2847 /* If this is a comparison, we can simply invert it, except for
2848 floating-point non-equality comparisons, in which case we just
2849 enclose a TRUTH_NOT_EXPR around what we have. */
2851 if (TREE_CODE_CLASS (code) == '<')
2853 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2854 if (FLOAT_TYPE_P (op_type)
2855 && flag_trapping_math
2856 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2857 && code != NE_EXPR && code != EQ_EXPR)
2858 return build1 (TRUTH_NOT_EXPR, type, arg);
2861 code = invert_tree_comparison (code,
2862 HONOR_NANS (TYPE_MODE (op_type)));
2863 if (code == ERROR_MARK)
2864 return build1 (TRUTH_NOT_EXPR, type, arg);
2866 return build2 (code, type,
2867 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2874 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2876 case TRUTH_AND_EXPR:
2877 return build2 (TRUTH_OR_EXPR, type,
2878 invert_truthvalue (TREE_OPERAND (arg, 0)),
2879 invert_truthvalue (TREE_OPERAND (arg, 1)));
2882 return build2 (TRUTH_AND_EXPR, type,
2883 invert_truthvalue (TREE_OPERAND (arg, 0)),
2884 invert_truthvalue (TREE_OPERAND (arg, 1)));
2886 case TRUTH_XOR_EXPR:
2887 /* Here we can invert either operand. We invert the first operand
2888 unless the second operand is a TRUTH_NOT_EXPR, in which case our
2889 result is the XOR of the first operand with the inside of the
2890 negation of the second operand. */
2892 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2893 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2894 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2896 return build2 (TRUTH_XOR_EXPR, type,
2897 invert_truthvalue (TREE_OPERAND (arg, 0)),
2898 TREE_OPERAND (arg, 1));
2900 case TRUTH_ANDIF_EXPR:
2901 return build2 (TRUTH_ORIF_EXPR, type,
2902 invert_truthvalue (TREE_OPERAND (arg, 0)),
2903 invert_truthvalue (TREE_OPERAND (arg, 1)));
2905 case TRUTH_ORIF_EXPR:
2906 return build2 (TRUTH_ANDIF_EXPR, type,
2907 invert_truthvalue (TREE_OPERAND (arg, 0)),
2908 invert_truthvalue (TREE_OPERAND (arg, 1)));
2910 case TRUTH_NOT_EXPR:
2911 return TREE_OPERAND (arg, 0);
2914 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2915 invert_truthvalue (TREE_OPERAND (arg, 1)),
2916 invert_truthvalue (TREE_OPERAND (arg, 2)));
2919 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2920 invert_truthvalue (TREE_OPERAND (arg, 1)));
2922 case NON_LVALUE_EXPR:
2923 return invert_truthvalue (TREE_OPERAND (arg, 0));
2926 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2931 return build1 (TREE_CODE (arg), type,
2932 invert_truthvalue (TREE_OPERAND (arg, 0)));
2935 if (!integer_onep (TREE_OPERAND (arg, 1)))
2937 return build2 (EQ_EXPR, type, arg,
2938 fold_convert (type, integer_zero_node));
2941 return build1 (TRUTH_NOT_EXPR, type, arg);
2943 case CLEANUP_POINT_EXPR:
2944 return build1 (CLEANUP_POINT_EXPR, type,
2945 invert_truthvalue (TREE_OPERAND (arg, 0)));
2950 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2952 return build1 (TRUTH_NOT_EXPR, type, arg);
2955 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2956 operands are another bit-wise operation with a common input. If so,
2957 distribute the bit operations to save an operation and possibly two if
2958 constants are involved. For example, convert
2959 (A | B) & (A | C) into A | (B & C)
2960 Further simplification will occur if B and C are constants.
2962 If this optimization cannot be done, 0 will be returned. */
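/* The dual form is handled by the same code: (A & B) | (A & C) becomes
   A & (B | C), since CODE and TREE_CODE (arg0) simply trade places.  */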
2965 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2970 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2971 || TREE_CODE (arg0) == code
2972 || (TREE_CODE (arg0) != BIT_AND_EXPR
2973 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2976 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2978 common = TREE_OPERAND (arg0, 0);
2979 left = TREE_OPERAND (arg0, 1);
2980 right = TREE_OPERAND (arg1, 1);
2982 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2984 common = TREE_OPERAND (arg0, 0);
2985 left = TREE_OPERAND (arg0, 1);
2986 right = TREE_OPERAND (arg1, 0);
2988 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2990 common = TREE_OPERAND (arg0, 1);
2991 left = TREE_OPERAND (arg0, 0);
2992 right = TREE_OPERAND (arg1, 1);
2994 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2996 common = TREE_OPERAND (arg0, 1);
2997 left = TREE_OPERAND (arg0, 0);
2998 right = TREE_OPERAND (arg1, 0);
3003 return fold (build2 (TREE_CODE (arg0), type, common,
3004 fold (build2 (code, type, left, right))));
3007 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3008 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3011 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3014 tree result = build3 (BIT_FIELD_REF, type, inner,
3015 size_int (bitsize), bitsize_int (bitpos));
3017 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3022 /* Optimize a bit-field compare.
3024 There are two cases: First is a compare against a constant and the
3025 second is a comparison of two items where the fields are at the same
3026 bit position relative to the start of a chunk (byte, halfword, word)
3027 large enough to contain it. In these cases we can avoid the shift
3028 implicit in bitfield extractions.
3030 For constants, we emit a compare of the shifted constant with the
3031 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3032 compared. For two fields at the same position, we do the ANDs with the
3033 similar mask and compare the result of the ANDs.
3035 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3036 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3037 are the left and right operands of the comparison, respectively.
3039 If the optimization described above can be done, we return the resulting
3040 tree. Otherwise we return zero. */
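/* Illustrative sketch: given struct { unsigned f : 3; } s, the test
   s.f == 3 can become (WORD & MASK) == (3 << SHIFT), where WORD is a
   word-sized load containing the field; WORD, MASK and SHIFT stand for
   the values computed below.  */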
3043 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3046 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3047 tree type = TREE_TYPE (lhs);
3048 tree signed_type, unsigned_type;
3049 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3050 enum machine_mode lmode, rmode, nmode;
3051 int lunsignedp, runsignedp;
3052 int lvolatilep = 0, rvolatilep = 0;
3053 tree linner, rinner = NULL_TREE;
3057 /* Get all the information about the extractions being done. If the bit size
3058 is the same as the size of the underlying object, we aren't doing an
3059 extraction at all and so can do nothing. We also don't want to
3060 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3061 then will no longer be able to replace it. */
3062 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3063 &lunsignedp, &lvolatilep);
3064 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3065 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3070 /* If this is not a constant, we can only do something if bit positions,
3071 sizes, and signedness are the same. */
3072 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3073 &runsignedp, &rvolatilep);
3075 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3076 || lunsignedp != runsignedp || offset != 0
3077 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3081 /* See if we can find a mode to refer to this field. We should be able to,
3082 but fail if we can't. */
3083 nmode = get_best_mode (lbitsize, lbitpos,
3084 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3085 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3086 TYPE_ALIGN (TREE_TYPE (rinner))),
3087 word_mode, lvolatilep || rvolatilep);
3088 if (nmode == VOIDmode)
3091 /* Set signed and unsigned types of the precision of this mode for the comparison. */
3093 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3094 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3096 /* Compute the bit position and size for the new reference and our offset
3097 within it. If the new reference is the same size as the original, we
3098 won't optimize anything, so return zero. */
3099 nbitsize = GET_MODE_BITSIZE (nmode);
3100 nbitpos = lbitpos & ~ (nbitsize - 1);
3102 if (nbitsize == lbitsize)
3105 if (BYTES_BIG_ENDIAN)
3106 lbitpos = nbitsize - lbitsize - lbitpos;
3108 /* Make the mask to be used against the extracted field. */
3109 mask = build_int_2 (~0, ~0);
3110 TREE_TYPE (mask) = unsigned_type;
3111 force_fit_type (mask, 0);
3112 mask = fold_convert (unsigned_type, mask);
3113 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3114 mask = const_binop (RSHIFT_EXPR, mask,
3115 size_int (nbitsize - lbitsize - lbitpos), 0);
3118 /* If not comparing with constant, just rework the comparison and return. */
3120 return build2 (code, compare_type,
3121 build2 (BIT_AND_EXPR, unsigned_type,
3122 make_bit_field_ref (linner, unsigned_type,
3123 nbitsize, nbitpos, 1),
3125 build2 (BIT_AND_EXPR, unsigned_type,
3126 make_bit_field_ref (rinner, unsigned_type,
3127 nbitsize, nbitpos, 1),
3130 /* Otherwise, we are handling the constant case. See if the constant is too
3131 big for the field. Warn and return a tree for 0 (false) if so. We do
3132 this not only for its own sake, but to avoid having to test for this
3133 error case below. If we didn't, we might generate wrong code.
3135 For unsigned fields, the constant shifted right by the field length should
3136 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
3141 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3142 fold_convert (unsigned_type, rhs),
3143 size_int (lbitsize), 0)))
3145 warning ("comparison is always %d due to width of bit-field",
3147 return constant_boolean_node (code == NE_EXPR, compare_type);
3152 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3153 size_int (lbitsize - 1), 0);
3154 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3156 warning ("comparison is always %d due to width of bit-field",
3158 return constant_boolean_node (code == NE_EXPR, compare_type);
3162 /* Single-bit compares should always be against zero. */
3163 if (lbitsize == 1 && ! integer_zerop (rhs))
3165 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3166 rhs = fold_convert (type, integer_zero_node);
3169 /* Make a new bitfield reference, shift the constant over the
3170 appropriate number of bits and mask it with the computed mask
3171 (in case this was a signed field). If we changed it, make a new one. */
3172 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3175 TREE_SIDE_EFFECTS (lhs) = 1;
3176 TREE_THIS_VOLATILE (lhs) = 1;
3179 rhs = fold (const_binop (BIT_AND_EXPR,
3180 const_binop (LSHIFT_EXPR,
3181 fold_convert (unsigned_type, rhs),
3182 size_int (lbitpos), 0),
3185 return build2 (code, compare_type,
3186 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3190 /* Subroutine for fold_truthop: decode a field reference.
3192 If EXP is a comparison reference, we return the innermost reference.
3194 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3195 set to the starting bit number.
3197 If the innermost field can be completely contained in a mode-sized
3198 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3200 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3201 otherwise it is not changed.
3203 *PUNSIGNEDP is set to the signedness of the field.
3205 *PMASK is set to the mask used. This is either contained in a
3206 BIT_AND_EXPR or derived from the width of the field.
3208 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3210 Return 0 if this is not a component reference or is one that we can't
3211 do anything with. */
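/* E.g. (illustrative), for EXP = (unsigned char) (s.f & 3) we strip the
   conversion, record 3 in *PAND_MASK, and hand back the object
   underlying s.f together with its bit position and size.  */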
3214 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3215 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3216 int *punsignedp, int *pvolatilep,
3217 tree *pmask, tree *pand_mask)
3219 tree outer_type = 0;
3221 tree mask, inner, offset;
3223 unsigned int precision;
3225 /* All the optimizations using this function assume integer fields.
3226 There are problems with FP fields since the type_for_size call
3227 below can fail for, e.g., XFmode. */
3228 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3231 /* We are interested in the bare arrangement of bits, so strip everything
3232 that doesn't affect the machine mode. However, record the type of the
3233 outermost expression if it may matter below. */
3234 if (TREE_CODE (exp) == NOP_EXPR
3235 || TREE_CODE (exp) == CONVERT_EXPR
3236 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3237 outer_type = TREE_TYPE (exp);
3240 if (TREE_CODE (exp) == BIT_AND_EXPR)
3242 and_mask = TREE_OPERAND (exp, 1);
3243 exp = TREE_OPERAND (exp, 0);
3244 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3245 if (TREE_CODE (and_mask) != INTEGER_CST)
3249 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3250 punsignedp, pvolatilep);
3251 if ((inner == exp && and_mask == 0)
3252 || *pbitsize < 0 || offset != 0
3253 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3256 /* If the number of bits in the reference is the same as the bitsize of
3257 the outer type, then the outer type gives the signedness. Otherwise
3258 (in case of a small bitfield) the signedness is unchanged. */
3259 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3260 *punsignedp = TYPE_UNSIGNED (outer_type);
3262 /* Compute the mask to access the bitfield. */
3263 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3264 precision = TYPE_PRECISION (unsigned_type);
3266 mask = build_int_2 (~0, ~0);
3267 TREE_TYPE (mask) = unsigned_type;
3268 force_fit_type (mask, 0);
3269 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3270 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3272 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3274 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3275 fold_convert (unsigned_type, and_mask), mask));
3278 *pand_mask = and_mask;
3282 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order bits. */
3286 all_ones_mask_p (tree mask, int size)
3288 tree type = TREE_TYPE (mask);
3289 unsigned int precision = TYPE_PRECISION (type);
3292 tmask = build_int_2 (~0, ~0);
3293 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3294 force_fit_type (tmask, 0);
3296 tree_int_cst_equal (mask,
3297 const_binop (RSHIFT_EXPR,
3298 const_binop (LSHIFT_EXPR, tmask,
3299 size_int (precision - size),
3301 size_int (precision - size), 0));
3304 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3305 represents the sign bit of EXP's type. If EXP represents a sign
3306 or zero extension, also test VAL against the unextended type.
3307 The return value is the (sub)expression whose sign bit is VAL,
3308 or NULL_TREE otherwise. */
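/* For example, with an 8-bit EXP the matching VAL is 0x80, and with a
   32-bit EXP it is 0x80000000; for (int) C with an 8-bit signed C we
   also accept 0x80, the sign bit of the unextended type.  */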
3311 sign_bit_p (tree exp, tree val)
3313 unsigned HOST_WIDE_INT mask_lo, lo;
3314 HOST_WIDE_INT mask_hi, hi;
3318 /* Tree EXP must have an integral type. */
3319 t = TREE_TYPE (exp);
3320 if (! INTEGRAL_TYPE_P (t))
3323 /* Tree VAL must be an integer constant. */
3324 if (TREE_CODE (val) != INTEGER_CST
3325 || TREE_CONSTANT_OVERFLOW (val))
3328 width = TYPE_PRECISION (t);
3329 if (width > HOST_BITS_PER_WIDE_INT)
3331 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3334 mask_hi = ((unsigned HOST_WIDE_INT) -1
3335 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3341 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3344 mask_lo = ((unsigned HOST_WIDE_INT) -1
3345 >> (HOST_BITS_PER_WIDE_INT - width));
3348 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3349 treat VAL as if it were unsigned. */
3350 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3351 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3354 /* Handle extension from a narrower type. */
3355 if (TREE_CODE (exp) == NOP_EXPR
3356 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3357 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3362 /* Subroutine for fold_truthop: determine if an operand is simple enough
3363 to be evaluated unconditionally. */
3366 simple_operand_p (tree exp)
3368 /* Strip any conversions that don't change the machine mode. */
3369 while ((TREE_CODE (exp) == NOP_EXPR
3370 || TREE_CODE (exp) == CONVERT_EXPR)
3371 && (TYPE_MODE (TREE_TYPE (exp))
3372 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3373 exp = TREE_OPERAND (exp, 0);
3375 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3377 && ! TREE_ADDRESSABLE (exp)
3378 && ! TREE_THIS_VOLATILE (exp)
3379 && ! DECL_NONLOCAL (exp)
3380 /* Don't regard global variables as simple. They may be
3381 allocated in ways unknown to the compiler (shared memory,
3382 #pragma weak, etc). */
3383 && ! TREE_PUBLIC (exp)
3384 && ! DECL_EXTERNAL (exp)
3385 /* Loading a static variable is unduly expensive, but global
3386 registers aren't expensive. */
3387 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3390 /* The following functions are subroutines to fold_range_test and allow it to
3391 try to change a logical combination of comparisons into a range test.
For example, both
X == 2 || X == 3 || X == 4 || X == 5
and
X >= 2 && X <= 5
are converted to
(unsigned) (X - 2) <= 3
3400 We describe each set of comparisons as being either inside or outside
3401 a range, using a variable named like IN_P, and then describe the
3402 range with a lower and upper bound. If one of the bounds is omitted,
3403 it represents either the highest or lowest value of the type.
3405 In the comments below, we represent a range by two numbers in brackets
3406 preceded by a "+" to designate being inside that range, or a "-" to
3407 designate being outside that range, so the condition can be inverted by
3408 flipping the prefix. An omitted bound is represented by a "-". For
3409 example, "- [-, 10]" means being outside the range starting at the lowest
3410 possible value and ending at 10, in other words, being greater than 10.
3411 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3414 We set up things so that the missing bounds are handled in a consistent
3415 manner so neither a missing bound nor "true" and "false" need to be
3416 handled using a special case. */
3418 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3419 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3420 and UPPER1_P are nonzero if the respective argument is an upper bound
3421 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3422 must be specified for a comparison. ARG1 will be converted to ARG0's
3423 type if both are specified. */
3426 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3427 tree arg1, int upper1_p)
3433 /* If neither arg represents infinity, do the normal operation.
3434 Else, if not a comparison, return infinity. Else handle the special
3435 comparison rules. Note that most of the cases below won't occur, but
3436 are handled for consistency. */
3438 if (arg0 != 0 && arg1 != 0)
3440 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3441 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3443 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3446 if (TREE_CODE_CLASS (code) != '<')
3449 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3450 for neither. In real maths, we cannot assume open ended ranges are
3451 the same. But, this is computer arithmetic, where numbers are finite.
3452 We can therefore make the transformation of any unbounded range with
3453 the value Z, Z being greater than any representable number. This permits
3454 us to treat unbounded ranges as equal. */
3455 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3456 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3460 result = sgn0 == sgn1;
3463 result = sgn0 != sgn1;
3466 result = sgn0 < sgn1;
3469 result = sgn0 <= sgn1;
3472 result = sgn0 > sgn1;
3475 result = sgn0 >= sgn1;
3481 return constant_boolean_node (result, type);
3484 /* Given EXP, a logical expression, set the range it is testing into
3485 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3486 actually being tested. *PLOW and *PHIGH will be made of the same type
3487 as the returned expression. If EXP is not a comparison, we will most
3488 likely not be returning a useful value and range. */
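/* A small illustration: for EXP = (X > 10) we return X with *PIN_P == 0
   and the range [-, 10], i.e. "outside [-, 10]" in the notation of the
   comment before these subroutines.  */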
3491 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3493 enum tree_code code;
3494 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3495 tree orig_type = NULL_TREE;
3497 tree low, high, n_low, n_high;
3499 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3500 and see if we can refine the range. Some of the cases below may not
3501 happen, but it doesn't seem worth worrying about this. We "continue"
3502 the outer loop when we've changed something; otherwise we "break"
3503 the switch, which will "break" the while. */
3506 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3510 code = TREE_CODE (exp);
3512 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3514 if (first_rtl_op (code) > 0)
3515 arg0 = TREE_OPERAND (exp, 0);
3516 if (TREE_CODE_CLASS (code) == '<'
3517 || TREE_CODE_CLASS (code) == '1'
3518 || TREE_CODE_CLASS (code) == '2')
3519 type = TREE_TYPE (arg0);
3520 if (TREE_CODE_CLASS (code) == '2'
3521 || TREE_CODE_CLASS (code) == '<'
3522 || (TREE_CODE_CLASS (code) == 'e'
3523 && TREE_CODE_LENGTH (code) > 1))
3524 arg1 = TREE_OPERAND (exp, 1);
3527 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3528 lose a cast by accident. */
3529 if (type != NULL_TREE && orig_type == NULL_TREE)
3534 case TRUTH_NOT_EXPR:
3535 in_p = ! in_p, exp = arg0;
3538 case EQ_EXPR: case NE_EXPR:
3539 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3540 /* We can only do something if the range is testing for zero
3541 and if the second operand is an integer constant. Note that
3542 saying something is "in" the range we make is done by
3543 complementing IN_P since it will set in the initial case of
3544 being not equal to zero; "out" is leaving it alone. */
3545 if (low == 0 || high == 0
3546 || ! integer_zerop (low) || ! integer_zerop (high)
3547 || TREE_CODE (arg1) != INTEGER_CST)
3552 case NE_EXPR: /* - [c, c] */
3555 case EQ_EXPR: /* + [c, c] */
3556 in_p = ! in_p, low = high = arg1;
3558 case GT_EXPR: /* - [-, c] */
3559 low = 0, high = arg1;
3561 case GE_EXPR: /* + [c, -] */
3562 in_p = ! in_p, low = arg1, high = 0;
3564 case LT_EXPR: /* - [c, -] */
3565 low = arg1, high = 0;
3567 case LE_EXPR: /* + [-, c] */
3568 in_p = ! in_p, low = 0, high = arg1;
3576 /* If this is an unsigned comparison, we also know that EXP is
3577 greater than or equal to zero. We base the range tests we make
3578 on that fact, so we record it here so we can parse existing range tests. */
3580 if (TYPE_UNSIGNED (type) && (low == 0 || high == 0))
3582 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3583 1, fold_convert (type, integer_zero_node),
3587 in_p = n_in_p, low = n_low, high = n_high;
3589 /* If the high bound is missing, but we have a nonzero low
3590 bound, reverse the range so it goes from zero to the low bound minus 1. */
3592 if (high == 0 && low && ! integer_zerop (low))
3595 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3596 integer_one_node, 0);
3597 low = fold_convert (type, integer_zero_node);
3603 /* (-x) IN [a,b] -> x in [-b, -a] */
3604 n_low = range_binop (MINUS_EXPR, type,
3605 fold_convert (type, integer_zero_node),
3607 n_high = range_binop (MINUS_EXPR, type,
3608 fold_convert (type, integer_zero_node),
3610 low = n_low, high = n_high;
3616 exp = build2 (MINUS_EXPR, type, negate_expr (arg0),
3617 fold_convert (type, integer_one_node));
3620 case PLUS_EXPR: case MINUS_EXPR:
3621 if (TREE_CODE (arg1) != INTEGER_CST)
3624 /* If EXP is signed, any overflow in the computation is undefined,
3625 so we don't worry about it so long as our computations on
3626 the bounds don't overflow. For unsigned, overflow is defined
3627 and this is exactly the right thing. */
3628 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3629 type, low, 0, arg1, 0);
3630 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3631 type, high, 1, arg1, 0);
3632 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3633 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3636 /* Check for an unsigned range which has wrapped around the maximum
3637 value thus making n_high < n_low, and normalize it. */
3638 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3640 low = range_binop (PLUS_EXPR, type, n_high, 0,
3641 integer_one_node, 0);
3642 high = range_binop (MINUS_EXPR, type, n_low, 0,
3643 integer_one_node, 0);
3645 /* If the range is of the form +/- [ x+1, x ], we won't
3646 be able to normalize it. But then, it represents the
3647 whole range or the empty set, so make it +/- [ -, - ]. */
3649 if (tree_int_cst_equal (n_low, low)
3650 && tree_int_cst_equal (n_high, high))
3656 low = n_low, high = n_high;
3661 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3662 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3665 if (! INTEGRAL_TYPE_P (type)
3666 || (low != 0 && ! int_fits_type_p (low, type))
3667 || (high != 0 && ! int_fits_type_p (high, type)))
3670 n_low = low, n_high = high;
3673 n_low = fold_convert (type, n_low);
3676 n_high = fold_convert (type, n_high);
3678 /* If we're converting from an unsigned to a signed type,
3679 we will be doing the comparison as unsigned. The tests above
3680 have already verified that LOW and HIGH are both positive.
3682 So we have to make sure that the original unsigned value will
3683 be interpreted as positive. */
3684 if (TYPE_UNSIGNED (type) && ! TYPE_UNSIGNED (TREE_TYPE (exp)))
3686 tree equiv_type = lang_hooks.types.type_for_mode
3687 (TYPE_MODE (type), 1);
3690 /* A range without an upper bound is, naturally, unbounded.
3691 Since convert would have cropped a very large value, use
3692 the max value for the destination type. */
3694 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3695 : TYPE_MAX_VALUE (type);
3697 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3698 high_positive = fold (build2 (RSHIFT_EXPR, type,
3702 integer_one_node)));
3704 /* If the low bound is specified, "and" the range with the
3705 range for which the original unsigned value will be positive. */
3709 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3710 1, n_low, n_high, 1,
3711 fold_convert (type, integer_zero_node),
3715 in_p = (n_in_p == in_p);
3719 /* Otherwise, "or" the range with the range of the input
3720 that will be interpreted as negative. */
3721 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3722 0, n_low, n_high, 1,
3723 fold_convert (type, integer_zero_node),
3727 in_p = (in_p != n_in_p);
3732 low = n_low, high = n_high;
3742 /* If EXP is a constant, we can evaluate whether this is true or false. */
3743 if (TREE_CODE (exp) == INTEGER_CST)
3745 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3747 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3753 *pin_p = in_p, *plow = low, *phigh = high;
3757 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3758 type, TYPE, return an expression to test if EXP is in (or out of, depending
3759 on IN_P) the range. */
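/* E.g., a check for EXP in [2, 5] falls through to the final case below
   and becomes (unsigned) (EXP - 2) <= 3, matching the example before
   these subroutines.  */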
3762 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3764 tree etype = TREE_TYPE (exp);
3768 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3769 return invert_truthvalue (value);
3771 if (low == 0 && high == 0)
3772 return fold_convert (type, integer_one_node);
3775 return fold (build2 (LE_EXPR, type, exp, high));
3778 return fold (build2 (GE_EXPR, type, exp, low));
3780 if (operand_equal_p (low, high, 0))
3781 return fold (build2 (EQ_EXPR, type, exp, low));
3783 if (integer_zerop (low))
3785 if (! TYPE_UNSIGNED (etype))
3787 etype = lang_hooks.types.unsigned_type (etype);
3788 high = fold_convert (etype, high);
3789 exp = fold_convert (etype, exp);
3791 return build_range_check (type, exp, 1, 0, high);
3794 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3795 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3797 unsigned HOST_WIDE_INT lo;
3801 prec = TYPE_PRECISION (etype);
3802 if (prec <= HOST_BITS_PER_WIDE_INT)
3805 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3809 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3810 lo = (unsigned HOST_WIDE_INT) -1;
3813 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3815 if (TYPE_UNSIGNED (etype))
3817 etype = lang_hooks.types.signed_type (etype);
3818 exp = fold_convert (etype, exp);
3820 return fold (build2 (GT_EXPR, type, exp,
3821 fold_convert (etype, integer_zero_node)));
3825 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3826 && ! TREE_OVERFLOW (value))
3827 return build_range_check (type,
3828 fold (build2 (MINUS_EXPR, etype, exp, low)),
3829 1, fold_convert (etype, integer_zero_node),
3835 /* Given two ranges, see if we can merge them into one. Return 1 if we
3836 can, 0 if we can't. Set the output range into the specified parameters. */
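/* For instance, merging + [2, 5] with + [4, 9] (both included, as for an
   AND) yields + [4, 5]; merging - [2, 5] with - [6, 9] yields - [2, 9],
   since the two excluded ranges are adjacent.  */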
3839 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3840 tree high0, int in1_p, tree low1, tree high1)
3848 int lowequal = ((low0 == 0 && low1 == 0)
3849 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3850 low0, 0, low1, 0)));
3851 int highequal = ((high0 == 0 && high1 == 0)
3852 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3853 high0, 1, high1, 1)));
3855 /* Make range 0 be the range that starts first, or ends last if they
3856 start at the same value. Swap them if it isn't. */
3857 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3860 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3861 high1, 1, high0, 1))))
3863 temp = in0_p, in0_p = in1_p, in1_p = temp;
3864 tem = low0, low0 = low1, low1 = tem;
3865 tem = high0, high0 = high1, high1 = tem;
3868 /* Now flag two cases, whether the ranges are disjoint or whether the
3869 second range is totally subsumed in the first. Note that the tests
3870 below are simplified by the ones above. */
3871 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3872 high0, 1, low1, 0));
3873 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3874 high1, 1, high0, 1));
3876 /* We now have four cases, depending on whether we are including or
3877 excluding the two ranges. */
3880 /* If they don't overlap, the result is false. If the second range
3881 is a subset it is the result. Otherwise, the range is from the start
3882 of the second to the end of the first. */
3884 in_p = 0, low = high = 0;
3886 in_p = 1, low = low1, high = high1;
3888 in_p = 1, low = low1, high = high0;
3891 else if (in0_p && ! in1_p)
3893 /* If they don't overlap, the result is the first range. If they are
3894 equal, the result is false. If the second range is a subset of the
3895 first, and the ranges begin at the same place, we go from just after
3896 the end of the first range to the end of the second. If the second
3897 range is not a subset of the first, or if it is a subset and both
3898 ranges end at the same place, the range starts at the start of the
3899 first range and ends just before the second range.
3900 Otherwise, we can't describe this as a single range. */
3902 in_p = 1, low = low0, high = high0;
3903 else if (lowequal && highequal)
3904 in_p = 0, low = high = 0;
3905 else if (subset && lowequal)
3907 in_p = 1, high = high0;
3908 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3909 integer_one_node, 0);
3911 else if (! subset || highequal)
3913 in_p = 1, low = low0;
3914 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3915 integer_one_node, 0);
3921 else if (! in0_p && in1_p)
3923 /* If they don't overlap, the result is the second range. If the second
3924 is a subset of the first, the result is false. Otherwise,
3925 the range starts just after the first range and ends at the
3926 end of the second. */
3928 in_p = 1, low = low1, high = high1;
3929 else if (subset || highequal)
3930 in_p = 0, low = high = 0;
3933 in_p = 1, high = high1;
3934 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3935 integer_one_node, 0);
3941 /* The case where we are excluding both ranges. Here the complex case
3942 is if they don't overlap. In that case, the only time we have a
3943 range is if they are adjacent. If the second is a subset of the
3944 first, the result is the first. Otherwise, the range to exclude
3945 starts at the beginning of the first range and ends at the end of the second. */
3949 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3950 range_binop (PLUS_EXPR, NULL_TREE,
3952 integer_one_node, 1),
3954 in_p = 0, low = low0, high = high1;
3959 in_p = 0, low = low0, high = high0;
3961 in_p = 0, low = low0, high = high1;
3964 *pin_p = in_p, *plow = low, *phigh = high;
3968 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3969 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif
3972 /* EXP is some logical combination of boolean tests. See if we can
3973 merge it into some range test. Return the new tree if so. */
3976 fold_range_test (tree exp)
3978 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3979 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3980 int in0_p, in1_p, in_p;
3981 tree low0, low1, low, high0, high1, high;
3982 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3983 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3986 /* If this is an OR operation, invert both sides; we will invert
3987 again at the end. */
3989 in0_p = ! in0_p, in1_p = ! in1_p;
3991 /* If both expressions are the same, if we can merge the ranges, and we
3992 can build the range test, return it or it inverted. If one of the
3993 ranges is always true or always false, consider it to be the same
3994 expression as the other. */
3995 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3996 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3998 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4000 : rhs != 0 ? rhs : integer_zero_node,
4002 return or_op ? invert_truthvalue (tem) : tem;
4004 /* On machines where the branch cost is expensive, if this is a
4005 short-circuited branch and the underlying object on both sides
4006 is the same, make a non-short-circuit operation. */
4007 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4008 && lhs != 0 && rhs != 0
4009 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4010 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4011 && operand_equal_p (lhs, rhs, 0))
4013 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4014 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4015 which cases we can't do this. */
4016 if (simple_operand_p (lhs))
4017 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4018 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4019 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4020 TREE_OPERAND (exp, 1));
4022 else if (lang_hooks.decls.global_bindings_p () == 0
4023 && ! CONTAINS_PLACEHOLDER_P (lhs))
4025 tree common = save_expr (lhs);
4027 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4028 or_op ? ! in0_p : in0_p,
4030 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4031 or_op ? ! in1_p : in1_p,
4033 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4034 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4035 TREE_TYPE (exp), lhs, rhs);
4042 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4043 bit value. Arrange things so the extra bits will be set to zero if and
4044 only if C is sign-extended to its full width. If MASK is nonzero,
4045 it is an INTEGER_CST that should be AND'ed with the extra bits. */
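/* Numerical sketch: in a 32-bit mode with P == 8, the sign-extended
   constant 0xffffffff comes back as 0x000000ff, while a constant whose
   high bits disagree with its P-bit sign bit keeps nonzero extra bits,
   letting the caller fold the enclosing comparison to a constant.  */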
4048 unextend (tree c, int p, int unsignedp, tree mask)
4050 tree type = TREE_TYPE (c);
4051 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4054 if (p == modesize || unsignedp)
4057 /* We work by getting just the sign bit into the low-order bit, then
4058 into the high-order bit, then sign-extend. We then XOR that value with C. */
4060 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4061 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4063 /* We must use a signed type in order to get an arithmetic right shift.
4064 However, we must also avoid introducing accidental overflows, so that
4065 a subsequent call to integer_zerop will work. Hence we must
4066 do the type conversion here. At this point, the constant is either
4067 zero or one, and the conversion to a signed type can never overflow.
4068 We could get an overflow if this conversion is done anywhere else. */
4069 if (TYPE_UNSIGNED (type))
4070 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4072 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4073 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4075 temp = const_binop (BIT_AND_EXPR, temp,
4076 fold_convert (TREE_TYPE (c), mask), 0);
4077 /* If necessary, convert the type back to match the type of C. */
4078 if (TYPE_UNSIGNED (type))
4079 temp = fold_convert (type, temp);
4081 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4084 /* Find ways of folding logical expressions of LHS and RHS:
4085 Try to merge two comparisons to the same innermost item.
4086 Look for range tests like "ch >= '0' && ch <= '9'".
4087 Look for combinations of simple terms on machines with expensive branches
4088 and evaluate the RHS unconditionally.
4090 For example, if we have p->a == 2 && p->b == 4 and we can make an
4091 object large enough to span both A and B, we can do this with a comparison
4092 against the object ANDed with a mask.
4094 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4095 operations to do this with one comparison.
4097 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4098 function and the one above.
4100 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4101 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4103 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
4106 We return the simplified tree or 0 if no optimization is possible. */
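/* A sketch of the end result (field layout illustrative): for
   struct { unsigned a : 4, b : 4; } s, the test s.a == 2 && s.b == 3
   can become a single masked compare of the containing byte, roughly
   (BYTE & 0xff) == (2 | 3 << 4), with BYTE a placeholder for the
   reference built below.  */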
4109 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4111 /* If this is the "or" of two comparisons, we can do something if
4112 the comparisons are NE_EXPR. If this is the "and", we can do something
4113 if the comparisons are EQ_EXPR. I.e.,
4114 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4116 WANTED_CODE is this operation code. For single bit fields, we can
4117 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4118 comparison for one-bit fields. */
4120 enum tree_code wanted_code;
4121 enum tree_code lcode, rcode;
4122 tree ll_arg, lr_arg, rl_arg, rr_arg;
4123 tree ll_inner, lr_inner, rl_inner, rr_inner;
4124 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4125 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4126 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4127 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4128 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4129 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4130 enum machine_mode lnmode, rnmode;
4131 tree ll_mask, lr_mask, rl_mask, rr_mask;
4132 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4133 tree l_const, r_const;
4134 tree lntype, rntype, result;
4135 int first_bit, end_bit;
4138 /* Start by getting the comparison codes. Fail if anything is volatile.
4139 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4140 it were surrounded with a NE_EXPR. */
4142 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4145 lcode = TREE_CODE (lhs);
4146 rcode = TREE_CODE (rhs);
4148 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4150 lhs = build2 (NE_EXPR, truth_type, lhs, integer_zero_node);
4154 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4156 rhs = build2 (NE_EXPR, truth_type, rhs, integer_zero_node);
4160 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4163 ll_arg = TREE_OPERAND (lhs, 0);
4164 lr_arg = TREE_OPERAND (lhs, 1);
4165 rl_arg = TREE_OPERAND (rhs, 0);
4166 rr_arg = TREE_OPERAND (rhs, 1);
4168 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4169 if (simple_operand_p (ll_arg)
4170 && simple_operand_p (lr_arg))
4173 if (operand_equal_p (ll_arg, rl_arg, 0)
4174 && operand_equal_p (lr_arg, rr_arg, 0))
4176 result = combine_comparisons (code, lcode, rcode,
4177 truth_type, ll_arg, lr_arg);
4181 else if (operand_equal_p (ll_arg, rr_arg, 0)
4182 && operand_equal_p (lr_arg, rl_arg, 0))
4184 result = combine_comparisons (code, lcode,
4185 swap_tree_comparison (rcode),
4186 truth_type, ll_arg, lr_arg);
4192 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4193 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4195 /* If the RHS can be evaluated unconditionally and its operands are
4196 simple, it wins to evaluate the RHS unconditionally on machines
4197 with expensive branches. In this case, this isn't a comparison
4198 that can be merged. Avoid doing this if the RHS is a floating-point
4199 comparison since those can trap. */
4201 if (BRANCH_COST >= 2
4202 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4203 && simple_operand_p (rl_arg)
4204 && simple_operand_p (rr_arg))
4206 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4207 if (code == TRUTH_OR_EXPR
4208 && lcode == NE_EXPR && integer_zerop (lr_arg)
4209 && rcode == NE_EXPR && integer_zerop (rr_arg)
4210 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4211 return build2 (NE_EXPR, truth_type,
4212 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4214 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4216 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4217 if (code == TRUTH_AND_EXPR
4218 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4219 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4220 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4221 return build2 (EQ_EXPR, truth_type,
4222 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4224 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4226 return build2 (code, truth_type, lhs, rhs);
4229 /* See if the comparisons can be merged. Then get all the parameters for each side. */
4232 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4233 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4237 ll_inner = decode_field_reference (ll_arg,
4238 &ll_bitsize, &ll_bitpos, &ll_mode,
4239 &ll_unsignedp, &volatilep, &ll_mask,
4241 lr_inner = decode_field_reference (lr_arg,
4242 &lr_bitsize, &lr_bitpos, &lr_mode,
4243 &lr_unsignedp, &volatilep, &lr_mask,
4245 rl_inner = decode_field_reference (rl_arg,
4246 &rl_bitsize, &rl_bitpos, &rl_mode,
4247 &rl_unsignedp, &volatilep, &rl_mask,
4249 rr_inner = decode_field_reference (rr_arg,
4250 &rr_bitsize, &rr_bitpos, &rr_mode,
4251 &rr_unsignedp, &volatilep, &rr_mask,
4254 /* The inner operation on the lhs of each comparison must be the same
4255 if we are to be able to do anything. Then see if we have constants.
4256 If not, the same must be true for the rhs. */
4258 if (volatilep || ll_inner == 0 || rl_inner == 0
4259 || ! operand_equal_p (ll_inner, rl_inner, 0))
4262 if (TREE_CODE (lr_arg) == INTEGER_CST
4263 && TREE_CODE (rr_arg) == INTEGER_CST)
4264 l_const = lr_arg, r_const = rr_arg;
4265 else if (lr_inner == 0 || rr_inner == 0
4266 || ! operand_equal_p (lr_inner, rr_inner, 0))
4269 l_const = r_const = 0;
4271 /* If either comparison code is not correct for our logical operation,
4272 fail. However, we can convert a one-bit comparison against zero into
4273 the opposite comparison against that bit being set in the field. */
4275 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4276 if (lcode != wanted_code)
4278 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4280 /* Make the left operand unsigned, since we are only interested
4281 in the value of one bit.  Otherwise we are doing the wrong thing below.  */
4290 /* This is analogous to the code for l_const above. */
4291 if (rcode != wanted_code)
4293 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4302 /* After this point all optimizations will generate bit-field
4303 references, which we might not want. */
4304 if (! lang_hooks.can_use_bit_fields_p ())
4307 /* See if we can find a mode that contains both fields being compared on
4308 the left. If we can't, fail. Otherwise, update all constants and masks
4309 to be relative to a field of that size. */
4310 first_bit = MIN (ll_bitpos, rl_bitpos);
4311 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4312 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4313 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4315 if (lnmode == VOIDmode)
4318 lnbitsize = GET_MODE_BITSIZE (lnmode);
4319 lnbitpos = first_bit & ~ (lnbitsize - 1);
4320 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4321 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4323 if (BYTES_BIG_ENDIAN)
4325 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4326 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4329 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4330 size_int (xll_bitpos), 0);
4331 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4332 size_int (xrl_bitpos), 0);
4336 l_const = fold_convert (lntype, l_const);
4337 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4338 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4339 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4340 fold (build1 (BIT_NOT_EXPR,
4344 warning ("comparison is always %d", wanted_code == NE_EXPR);
4346 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4351 r_const = fold_convert (lntype, r_const);
4352 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4353 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4354 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4355 fold (build1 (BIT_NOT_EXPR,
4359 warning ("comparison is always %d", wanted_code == NE_EXPR);
4361 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
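/* Illustrative example (editor's sketch, not in the original source).
   A constant with bits outside the field mask can never match.  Given

     struct s { unsigned f : 2; unsigned g : 2; };

   the merged test `x.f == 5 && x.g == 1' compares the two-bit field F
   against 5, which cannot fit in two bits, so the conjunction folds
   to 0 and "comparison is always 0" is emitted by the code above.  */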
4365 /* If the right sides are not constant, do the same for them.  Also,
4366 disallow this optimization if a size or signedness mismatch occurs
4367 between the left and right sides. */
4370 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4371 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4372 /* Make sure the two fields on the right
4373 correspond to the left without being swapped. */
4374 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4377 first_bit = MIN (lr_bitpos, rr_bitpos);
4378 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4379 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4380 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4382 if (rnmode == VOIDmode)
4385 rnbitsize = GET_MODE_BITSIZE (rnmode);
4386 rnbitpos = first_bit & ~ (rnbitsize - 1);
4387 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4388 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4390 if (BYTES_BIG_ENDIAN)
4392 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4393 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4396 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4397 size_int (xlr_bitpos), 0);
4398 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4399 size_int (xrr_bitpos), 0);
4401 /* Make a mask that corresponds to both fields being compared.
4402 Do this for both items being compared. If the operands are the
4403 same size and the bits being compared are in the same position
4404 then we can do this by masking both and comparing the masked results.  */
4406 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4407 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4408 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4410 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4411 ll_unsignedp || rl_unsignedp);
4412 if (! all_ones_mask_p (ll_mask, lnbitsize))
4413 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4415 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4416 lr_unsignedp || rr_unsignedp);
4417 if (! all_ones_mask_p (lr_mask, rnbitsize))
4418 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4420 return build2 (wanted_code, truth_type, lhs, rhs);
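/* Illustrative sketch (editor's example, not in the original source).
   When both left-hand fields land in one word, and likewise on the
   right, e.g.

     struct s { unsigned a : 8; unsigned b : 8; };
     int eq (struct s *x, struct s *y)
     { return x->a == y->a && x->b == y->b; }

   the code above loads one cluster from *X and one from *Y, masks off
   unrelated bits, and performs a single masked comparison.  */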
4423 /* There is still another way we can do something: If both pairs of
4424 fields being compared are adjacent, we may be able to make a wider
4425 field containing them both.
4427 Note that we still must mask the lhs/rhs expressions. Furthermore,
4428 the mask must be shifted to account for the shift done by
4429 make_bit_field_ref. */
4430 if ((ll_bitsize + ll_bitpos == rl_bitpos
4431 && lr_bitsize + lr_bitpos == rr_bitpos)
4432 || (ll_bitpos == rl_bitpos + rl_bitsize
4433 && lr_bitpos == rr_bitpos + rr_bitsize))
4437 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4438 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4439 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4440 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4442 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4443 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4444 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4445 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4447 /* Convert to the smaller type before masking out unwanted bits. */
4449 if (lntype != rntype)
4451 if (lnbitsize > rnbitsize)
4453 lhs = fold_convert (rntype, lhs);
4454 ll_mask = fold_convert (rntype, ll_mask);
4457 else if (lnbitsize < rnbitsize)
4459 rhs = fold_convert (lntype, rhs);
4460 lr_mask = fold_convert (lntype, lr_mask);
4465 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4466 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4468 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4469 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4471 return build2 (wanted_code, truth_type, lhs, rhs);
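/* Illustrative sketch (editor's example, not in the original source).
   With the same struct as in the previous note, if the single-cluster
   case did not apply but A and B are adjacent on both sides, the two
   8-bit references are replaced by one 16-bit reference per side,
   with the ORed masks shifted to match the wider reference before the
   single comparison is built.  */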
4477 /* Handle the case of comparisons with constants. If there is something in
4478 common between the masks, those bits of the constants must be the same.
4479 If not, the condition is always false. Test for this to avoid generating
4480 incorrect code below. */
4481 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4482 if (! integer_zerop (result)
4483 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4484 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4486 if (wanted_code == NE_EXPR)
4488 warning ("`or' of unmatched not-equal tests is always 1");
4489 return constant_boolean_node (true, truth_type);
4493 warning ("`and' of mutually exclusive equal-tests is always 0");
4494 return constant_boolean_node (false, truth_type);
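/* Illustrative example (editor's sketch, not in the original source).
   The masks 3 and 5 share bit 0, but the constants disagree on that
   bit:

     (x & 3) == 1 && (x & 5) == 4

   No value of X satisfies both, so the AND folds to 0; symmetrically,
   an `or' of the corresponding != tests folds to 1.  */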
4498 /* Construct the expression we will return. First get the component
4499 reference we will make. Unless the mask is all ones the width of
4500 that field, perform the mask operation.  Then compare with the merged constant.  */
4502 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4503 ll_unsignedp || rl_unsignedp);
4505 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4506 if (! all_ones_mask_p (ll_mask, lnbitsize))
4507 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4509 return build2 (wanted_code, truth_type, result,
4510 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4513 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant.  */
4517 optimize_minmax_comparison (tree t)
4519 tree type = TREE_TYPE (t);
4520 tree arg0 = TREE_OPERAND (t, 0);
4521 enum tree_code op_code;
4522 tree comp_const = TREE_OPERAND (t, 1);
4524 int consts_equal, consts_lt;
4527 STRIP_SIGN_NOPS (arg0);
4529 op_code = TREE_CODE (arg0);
4530 minmax_const = TREE_OPERAND (arg0, 1);
4531 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4532 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4533 inner = TREE_OPERAND (arg0, 0);
4535 /* If something does not permit us to optimize, return the original tree. */
4536 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4537 || TREE_CODE (comp_const) != INTEGER_CST
4538 || TREE_CONSTANT_OVERFLOW (comp_const)
4539 || TREE_CODE (minmax_const) != INTEGER_CST
4540 || TREE_CONSTANT_OVERFLOW (minmax_const))
4543 /* Now handle all the various comparison codes.  We only handle EQ_EXPR
4544 and GT_EXPR, doing the rest with recursive calls using logical simplifications.  */
4546 switch (TREE_CODE (t))
4548 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4550 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4554 fold (build2 (TRUTH_ORIF_EXPR, type,
4555 optimize_minmax_comparison
4556 (build2 (EQ_EXPR, type, arg0, comp_const)),
4557 optimize_minmax_comparison
4558 (build2 (GT_EXPR, type, arg0, comp_const))));
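/* Illustrative walk-through (editor's example, not in the original
   source).  A GE comparison is split into EQ || GT and each half is
   folded on its own, e.g.

     MAX (x, 3) >= 5  =>  MAX (x, 3) == 5 || MAX (x, 3) > 5
                      =>  x == 5 || x > 5

   while NE, LT and LE are handled by inverting the comparison,
   folding the result, and inverting again.  */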
4561 if (op_code == MAX_EXPR && consts_equal)
4562 /* MAX (X, 0) == 0 -> X <= 0 */
4563 return fold (build2 (LE_EXPR, type, inner, comp_const));
4565 else if (op_code == MAX_EXPR && consts_lt)
4566 /* MAX (X, 0) == 5 -> X == 5 */
4567 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4569 else if (op_code == MAX_EXPR)
4570 /* MAX (X, 0) == -1 -> false */
4571 return omit_one_operand (type, integer_zero_node, inner);
4573 else if (consts_equal)
4574 /* MIN (X, 0) == 0 -> X >= 0 */
4575 return fold (build2 (GE_EXPR, type, inner, comp_const));
4578 /* MIN (X, 0) == 5 -> false */
4579 return omit_one_operand (type, integer_zero_node, inner);
4582 /* MIN (X, 0) == -1 -> X == -1 */
4583 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4586 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4587 /* MAX (X, 0) > 0 -> X > 0
4588 MAX (X, 0) > 5 -> X > 5 */
4589 return fold (build2 (GT_EXPR, type, inner, comp_const));
4591 else if (op_code == MAX_EXPR)
4592 /* MAX (X, 0) > -1 -> true */
4593 return omit_one_operand (type, integer_one_node, inner);
4595 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4596 /* MIN (X, 0) > 0 -> false
4597 MIN (X, 0) > 5 -> false */
4598 return omit_one_operand (type, integer_zero_node, inner);
4601 /* MIN (X, 0) > -1 -> X > -1 */
4602 return fold (build2 (GT_EXPR, type, inner, comp_const));
4609 /* T is an integer expression that is being multiplied, divided, or
4610 taken modulo a constant C (CODE says which and what kind of divide
4611 or modulus).  See if we can eliminate that operation by folding it with
4612 other operations already in T. WIDE_TYPE, if non-null, is a type that
4613 should be used for the computation if wider than our type.
4615 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4616 (X * 2) + (Y * 4). We must, however, be assured that either the original
4617 expression would not overflow or that overflow is undefined for the type
4618 in the language in question.
4620 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4621 the machine has a multiply-accumulate insn or that this is part of an
4622 addressing calculation.
4624 If we return a non-null expression, it is an equivalent form of the
4625 original computation, but need not be in the original type. */
4628 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4630 /* To avoid exponential search depth, refuse to allow recursion past
4631 three levels. Beyond that (1) it's highly unlikely that we'll find
4632 something interesting and (2) we've probably processed it before
4633 when we built the inner expression. */
4642 ret = extract_muldiv_1 (t, c, code, wide_type);
4649 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4651 tree type = TREE_TYPE (t);
4652 enum tree_code tcode = TREE_CODE (t);
4653 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4654 > GET_MODE_SIZE (TYPE_MODE (type)))
4655 ? wide_type : type);
4657 int same_p = tcode == code;
4658 tree op0 = NULL_TREE, op1 = NULL_TREE;
4660 /* Don't deal with constants of zero here; they confuse the code below. */
4661 if (integer_zerop (c))
4664 if (TREE_CODE_CLASS (tcode) == '1')
4665 op0 = TREE_OPERAND (t, 0);
4667 if (TREE_CODE_CLASS (tcode) == '2')
4668 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4670 /* Note that we need not handle conditional operations here since fold
4671 already handles those cases. So just do arithmetic here. */
4675 /* For a constant, we can always simplify if we are a multiply
4676 or (for divide and modulus) if it is a multiple of our constant. */
4677 if (code == MULT_EXPR
4678 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4679 return const_binop (code, fold_convert (ctype, t),
4680 fold_convert (ctype, c), 0);
4683 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4684 /* If op0 is an expression ... */
4685 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4686 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4687 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4688 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4689 /* ... and is unsigned, and its type is smaller than ctype,
4690 then we cannot pass through as widening. */
4691 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
4692 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4693 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4694 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4695 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4696 /* ... or its type is larger than ctype,
4697 then we cannot pass through this truncation. */
4698 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4699 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4700 /* ... or signedness changes for division or modulus,
4701 then we cannot pass through this conversion. */
4702 || (code != MULT_EXPR
4703 && (TYPE_UNSIGNED (ctype)
4704 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
4707 /* Pass the constant down and see if we can make a simplification. If
4708 we can, replace this expression with the inner simplification for
4709 possible later conversion to our or some other type. */
4710 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4711 && TREE_CODE (t2) == INTEGER_CST
4712 && ! TREE_CONSTANT_OVERFLOW (t2)
4713 && (0 != (t1 = extract_muldiv (op0, t2, code,
4715 ? ctype : NULL_TREE))))
4719 case NEGATE_EXPR: case ABS_EXPR:
4720 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4721 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4724 case MIN_EXPR: case MAX_EXPR:
4725 /* If widening the type changes the signedness, then we can't perform
4726 this optimization as that changes the result. */
4727 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
4730 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4731 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4732 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4734 if (tree_int_cst_sgn (c) < 0)
4735 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4737 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4738 fold_convert (ctype, t2)));
4742 case LSHIFT_EXPR: case RSHIFT_EXPR:
4743 /* If the second operand is constant, this is a multiplication
4744 or floor division by a power of two, so we can treat it that
4745 way unless the multiplier or divisor overflows. */
4746 if (TREE_CODE (op1) == INTEGER_CST
4747 /* const_binop may not detect overflow correctly,
4748 so check for it explicitly here. */
4749 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4750 && TREE_INT_CST_HIGH (op1) == 0
4751 && 0 != (t1 = fold_convert (ctype,
4752 const_binop (LSHIFT_EXPR,
4755 && ! TREE_OVERFLOW (t1))
4756 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
4757 ? MULT_EXPR : FLOOR_DIV_EXPR,
4758 ctype, fold_convert (ctype, op0), t1),
4759 c, code, wide_type);
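/* Illustrative example (editor's sketch, not in the original source).
   Re-expressing a constant shift arithmetically lets the rest of this
   function combine it with C, e.g.

     (x << 3) / 4  =>  (x * 8) / 4  =>  x * 2

   provided the multiplier 1 << 3 fits without overflow, as the
   explicit checks above require.  */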
4762 case PLUS_EXPR: case MINUS_EXPR:
4763 /* See if we can eliminate the operation on both sides. If we can, we
4764 can return a new PLUS or MINUS. If we can't, the only remaining
4765 cases where we can do anything are if the second operand is a constant.  */
4767 t1 = extract_muldiv (op0, c, code, wide_type);
4768 t2 = extract_muldiv (op1, c, code, wide_type);
4769 if (t1 != 0 && t2 != 0
4770 && (code == MULT_EXPR
4771 /* If not multiplication, we can only do this if both operands
4772 are divisible by c. */
4773 || (multiple_of_p (ctype, op0, c)
4774 && multiple_of_p (ctype, op1, c))))
4775 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4776 fold_convert (ctype, t2)));
4778 /* If this was a subtraction, negate OP1 and set it to be an addition.
4779 This simplifies the logic below. */
4780 if (tcode == MINUS_EXPR)
4781 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4783 if (TREE_CODE (op1) != INTEGER_CST)
4786 /* If either OP1 or C is negative, this optimization is not safe for
4787 some of the division and remainder types while for others we need
4788 to change the code. */
4789 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4791 if (code == CEIL_DIV_EXPR)
4792 code = FLOOR_DIV_EXPR;
4793 else if (code == FLOOR_DIV_EXPR)
4794 code = CEIL_DIV_EXPR;
4795 else if (code != MULT_EXPR
4796 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4800 /* If it's a multiply or a division/modulus operation of a multiple
4801 of our constant, do the operation and verify it doesn't overflow. */
4802 if (code == MULT_EXPR
4803 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4805 op1 = const_binop (code, fold_convert (ctype, op1),
4806 fold_convert (ctype, c), 0);
4807 /* We allow the constant to overflow with wrapping semantics. */
4809 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4815 /* If we have an unsigned type that is not a sizetype, we cannot widen
4816 the operation since it will change the result if the original
4817 computation overflowed. */
4818 if (TYPE_UNSIGNED (ctype)
4819 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4823 /* If we were able to eliminate our operation from the first side,
4824 apply our operation to the second side and reform the PLUS. */
4825 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4826 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
4828 /* The last case is if this is a multiply.  In that case, we can
4829 apply the distributive law to commute the multiply and addition
4830 if the multiplication of the constants doesn't overflow. */
4831 if (code == MULT_EXPR)
4832 return fold (build2 (tcode, ctype,
4833 fold (build2 (code, ctype,
4834 fold_convert (ctype, op0),
4835 fold_convert (ctype, c))),
4841 /* We have a special case here if we are doing something like
4842 (C * 8) % 4 since we know that's zero. */
4843 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4844 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4845 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4846 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4847 return omit_one_operand (type, integer_zero_node, op0);
4849 /* ... fall through ... */
4851 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4852 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4853 /* If we can extract our operation from the LHS, do so and return a
4854 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4855 do something only if the second operand is a constant. */
4857 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4858 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
4859 fold_convert (ctype, op1)));
4860 else if (tcode == MULT_EXPR && code == MULT_EXPR
4861 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4862 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
4863 fold_convert (ctype, t1)));
4864 else if (TREE_CODE (op1) != INTEGER_CST)
4867 /* If these are the same operation types, we can associate them
4868 assuming no overflow. */
4870 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4871 fold_convert (ctype, c), 0))
4872 && ! TREE_OVERFLOW (t1))
4873 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
4875 /* If these operations "cancel" each other, we have the main
4876 optimizations of this pass, which occur when either constant is a
4877 multiple of the other, in which case we replace this with either an
4878 operation of CODE or TCODE.
4880 If we have an unsigned type that is not a sizetype, we cannot do
4881 this since it will change the result if the original computation overflowed.  */
4883 if ((! TYPE_UNSIGNED (ctype)
4884 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4886 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4887 || (tcode == MULT_EXPR
4888 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4889 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4891 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4892 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
4893 fold_convert (ctype,
4894 const_binop (TRUNC_DIV_EXPR,
4896 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4897 return fold (build2 (code, ctype, fold_convert (ctype, op0),
4898 fold_convert (ctype,
4899 const_binop (TRUNC_DIV_EXPR,
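/* Illustrative examples (editor's sketch, not in the original
   source).  The two branches above perform the cancellations

     (x * 12) / 4  =>  x * (12 / 4)  =>  x * 3
     (x * 4) / 12  =>  x / (12 / 4)  =>  x / 3

   subject to the signedness restriction above, since the rewrites are
   only safe when the original multiplication cannot overflow or
   overflow is undefined for the type.  */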
4911 /* Return a node which has the indicated constant VALUE (either 0 or
4912 1), and is of the indicated TYPE. */
4915 constant_boolean_node (int value, tree type)
4917 if (type == integer_type_node)
4918 return value ? integer_one_node : integer_zero_node;
4919 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4920 return lang_hooks.truthvalue_conversion (value ? integer_one_node
4921 : integer_zero_node);
4924 tree t = build_int_2 (value, 0);
4926 TREE_TYPE (t) = type;
4931 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4932 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4933 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4934 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4935 COND is the first argument to CODE; otherwise (as in the example
4936 given here), it is the second argument. TYPE is the type of the
4937 original expression.  Return NULL_TREE if no simplification is possible.  */
4941 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4942 tree cond, tree arg, int cond_first_p)
4944 tree test, true_value, false_value;
4945 tree lhs = NULL_TREE;
4946 tree rhs = NULL_TREE;
4948 /* This transformation is only worthwhile if we don't have to wrap
4949 arg in a SAVE_EXPR, and the operation can be simplified on at least
4950 one of the branches once it's pushed inside the COND_EXPR.  */
4951 if (!TREE_CONSTANT (arg))
4954 if (TREE_CODE (cond) == COND_EXPR)
4956 test = TREE_OPERAND (cond, 0);
4957 true_value = TREE_OPERAND (cond, 1);
4958 false_value = TREE_OPERAND (cond, 2);
4959 /* If this operand is an expression that throws, it does not make
4960 sense to try to perform a logical or arithmetic operation involving it.  */
4962 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4964 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4969 tree testtype = TREE_TYPE (cond);
4971 true_value = constant_boolean_node (true, testtype);
4972 false_value = constant_boolean_node (false, testtype);
4976 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
4977 : build2 (code, type, arg, true_value));
4979 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
4980 : build2 (code, type, arg, false_value));
4982 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
4983 return fold_convert (type, test);
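/* Illustrative example (editor's sketch, not in the original source).
   With a constant ARG the operation is duplicated into both arms of
   the conditional, where it can fold further:

     4 + (b ? 1 : 3)  =>  b ? 4 + 1 : 4 + 3  =>  b ? 5 : 7

   and a comparison such as (x < y) is first rewritten as the
   conditional (x < y) ? 1 : 0 via constant_boolean_node.  */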
4987 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4989 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4990 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4991 ADDEND is the same as X.
4993 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4994 and finite. The problematic cases are when X is zero, and its mode
4995 has signed zeros. In the case of rounding towards -infinity,
4996 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4997 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5000 fold_real_zero_addition_p (tree type, tree addend, int negate)
5002 if (!real_zerop (addend))
5005 /* Don't allow the fold with -fsignaling-nans. */
5006 if (HONOR_SNANS (TYPE_MODE (type)))
5009 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5010 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5013 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5014 if (TREE_CODE (addend) == REAL_CST
5015 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5018 /* The mode has signed zeros, and we have to honor their sign.
5019 In this situation, there is only one case we can return true for.
5020 X - 0 is the same as X unless rounding towards -infinity is
5022 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
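/* Worked example (editor's note, not in the original source).  With
   signed zeros honored, let X be -0.0.  Then X + 0.0 evaluates to
   +0.0, a different value from X, so the +0.0 addend may not be
   dropped.  X - 0.0 yields -0.0 == X under the default rounding,
   which is why only the NEGATE case can return true, and then only
   when sign-dependent rounding is not in effect.  */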
5025 /* Subroutine of fold() that checks comparisons of built-in math
5026 functions against real constants.
5028 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5029 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5030 is the type of the result and ARG0 and ARG1 are the operands of the
5031 comparison. ARG1 must be a TREE_REAL_CST.
5033 The function returns the constant folded tree if a simplification
5034 can be made, and NULL_TREE otherwise. */
5037 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5038 tree type, tree arg0, tree arg1)
5042 if (BUILTIN_SQRT_P (fcode))
5044 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5045 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5047 c = TREE_REAL_CST (arg1);
5048 if (REAL_VALUE_NEGATIVE (c))
5050 /* sqrt(x) < y is always false, if y is negative. */
5051 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5052 return omit_one_operand (type, integer_zero_node, arg);
5054 /* sqrt(x) > y is always true, if y is negative and we
5055 don't care about NaNs, i.e. negative values of x. */
5056 if (code == NE_EXPR || !HONOR_NANS (mode))
5057 return omit_one_operand (type, integer_one_node, arg);
5059 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5060 return fold (build2 (GE_EXPR, type, arg,
5061 build_real (TREE_TYPE (arg), dconst0)));
5063 else if (code == GT_EXPR || code == GE_EXPR)
5067 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5068 real_convert (&c2, mode, &c2);
5070 if (REAL_VALUE_ISINF (c2))
5072 /* sqrt(x) > y is x == +Inf, when y is very large. */
5073 if (HONOR_INFINITIES (mode))
5074 return fold (build2 (EQ_EXPR, type, arg,
5075 build_real (TREE_TYPE (arg), c2)));
5077 /* sqrt(x) > y is always false, when y is very large
5078 and we don't care about infinities. */
5079 return omit_one_operand (type, integer_zero_node, arg);
5082 /* sqrt(x) > c is the same as x > c*c. */
5083 return fold (build2 (code, type, arg,
5084 build_real (TREE_TYPE (arg), c2)));
5086 else if (code == LT_EXPR || code == LE_EXPR)
5090 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5091 real_convert (&c2, mode, &c2);
5093 if (REAL_VALUE_ISINF (c2))
5095 /* sqrt(x) < y is always true, when y is a very large
5096 value and we don't care about NaNs or Infinities. */
5097 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5098 return omit_one_operand (type, integer_one_node, arg);
5100 /* sqrt(x) < y is x != +Inf when y is very large and we
5101 don't care about NaNs. */
5102 if (! HONOR_NANS (mode))
5103 return fold (build2 (NE_EXPR, type, arg,
5104 build_real (TREE_TYPE (arg), c2)));
5106 /* sqrt(x) < y is x >= 0 when y is very large and we
5107 don't care about Infinities. */
5108 if (! HONOR_INFINITIES (mode))
5109 return fold (build2 (GE_EXPR, type, arg,
5110 build_real (TREE_TYPE (arg), dconst0)));
5112 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5113 if (lang_hooks.decls.global_bindings_p () != 0
5114 || CONTAINS_PLACEHOLDER_P (arg))
5117 arg = save_expr (arg);
5118 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5119 fold (build2 (GE_EXPR, type, arg,
5120 build_real (TREE_TYPE (arg),
5122 fold (build2 (NE_EXPR, type, arg,
5123 build_real (TREE_TYPE (arg),
5127 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5128 if (! HONOR_NANS (mode))
5129 return fold (build2 (code, type, arg,
5130 build_real (TREE_TYPE (arg), c2)));
5132 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5133 if (lang_hooks.decls.global_bindings_p () == 0
5134 && ! CONTAINS_PLACEHOLDER_P (arg))
5136 arg = save_expr (arg);
5137 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5138 fold (build2 (GE_EXPR, type, arg,
5139 build_real (TREE_TYPE (arg),
5141 fold (build2 (code, type, arg,
5142 build_real (TREE_TYPE (arg),
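/* Illustrative examples (editor's sketch, not in the original
   source).  The squaring transforms above give

     sqrt (x) > 2.0  =>  x > 4.0    (safe even with NaNs)
     sqrt (x) < 3.0  =>  x < 9.0    (only if NaNs are ignored)

   with NaNs honored the second form needs the guard
   x >= 0.0 && x < 9.0, since sqrt of a negative argument is NaN and
   compares false, while x itself would compare true.  */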
5151 /* Subroutine of fold() that optimizes comparisons against Infinities,
5152 either +Inf or -Inf.
5154 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5155 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5156 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5158 The function returns the constant folded tree if a simplification
5159 can be made, and NULL_TREE otherwise. */
5162 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5164 enum machine_mode mode;
5165 REAL_VALUE_TYPE max;
5169 mode = TYPE_MODE (TREE_TYPE (arg0));
5171 /* For negative infinity swap the sense of the comparison. */
5172 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5174 code = swap_tree_comparison (code);
5179 /* x > +Inf is always false, if we ignore sNaNs.  */
5180 if (HONOR_SNANS (mode))
5182 return omit_one_operand (type, integer_zero_node, arg0);
5185 /* x <= +Inf is always true, if we don't care about NaNs.  */
5186 if (! HONOR_NANS (mode))
5187 return omit_one_operand (type, integer_one_node, arg0);
5189 /* x <= +Inf is the same as x == x, i.e. x is not a NaN.  */
5190 if (lang_hooks.decls.global_bindings_p () == 0
5191 && ! CONTAINS_PLACEHOLDER_P (arg0))
5193 arg0 = save_expr (arg0);
5194 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5200 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5201 real_maxval (&max, neg, mode);
5202 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5203 arg0, build_real (TREE_TYPE (arg0), max)));
5206 /* x < +Inf is always equal to x <= DBL_MAX. */
5207 real_maxval (&max, neg, mode);
5208 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5209 arg0, build_real (TREE_TYPE (arg0), max)));
5212 /* x != +Inf is always equal to !(x > DBL_MAX). */
5213 real_maxval (&max, neg, mode);
5214 if (! HONOR_NANS (mode))
5215 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5216 arg0, build_real (TREE_TYPE (arg0), max)));
5218 /* The transformation below creates non-gimple code and thus is
5219 not appropriate if we are in gimple form. */
5223 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5224 arg0, build_real (TREE_TYPE (arg0), max)));
5225 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
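/* Illustrative examples (editor's sketch, not in the original
   source).  For double, the cases above reduce comparisons against
   +Inf to comparisons against the largest finite value:

     x >  +Inf  =>  0 (unless signaling NaNs are honored)
     x >= +Inf  =>  x > DBL_MAX
     x <  +Inf  =>  x <= DBL_MAX

   comparisons against -Inf reuse the same code with the sense of the
   comparison swapped.  */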
5234 /* Subroutine of fold() that optimizes comparisons of a division by
5235 a nonzero integer constant against an integer constant, i.e. X/C1 op C2.
5238 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5239 GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
5240 are the operands of the comparison.  ARG1 must be an INTEGER_CST.
5242 The function returns the constant folded tree if a simplification
5243 can be made, and NULL_TREE otherwise. */
5246 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5248 tree prod, tmp, hi, lo;
5249 tree arg00 = TREE_OPERAND (arg0, 0);
5250 tree arg01 = TREE_OPERAND (arg0, 1);
5251 unsigned HOST_WIDE_INT lpart;
5252 HOST_WIDE_INT hpart;
5255 /* We have to do this the hard way to detect unsigned overflow.
5256 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5257 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5258 TREE_INT_CST_HIGH (arg01),
5259 TREE_INT_CST_LOW (arg1),
5260 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5261 prod = build_int_2 (lpart, hpart);
5262 TREE_TYPE (prod) = TREE_TYPE (arg00);
5263 TREE_OVERFLOW (prod) = force_fit_type (prod, overflow)
5264 || TREE_INT_CST_HIGH (prod) != hpart
5265 || TREE_INT_CST_LOW (prod) != lpart;
5266 TREE_CONSTANT_OVERFLOW (prod) = TREE_OVERFLOW (prod);
5268 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5270 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5273 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5274 overflow = add_double (TREE_INT_CST_LOW (prod),
5275 TREE_INT_CST_HIGH (prod),
5276 TREE_INT_CST_LOW (tmp),
5277 TREE_INT_CST_HIGH (tmp),
5279 hi = build_int_2 (lpart, hpart);
5280 TREE_TYPE (hi) = TREE_TYPE (arg00);
5281 TREE_OVERFLOW (hi) = force_fit_type (hi, overflow)
5282 || TREE_INT_CST_HIGH (hi) != hpart
5283 || TREE_INT_CST_LOW (hi) != lpart
5284 || TREE_OVERFLOW (prod);
5285 TREE_CONSTANT_OVERFLOW (hi) = TREE_OVERFLOW (hi);
5287 else if (tree_int_cst_sgn (arg01) >= 0)
5289 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5290 switch (tree_int_cst_sgn (arg1))
5293 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5298 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5303 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5313 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5314 switch (tree_int_cst_sgn (arg1))
5317 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5322 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5327 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5339 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5340 return omit_one_operand (type, integer_zero_node, arg00);
5341 if (TREE_OVERFLOW (hi))
5342 return fold (build2 (GE_EXPR, type, arg00, lo));
5343 if (TREE_OVERFLOW (lo))
5344 return fold (build2 (LE_EXPR, type, arg00, hi));
5345 return build_range_check (type, arg00, 1, lo, hi);
5348 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5349 return omit_one_operand (type, integer_one_node, arg00);
5350 if (TREE_OVERFLOW (hi))
5351 return fold (build2 (LT_EXPR, type, arg00, lo));
5352 if (TREE_OVERFLOW (lo))
5353 return fold (build2 (GT_EXPR, type, arg00, hi));
5354 return build_range_check (type, arg00, 0, lo, hi);
5357 if (TREE_OVERFLOW (lo))
5358 return omit_one_operand (type, integer_zero_node, arg00);
5359 return fold (build2 (LT_EXPR, type, arg00, lo));
5362 if (TREE_OVERFLOW (hi))
5363 return omit_one_operand (type, integer_one_node, arg00);
5364 return fold (build2 (LE_EXPR, type, arg00, hi));
5367 if (TREE_OVERFLOW (hi))
5368 return omit_one_operand (type, integer_zero_node, arg00);
5369 return fold (build2 (GT_EXPR, type, arg00, hi));
5372 if (TREE_OVERFLOW (lo))
5373 return omit_one_operand (type, integer_one_node, arg00);
5374 return fold (build2 (GE_EXPR, type, arg00, lo));
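/* Worked example (editor's note, not in the original source).  For
   unsigned X compared as x / 3 == 2, PROD is 3 * 2 = 6 and HI is
   6 + (3 - 1) = 8, so

     x / 3 == 2  =>  6 <= x && x <= 8   (emitted as a range check)
     x / 3 >  2  =>  x > 8

   with the overflow cases above degenerating to constants or to
   one-sided comparisons.  */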
5384 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5385 equality/inequality test, then return a simplified form of
5386 the test using shifts and logical operations. Otherwise return
5387 NULL. TYPE is the desired result type. */
5390 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5393 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside operand 0.  */
5395 if (code == TRUTH_NOT_EXPR)
5397 code = TREE_CODE (arg0);
5398 if (code != NE_EXPR && code != EQ_EXPR)
5401 /* Extract the arguments of the EQ/NE. */
5402 arg1 = TREE_OPERAND (arg0, 1);
5403 arg0 = TREE_OPERAND (arg0, 0);
5405 /* This requires us to invert the code. */
5406 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5409 /* If this is testing a single bit, we can optimize the test. */
5410 if ((code == NE_EXPR || code == EQ_EXPR)
5411 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5412 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5414 tree inner = TREE_OPERAND (arg0, 0);
5415 tree type = TREE_TYPE (arg0);
5416 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5417 enum machine_mode operand_mode = TYPE_MODE (type);
5419 tree signed_type, unsigned_type, intermediate_type;
5422 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5423 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5424 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5425 if (arg00 != NULL_TREE
5426 /* This is only a win if casting to a signed type is cheap,
5427 i.e. when arg00's type is not a partial mode. */
5428 && TYPE_PRECISION (TREE_TYPE (arg00))
5429 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5431 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5432 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5433 result_type, fold_convert (stype, arg00),
5434 fold_convert (stype, integer_zero_node)));
5437 /* Otherwise we have (A & C) != 0 where C is a single bit,
5438 convert that into ((A >> C2) & 1), where C2 = log2(C).
5439 Similarly for (A & C) == 0.  */
5441 /* If INNER is a right shift by a constant and it plus BITNUM does
5442 not overflow, adjust BITNUM and INNER. */
5443 if (TREE_CODE (inner) == RSHIFT_EXPR
5444 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5445 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5446 && bitnum < TYPE_PRECISION (type)
5447 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5448 bitnum - TYPE_PRECISION (type)))
5450 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5451 inner = TREE_OPERAND (inner, 0);
5454 /* If we are going to be able to omit the AND below, we must do our
5455 operations as unsigned. If we must use the AND, we have a choice.
5456 Normally unsigned is faster, but for some machines signed is. */
5457 #ifdef LOAD_EXTEND_OP
5458 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5463 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5464 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5465 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5466 inner = fold_convert (intermediate_type, inner);
5469 inner = build2 (RSHIFT_EXPR, intermediate_type,
5470 inner, size_int (bitnum));
5472 if (code == EQ_EXPR)
5473 inner = build2 (BIT_XOR_EXPR, intermediate_type,
5474 inner, integer_one_node);
5476 /* Put the AND last so it can combine with more things. */
5477 inner = build2 (BIT_AND_EXPR, intermediate_type,
5478 inner, integer_one_node);
5480 /* Make sure to return the proper type. */
5481 inner = fold_convert (result_type, inner);
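/* Illustrative examples (editor's sketch, not in the original
   source).  For a single-bit mask the test becomes a shift:

     (x & 8) != 0  =>  (x >> 3) & 1
     (x & 8) == 0  =>  ((x >> 3) ^ 1) & 1

   and when the mask is the sign bit, e.g. (x & 0x80000000) != 0 for a
   32-bit int, the earlier special case emits x < 0 instead.  */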
5488 /* Check whether we are allowed to reorder operands arg0 and arg1,
5489 such that the evaluation of arg1 occurs before arg0. */
5492 reorder_operands_p (tree arg0, tree arg1)
5494 if (! flag_evaluation_order)
5496 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5498 return ! TREE_SIDE_EFFECTS (arg0)
5499 && ! TREE_SIDE_EFFECTS (arg1);
5502 /* Test whether it is preferable to swap two operands, ARG0 and
5503 ARG1, for example because ARG0 is an integer constant and ARG1
5504 isn't. If REORDER is true, only recommend swapping if we can
5505 evaluate the operands in reverse order. */
5508 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5510 STRIP_SIGN_NOPS (arg0);
5511 STRIP_SIGN_NOPS (arg1);
5513 if (TREE_CODE (arg1) == INTEGER_CST)
5515 if (TREE_CODE (arg0) == INTEGER_CST)
5518 if (TREE_CODE (arg1) == REAL_CST)
5520 if (TREE_CODE (arg0) == REAL_CST)
5523 if (TREE_CODE (arg1) == COMPLEX_CST)
5525 if (TREE_CODE (arg0) == COMPLEX_CST)
5528 if (TREE_CONSTANT (arg1))
5530 if (TREE_CONSTANT (arg0))
5536 if (reorder && flag_evaluation_order
5537 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5545 if (reorder && flag_evaluation_order
5546 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5557 /* Perform constant folding and related simplification of EXPR.
5558 The related simplifications include x*1 => x, x*0 => 0, etc.,
5559 and application of the associative law.
5560 NOP_EXPR conversions may be removed freely (as long as we
5561 are careful not to change the type of the overall expression).
5562 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5563 but we can constant-fold them if they have constant operands. */
5565 #ifdef ENABLE_FOLD_CHECKING
5566 # define fold(x) fold_1 (x)
5567 static tree fold_1 (tree);
5573 const tree t = expr;
5574 const tree type = TREE_TYPE (expr);
5575 tree t1 = NULL_TREE;
5577 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5578 enum tree_code code = TREE_CODE (t);
5579 int kind = TREE_CODE_CLASS (code);
5581 /* WINS will be nonzero when the switch is done
5582 if all operands are constant. */
5585 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5586 Likewise for a SAVE_EXPR that's already been evaluated. */
5587 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5590 /* Return right away if a constant. */
5594 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5598 /* Special case for conversion ops that can have fixed point args. */
5599 arg0 = TREE_OPERAND (t, 0);
5601 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5603 STRIP_SIGN_NOPS (arg0);
5605 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5606 subop = TREE_REALPART (arg0);
5610 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5611 && TREE_CODE (subop) != REAL_CST)
5612 /* Note that TREE_CONSTANT isn't enough:
5613 static var addresses are constant but we can't
5614 do arithmetic on them. */
5617 else if (IS_EXPR_CODE_CLASS (kind))
5619 int len = first_rtl_op (code);
5621 for (i = 0; i < len; i++)
5623 tree op = TREE_OPERAND (t, i);
5627 continue; /* Valid for CALL_EXPR, at least. */
5629 /* Strip any conversions that don't change the mode. This is
5630 safe for every expression, except for a comparison expression
5631 because its signedness is derived from its operands. So, in
5632 the latter case, only strip conversions that don't change the signedness.
5635 Note that this is done as an internal manipulation within the
5636 constant folder, in order to find the simplest representation
5637 of the arguments so that their form can be studied.  In any
5638 case, the appropriate type conversions should be put back in
5639 the tree that will get out of the constant folder. */
5641 STRIP_SIGN_NOPS (op);
5645 if (TREE_CODE (op) == COMPLEX_CST)
5646 subop = TREE_REALPART (op);
5650 if (TREE_CODE (subop) != INTEGER_CST
5651 && TREE_CODE (subop) != REAL_CST)
5652 /* Note that TREE_CONSTANT isn't enough:
5653 static var addresses are constant but we can't
5654 do arithmetic on them. */
5664 /* If this is a commutative operation, and ARG0 is a constant, move it
5665 to ARG1 to reduce the number of tests below. */
5666 if (commutative_tree_code (code)
5667 && tree_swap_operands_p (arg0, arg1, true))
5668 return fold (build2 (code, type, TREE_OPERAND (t, 1),
5669 TREE_OPERAND (t, 0)));
5671 /* Now WINS is set as described above,
5672 ARG0 is the first operand of EXPR,
5673 and ARG1 is the second operand (if it has more than one operand).
5675 First check for cases where an arithmetic operation is applied to a
5676 compound, conditional, or comparison operation. Push the arithmetic
5677 operation inside the compound or conditional to see if any folding
5678 can then be done. Convert comparison to conditional for this purpose.
5679 This also optimizes non-constant cases that used to be done in expand_expr.
5682 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5683 one of the operands is a comparison and the other is a comparison, a
5684 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5685 code below would make the expression more complex. Change it to a
5686 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5687 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5689 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5690 || code == EQ_EXPR || code == NE_EXPR)
5691 && ((truth_value_p (TREE_CODE (arg0))
5692 && (truth_value_p (TREE_CODE (arg1))
5693 || (TREE_CODE (arg1) == BIT_AND_EXPR
5694 && integer_onep (TREE_OPERAND (arg1, 1)))))
5695 || (truth_value_p (TREE_CODE (arg1))
5696 && (truth_value_p (TREE_CODE (arg0))
5697 || (TREE_CODE (arg0) == BIT_AND_EXPR
5698 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5700 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5701 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5703 type, fold_convert (boolean_type_node, arg0),
5704 fold_convert (boolean_type_node, arg1)));
5706 if (code == EQ_EXPR)
5707 tem = invert_truthvalue (tem);
5712 if (TREE_CODE_CLASS (code) == '1')
5714 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5715 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5716 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5717 else if (TREE_CODE (arg0) == COND_EXPR)
5719 tree arg01 = TREE_OPERAND (arg0, 1);
5720 tree arg02 = TREE_OPERAND (arg0, 2);
5721 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5722 arg01 = fold (build1 (code, type, arg01));
5723 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5724 arg02 = fold (build1 (code, type, arg02));
5725 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5728 /* If this was a conversion, and all we did was to move it
5729 inside the COND_EXPR, bring it back out. But leave it if
5730 it is a conversion from integer to integer and the
5731 result precision is no wider than a word since such a
5732 conversion is cheap and may be optimized away by combine,
5733 while it couldn't if it were outside the COND_EXPR. Then return
5734 so we don't get into an infinite recursion loop taking the
5735 conversion out and then back in. */
5737 if ((code == NOP_EXPR || code == CONVERT_EXPR
5738 || code == NON_LVALUE_EXPR)
5739 && TREE_CODE (tem) == COND_EXPR
5740 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
5741 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
5742 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
5743 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
5744 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
5745 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
5746 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
5748 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
5749 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
5750 tem = build1 (code, type,
5752 TREE_TYPE (TREE_OPERAND
5753 (TREE_OPERAND (tem, 1), 0)),
5754 TREE_OPERAND (tem, 0),
5755 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
5756 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
5759 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5761 if (TREE_CODE (type) == BOOLEAN_TYPE)
5763 arg0 = copy_node (arg0);
5764 TREE_TYPE (arg0) = type;
5767 else if (TREE_CODE (type) != INTEGER_TYPE)
5768 return fold (build3 (COND_EXPR, type, arg0,
5769 fold (build1 (code, type,
5771 fold (build1 (code, type,
5772 integer_zero_node))));
5775 else if (TREE_CODE_CLASS (code) == '<'
5776 && TREE_CODE (arg0) == COMPOUND_EXPR)
5777 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5778 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
5779 else if (TREE_CODE_CLASS (code) == '<'
5780 && TREE_CODE (arg1) == COMPOUND_EXPR)
5781 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5782 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
5783 else if (TREE_CODE_CLASS (code) == '2'
5784 || TREE_CODE_CLASS (code) == '<')
5786 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5787 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5788 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
5790 if (TREE_CODE (arg1) == COMPOUND_EXPR
5791 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
5792 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5793 fold (build2 (code, type,
5794 arg0, TREE_OPERAND (arg1, 1))));
5796 if (TREE_CODE (arg0) == COND_EXPR
5797 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5799 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5800 /*cond_first_p=*/1);
5801 if (tem != NULL_TREE)
5805 if (TREE_CODE (arg1) == COND_EXPR
5806 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
5808 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5809 /*cond_first_p=*/0);
5810 if (tem != NULL_TREE)
5818 return fold (DECL_INITIAL (t));
5823 case FIX_TRUNC_EXPR:
5825 case FIX_FLOOR_EXPR:
5826 case FIX_ROUND_EXPR:
5827 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
5828 return TREE_OPERAND (t, 0);
5830 /* Handle cases of two conversions in a row. */
5831 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5832 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5834 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5835 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5836 int inside_int = INTEGRAL_TYPE_P (inside_type);
5837 int inside_ptr = POINTER_TYPE_P (inside_type);
5838 int inside_float = FLOAT_TYPE_P (inside_type);
5839 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5840 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
5841 int inter_int = INTEGRAL_TYPE_P (inter_type);
5842 int inter_ptr = POINTER_TYPE_P (inter_type);
5843 int inter_float = FLOAT_TYPE_P (inter_type);
5844 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5845 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
5846 int final_int = INTEGRAL_TYPE_P (type);
5847 int final_ptr = POINTER_TYPE_P (type);
5848 int final_float = FLOAT_TYPE_P (type);
5849 unsigned int final_prec = TYPE_PRECISION (type);
5850 int final_unsignedp = TYPE_UNSIGNED (type);
5852 /* In addition to the cases of two conversions in a row
5853 handled below, if we are converting something to its own
5854 type via an object of identical or wider precision, neither
5855 conversion is needed. */
5856 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
5857 && ((inter_int && final_int) || (inter_float && final_float))
5858 && inter_prec >= final_prec)
5859 return fold (build1 (code, type,
5860 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5862 /* Likewise, if the intermediate and final types are either both
5863 float or both integer, we don't need the middle conversion if
5864 it is wider than the final type and doesn't change the signedness
5865 (for integers). Avoid this if the final type is a pointer
5866 since then we sometimes need the inner conversion. Likewise if
5867 the outer has a precision not equal to the size of its mode. */
5868 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5869 || (inter_float && inside_float))
5870 && inter_prec >= inside_prec
5871 && (inter_float || inter_unsignedp == inside_unsignedp)
5872 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5873 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5875 return fold (build1 (code, type,
5876 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5878 /* If we have a sign-extension of a zero-extended value, we can
5879 replace that by a single zero-extension. */
5880 if (inside_int && inter_int && final_int
5881 && inside_prec < inter_prec && inter_prec < final_prec
5882 && inside_unsignedp && !inter_unsignedp)
5883 return fold (build1 (code, type,
5884 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
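/* Illustrative example (editor's sketch, not in the original
   source).  With X an unsigned char,

     (long long) (int) x

   is a sign-extension of a zero-extended value: the intermediate
   signed widening can only see a non-negative value, so a single
   zero-extension from unsigned char to long long suffices.  */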
5886 /* Two conversions in a row are not needed unless:
5887 - some conversion is floating-point (overstrict for now), or
5888 - the intermediate type is narrower than both initial and final types, or
5890 - the intermediate type and innermost type differ in signedness,
5891 and the outermost type is wider than the intermediate, or
5892 - the initial type is a pointer type and the precisions of the
5893 intermediate and final types differ, or
5894 - the final type is a pointer type and the precisions of the
5895 initial and intermediate types differ. */
5896 if (! inside_float && ! inter_float && ! final_float
5897 && (inter_prec > inside_prec || inter_prec > final_prec)
5898 && ! (inside_int && inter_int
5899 && inter_unsignedp != inside_unsignedp
5900 && inter_prec < final_prec)
5901 && ((inter_unsignedp && inter_prec > inside_prec)
5902 == (final_unsignedp && final_prec > inter_prec))
5903 && ! (inside_ptr && inter_prec != final_prec)
5904 && ! (final_ptr && inside_prec != inter_prec)
5905 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
5906 && TYPE_MODE (type) == TYPE_MODE (inter_type))
5908 return fold (build1 (code, type,
5909 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5912 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5913 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5914 /* Detect assigning a bitfield. */
5915 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5916 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5918 /* Don't leave an assignment inside a conversion
5919 unless assigning a bitfield. */
5920 tree prev = TREE_OPERAND (t, 0);
5921 tem = copy_node (t);
5922 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
5923 /* First do the assignment, then return converted constant. */
5924 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
5925 TREE_NO_WARNING (tem) = 1;
5926 TREE_USED (tem) = 1;
5930 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5931 constant (if x has signed type, the sign bit cannot be set
5932 in c). This folds extension into the BIT_AND_EXPR. */
5933 if (INTEGRAL_TYPE_P (type)
5934 && TREE_CODE (type) != BOOLEAN_TYPE
5935 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5936 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5938 tree and = TREE_OPERAND (t, 0);
5939 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5942 if (TYPE_UNSIGNED (TREE_TYPE (and))
5943 || (TYPE_PRECISION (type)
5944 <= TYPE_PRECISION (TREE_TYPE (and))))
5946 else if (TYPE_PRECISION (TREE_TYPE (and1))
5947 <= HOST_BITS_PER_WIDE_INT
5948 && host_integerp (and1, 1))
5950 unsigned HOST_WIDE_INT cst;
5952 cst = tree_low_cst (and1, 1);
5953 cst &= (HOST_WIDE_INT) -1
5954 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5955 change = (cst == 0);
5956 #ifdef LOAD_EXTEND_OP
5958 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5961 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
5962 and0 = fold_convert (uns, and0);
5963 and1 = fold_convert (uns, and1);
5968 return fold (build2 (BIT_AND_EXPR, type,
5969 fold_convert (type, and0),
5970 fold_convert (type, and1)));
5973 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
5974 T2 being pointers to types of the same size. */
5975 if (POINTER_TYPE_P (TREE_TYPE (t))
5976 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
5977 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
5978 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5980 tree arg00 = TREE_OPERAND (arg0, 0);
5981 tree t0 = TREE_TYPE (t);
5982 tree t1 = TREE_TYPE (arg00);
5983 tree tt0 = TREE_TYPE (t0);
5984 tree tt1 = TREE_TYPE (t1);
5985 tree s0 = TYPE_SIZE (tt0);
5986 tree s1 = TYPE_SIZE (tt1);
5988 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
5989 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
5990 TREE_OPERAND (arg0, 1));
5993 tem = fold_convert_const (code, type, arg0);
5994 return tem ? tem : t;
5996 case VIEW_CONVERT_EXPR:
5997 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5998 return build1 (VIEW_CONVERT_EXPR, type,
5999 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6003 if (TREE_CODE (arg0) == CONSTRUCTOR
6004 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6006 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6008 return TREE_VALUE (m);
6013 if (TREE_CONSTANT (t) != wins)
6015 tem = copy_node (t);
6016 TREE_CONSTANT (tem) = wins;
6017 TREE_INVARIANT (tem) = wins;
6023 if (negate_expr_p (arg0))
6024 return fold_convert (type, negate_expr (arg0));
6028 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6029 return fold_abs_const (arg0, type);
6030 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6031 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6032 /* Convert fabs((double)float) into (double)fabsf(float). */
6033 else if (TREE_CODE (arg0) == NOP_EXPR
6034 && TREE_CODE (type) == REAL_TYPE)
6036 tree targ0 = strip_float_extensions (arg0);
6038 return fold_convert (type, fold (build1 (ABS_EXPR,
6042 else if (tree_expr_nonnegative_p (arg0))
6047 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6048 return fold_convert (type, arg0);
6049 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6050 return build2 (COMPLEX_EXPR, type,
6051 TREE_OPERAND (arg0, 0),
6052 negate_expr (TREE_OPERAND (arg0, 1)));
6053 else if (TREE_CODE (arg0) == COMPLEX_CST)
6054 return build_complex (type, TREE_REALPART (arg0),
6055 negate_expr (TREE_IMAGPART (arg0)));
6056 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6057 return fold (build2 (TREE_CODE (arg0), type,
6058 fold (build1 (CONJ_EXPR, type,
6059 TREE_OPERAND (arg0, 0))),
6060 fold (build1 (CONJ_EXPR, type,
6061 TREE_OPERAND (arg0, 1)))));
6062 else if (TREE_CODE (arg0) == CONJ_EXPR)
6063 return TREE_OPERAND (arg0, 0);
6067 if (TREE_CODE (arg0) == INTEGER_CST)
6068 return fold_not_const (arg0, type);
6069 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6070 return TREE_OPERAND (arg0, 0);
6071 return t;
6073 case PLUS_EXPR:
6074 /* A + (-B) -> A - B */
6075 if (TREE_CODE (arg1) == NEGATE_EXPR)
6076 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6077 /* (-A) + B -> B - A */
6078 if (TREE_CODE (arg0) == NEGATE_EXPR
6079 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6080 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6081 if (! FLOAT_TYPE_P (type))
6083 if (integer_zerop (arg1))
6084 return non_lvalue (fold_convert (type, arg0));
6086 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6087 with a constant, and the two constants have no bits in common,
6088 we should treat this as a BIT_IOR_EXPR since this may produce more
6089 simplifications. */
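/* For example, (x & 0x0F) + (y & 0xF0) cannot carry between the two
   terms, so it is equivalent to (x & 0x0F) | (y & 0xF0). */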
6090 if (TREE_CODE (arg0) == BIT_AND_EXPR
6091 && TREE_CODE (arg1) == BIT_AND_EXPR
6092 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6093 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6094 && integer_zerop (const_binop (BIT_AND_EXPR,
6095 TREE_OPERAND (arg0, 1),
6096 TREE_OPERAND (arg1, 1), 0)))
6098 code = BIT_IOR_EXPR;
6102 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6103 (plus (plus (mult) (mult)) (foo)) so that we can
6104 take advantage of the factoring cases below. */
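/* For example, ((a * c) + b) + (d * c) becomes ((a * c) + (d * c)) + b,
   exposing the common factor c to the factoring code below. */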
6105 if ((TREE_CODE (arg0) == PLUS_EXPR
6106 && TREE_CODE (arg1) == MULT_EXPR)
6107 || (TREE_CODE (arg1) == PLUS_EXPR
6108 && TREE_CODE (arg0) == MULT_EXPR))
6110 tree parg0, parg1, parg, marg;
6112 if (TREE_CODE (arg0) == PLUS_EXPR)
6113 parg = arg0, marg = arg1;
6115 parg = arg1, marg = arg0;
6116 parg0 = TREE_OPERAND (parg, 0);
6117 parg1 = TREE_OPERAND (parg, 1);
6121 if (TREE_CODE (parg0) == MULT_EXPR
6122 && TREE_CODE (parg1) != MULT_EXPR)
6123 return fold (build2 (PLUS_EXPR, type,
6124 fold (build2 (PLUS_EXPR, type,
6125 fold_convert (type, parg0),
6126 fold_convert (type, marg))),
6127 fold_convert (type, parg1)));
6128 if (TREE_CODE (parg0) != MULT_EXPR
6129 && TREE_CODE (parg1) == MULT_EXPR)
6130 return fold (build2 (PLUS_EXPR, type,
6131 fold (build2 (PLUS_EXPR, type,
6132 fold_convert (type, parg1),
6133 fold_convert (type, marg))),
6134 fold_convert (type, parg0)));
6137 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6139 tree arg00, arg01, arg10, arg11;
6140 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6142 /* (A * C) + (B * C) -> (A+B) * C.
6143 We are most concerned about the case where C is a constant,
6144 but other combinations show up during loop reduction. Since
6145 it is not difficult, try all four possibilities. */
6147 arg00 = TREE_OPERAND (arg0, 0);
6148 arg01 = TREE_OPERAND (arg0, 1);
6149 arg10 = TREE_OPERAND (arg1, 0);
6150 arg11 = TREE_OPERAND (arg1, 1);
6153 if (operand_equal_p (arg01, arg11, 0))
6154 same = arg01, alt0 = arg00, alt1 = arg10;
6155 else if (operand_equal_p (arg00, arg10, 0))
6156 same = arg00, alt0 = arg01, alt1 = arg11;
6157 else if (operand_equal_p (arg00, arg11, 0))
6158 same = arg00, alt0 = arg01, alt1 = arg10;
6159 else if (operand_equal_p (arg01, arg10, 0))
6160 same = arg01, alt0 = arg00, alt1 = arg11;
6162 /* No identical multiplicands; see if we can find a common
6163 power-of-two factor in non-power-of-two multiplies. This
6164 can help in multi-dimensional array access. */
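/* For example, a*12 + b*4 shares the power-of-two factor 4, so it
   can be rewritten as (a*3 + b) * 4. */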
6165 else if (TREE_CODE (arg01) == INTEGER_CST
6166 && TREE_CODE (arg11) == INTEGER_CST
6167 && TREE_INT_CST_HIGH (arg01) == 0
6168 && TREE_INT_CST_HIGH (arg11) == 0)
6170 HOST_WIDE_INT int01, int11, tmp;
6171 int01 = TREE_INT_CST_LOW (arg01);
6172 int11 = TREE_INT_CST_LOW (arg11);
6174 /* Move min of absolute values to int11. */
6175 if ((int01 >= 0 ? int01 : -int01)
6176 < (int11 >= 0 ? int11 : -int11))
6178 tmp = int01, int01 = int11, int11 = tmp;
6179 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6180 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6183 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6185 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6186 build_int_2 (int01 / int11, 0)));
6193 return fold (build2 (MULT_EXPR, type,
6194 fold (build2 (PLUS_EXPR, type,
6201 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6202 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6203 return non_lvalue (fold_convert (type, arg0));
6205 /* Likewise if the operands are reversed. */
6206 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6207 return non_lvalue (fold_convert (type, arg1));
6209 /* Convert x+x into x*2.0. */
6210 if (operand_equal_p (arg0, arg1, 0)
6211 && SCALAR_FLOAT_TYPE_P (type))
6212 return fold (build2 (MULT_EXPR, type, arg0,
6213 build_real (type, dconst2)));
6215 /* Convert x*c+x into x*(c+1). */
6216 if (flag_unsafe_math_optimizations
6217 && TREE_CODE (arg0) == MULT_EXPR
6218 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6219 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6220 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6221 {
6222 REAL_VALUE_TYPE c;
6224 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6225 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6226 return fold (build2 (MULT_EXPR, type, arg1,
6227 build_real (type, c)));
6230 /* Convert x+x*c into x*(c+1). */
6231 if (flag_unsafe_math_optimizations
6232 && TREE_CODE (arg1) == MULT_EXPR
6233 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6234 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6235 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6239 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6240 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6241 return fold (build2 (MULT_EXPR, type, arg0,
6242 build_real (type, c)));
6245 /* Convert x*c1+x*c2 into x*(c1+c2). */
6246 if (flag_unsafe_math_optimizations
6247 && TREE_CODE (arg0) == MULT_EXPR
6248 && TREE_CODE (arg1) == MULT_EXPR
6249 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6250 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6251 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6252 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6253 && operand_equal_p (TREE_OPERAND (arg0, 0),
6254 TREE_OPERAND (arg1, 0), 0))
6256 REAL_VALUE_TYPE c1, c2;
6258 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6259 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6260 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6261 return fold (build2 (MULT_EXPR, type,
6262 TREE_OPERAND (arg0, 0),
6263 build_real (type, c1)));
6265 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6266 if (flag_unsafe_math_optimizations
6267 && TREE_CODE (arg1) == PLUS_EXPR
6268 && TREE_CODE (arg0) != MULT_EXPR)
6270 tree tree10 = TREE_OPERAND (arg1, 0);
6271 tree tree11 = TREE_OPERAND (arg1, 1);
6272 if (TREE_CODE (tree11) == MULT_EXPR
6273 && TREE_CODE (tree10) == MULT_EXPR)
6276 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6277 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6280 /* Convert (b*c + d*e) + a into b*c + (d*e +a) */
6281 if (flag_unsafe_math_optimizations
6282 && TREE_CODE (arg0) == PLUS_EXPR
6283 && TREE_CODE (arg1) != MULT_EXPR)
6285 tree tree00 = TREE_OPERAND (arg0, 0);
6286 tree tree01 = TREE_OPERAND (arg0, 1);
6287 if (TREE_CODE (tree01) == MULT_EXPR
6288 && TREE_CODE (tree00) == MULT_EXPR)
6291 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6292 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6293 }
6294 }
6296 bit_rotate:
6298 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6299 is a rotate of A by C1 bits. */
6300 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6301 is a rotate of A by B bits. */
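/* For example, with a 32-bit unsigned A, (A << 3) + (A >> 29) is A
   rotated left by 3, and (A << B) + (A >> (32 - B)) is A rotated
   left by B. */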
6303 enum tree_code code0, code1;
6304 code0 = TREE_CODE (arg0);
6305 code1 = TREE_CODE (arg1);
6306 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6307 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6308 && operand_equal_p (TREE_OPERAND (arg0, 0),
6309 TREE_OPERAND (arg1, 0), 0)
6310 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6312 tree tree01, tree11;
6313 enum tree_code code01, code11;
6315 tree01 = TREE_OPERAND (arg0, 1);
6316 tree11 = TREE_OPERAND (arg1, 1);
6317 STRIP_NOPS (tree01);
6318 STRIP_NOPS (tree11);
6319 code01 = TREE_CODE (tree01);
6320 code11 = TREE_CODE (tree11);
6321 if (code01 == INTEGER_CST
6322 && code11 == INTEGER_CST
6323 && TREE_INT_CST_HIGH (tree01) == 0
6324 && TREE_INT_CST_HIGH (tree11) == 0
6325 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6326 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6327 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6328 code0 == LSHIFT_EXPR ? tree01 : tree11);
6329 else if (code11 == MINUS_EXPR)
6331 tree tree110, tree111;
6332 tree110 = TREE_OPERAND (tree11, 0);
6333 tree111 = TREE_OPERAND (tree11, 1);
6334 STRIP_NOPS (tree110);
6335 STRIP_NOPS (tree111);
6336 if (TREE_CODE (tree110) == INTEGER_CST
6337 && 0 == compare_tree_int (tree110,
6338 TYPE_PRECISION
6339 (TREE_TYPE (TREE_OPERAND
6340 (arg0, 0))))
6341 && operand_equal_p (tree01, tree111, 0))
6342 return build2 ((code0 == LSHIFT_EXPR
6343 ? LROTATE_EXPR
6344 : RROTATE_EXPR),
6345 type, TREE_OPERAND (arg0, 0), tree01);
6347 else if (code01 == MINUS_EXPR)
6349 tree tree010, tree011;
6350 tree010 = TREE_OPERAND (tree01, 0);
6351 tree011 = TREE_OPERAND (tree01, 1);
6352 STRIP_NOPS (tree010);
6353 STRIP_NOPS (tree011);
6354 if (TREE_CODE (tree010) == INTEGER_CST
6355 && 0 == compare_tree_int (tree010,
6356 TYPE_PRECISION
6357 (TREE_TYPE (TREE_OPERAND
6358 (arg0, 0))))
6359 && operand_equal_p (tree11, tree011, 0))
6360 return build2 ((code0 != LSHIFT_EXPR
6361 ? LROTATE_EXPR
6362 : RROTATE_EXPR),
6363 type, TREE_OPERAND (arg0, 0), tree11);
6364 }
6365 }
6366 }
6368 associate:
6369 /* In most languages, we can't associate operations on floats through
6370 parentheses. Rather than remember where the parentheses were, we
6371 don't associate floats at all, unless the user has specified
6372 -funsafe-math-optimizations. */
6374 if (! wins
6375 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6377 tree var0, con0, lit0, minus_lit0;
6378 tree var1, con1, lit1, minus_lit1;
6380 /* Split both trees into variables, constants, and literals. Then
6381 associate each group together, the constants with literals,
6382 then the result with variables. This increases the chances of
6383 literals being recombined later and of generating relocatable
6384 expressions for the sum of a constant and literal. */
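/* For example, (x + 1) + (y + 2) splits into the variables x and y
   and the literals 1 and 2, which recombine as (x + y) + 3. */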
6385 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6386 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6387 code == MINUS_EXPR);
6389 /* Only do something if we found more than two objects. Otherwise,
6390 nothing has changed and we risk infinite recursion. */
6391 if (2 < ((var0 != 0) + (var1 != 0)
6392 + (con0 != 0) + (con1 != 0)
6393 + (lit0 != 0) + (lit1 != 0)
6394 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6396 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6397 if (code == MINUS_EXPR)
6400 var0 = associate_trees (var0, var1, code, type);
6401 con0 = associate_trees (con0, con1, code, type);
6402 lit0 = associate_trees (lit0, lit1, code, type);
6403 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6405 /* Preserve the MINUS_EXPR if the negative part of the literal is
6406 greater than the positive part. Otherwise, the multiplicative
6407 folding code (i.e. extract_muldiv) may be fooled in case
6408 unsigned constants are subtracted, like in the following
6409 example: ((X*2 + 4) - 8U)/2. */
6410 if (minus_lit0 && lit0)
6412 if (TREE_CODE (lit0) == INTEGER_CST
6413 && TREE_CODE (minus_lit0) == INTEGER_CST
6414 && tree_int_cst_lt (lit0, minus_lit0))
6416 minus_lit0 = associate_trees (minus_lit0, lit0,
6422 lit0 = associate_trees (lit0, minus_lit0,
6430 return fold_convert (type,
6431 associate_trees (var0, minus_lit0,
6435 con0 = associate_trees (con0, minus_lit0,
6437 return fold_convert (type,
6438 associate_trees (var0, con0,
6443 con0 = associate_trees (con0, lit0, code, type);
6444 return fold_convert (type, associate_trees (var0, con0,
6445 code, type));
6446 }
6447 }
6449 binary:
6451 t1 = const_binop (code, arg0, arg1, 0);
6452 if (t1 != NULL_TREE)
6454 /* The return value should always have
6455 the same type as the original expression. */
6456 if (TREE_TYPE (t1) != type)
6457 t1 = fold_convert (type, t1);
6459 return t1;
6460 }
6461 return t;
6463 case MINUS_EXPR:
6464 /* A - (-B) -> A + B */
6465 if (TREE_CODE (arg1) == NEGATE_EXPR)
6466 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6467 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6468 if (TREE_CODE (arg0) == NEGATE_EXPR
6469 && (FLOAT_TYPE_P (type)
6470 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6471 && negate_expr_p (arg1)
6472 && reorder_operands_p (arg0, arg1))
6473 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6474 TREE_OPERAND (arg0, 0)));
6476 if (! FLOAT_TYPE_P (type))
6478 if (! wins && integer_zerop (arg0))
6479 return negate_expr (fold_convert (type, arg1));
6480 if (integer_zerop (arg1))
6481 return non_lvalue (fold_convert (type, arg0));
6483 /* Fold A - (A & B) into ~B & A. */
6484 if (!TREE_SIDE_EFFECTS (arg0)
6485 && TREE_CODE (arg1) == BIT_AND_EXPR)
6487 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6488 return fold (build2 (BIT_AND_EXPR, type,
6489 fold (build1 (BIT_NOT_EXPR, type,
6490 TREE_OPERAND (arg1, 0))),
6492 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6493 return fold (build2 (BIT_AND_EXPR, type,
6494 fold (build1 (BIT_NOT_EXPR, type,
6495 TREE_OPERAND (arg1, 1))),
6499 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6500 any power of 2 minus 1. */
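/* For example, with B == 7: (A & ~7) - (A & 7) == (A ^ 7) - 7,
   since A ^ 7 == (A & ~7) + (7 - (A & 7)). */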
6501 if (TREE_CODE (arg0) == BIT_AND_EXPR
6502 && TREE_CODE (arg1) == BIT_AND_EXPR
6503 && operand_equal_p (TREE_OPERAND (arg0, 0),
6504 TREE_OPERAND (arg1, 0), 0))
6506 tree mask0 = TREE_OPERAND (arg0, 1);
6507 tree mask1 = TREE_OPERAND (arg1, 1);
6508 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6510 if (operand_equal_p (tem, mask1, 0))
6512 tem = fold (build2 (BIT_XOR_EXPR, type,
6513 TREE_OPERAND (arg0, 0), mask1));
6514 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6519 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6520 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6521 return non_lvalue (fold_convert (type, arg0));
6523 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6524 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6525 (-ARG1 + ARG0) reduces to -ARG1. */
6526 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6527 return negate_expr (fold_convert (type, arg1));
6529 /* Fold &x - &x. This can happen from &x.foo - &x.
6530 This is unsafe for certain floats even in non-IEEE formats.
6531 In IEEE, it is unsafe because it does wrong for NaNs.
6532 Also note that operand_equal_p is always false if an operand
6535 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6536 && operand_equal_p (arg0, arg1, 0))
6537 return fold_convert (type, integer_zero_node);
6539 /* A - B -> A + (-B) if B is easily negatable. */
6540 if (!wins && negate_expr_p (arg1)
6541 && (FLOAT_TYPE_P (type)
6542 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6543 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6545 if (TREE_CODE (arg0) == MULT_EXPR
6546 && TREE_CODE (arg1) == MULT_EXPR
6547 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6549 /* (A * C) - (B * C) -> (A-B) * C. */
6550 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6551 TREE_OPERAND (arg1, 1), 0))
6552 return fold (build2 (MULT_EXPR, type,
6553 fold (build2 (MINUS_EXPR, type,
6554 TREE_OPERAND (arg0, 0),
6555 TREE_OPERAND (arg1, 0))),
6556 TREE_OPERAND (arg0, 1)));
6557 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6558 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6559 TREE_OPERAND (arg1, 0), 0))
6560 return fold (build2 (MULT_EXPR, type,
6561 TREE_OPERAND (arg0, 0),
6562 fold (build2 (MINUS_EXPR, type,
6563 TREE_OPERAND (arg0, 1),
6564 TREE_OPERAND (arg1, 1)))));
6565 }
6567 goto associate;
6569 case MULT_EXPR:
6570 /* (-A) * (-B) -> A * B */
6571 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6572 return fold (build2 (MULT_EXPR, type,
6573 TREE_OPERAND (arg0, 0),
6574 negate_expr (arg1)));
6575 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6576 return fold (build2 (MULT_EXPR, type,
6578 TREE_OPERAND (arg1, 0)));
6580 if (! FLOAT_TYPE_P (type))
6582 if (integer_zerop (arg1))
6583 return omit_one_operand (type, arg1, arg0);
6584 if (integer_onep (arg1))
6585 return non_lvalue (fold_convert (type, arg0));
6587 /* (a * (1 << b)) is (a << b) */
6588 if (TREE_CODE (arg1) == LSHIFT_EXPR
6589 && integer_onep (TREE_OPERAND (arg1, 0)))
6590 return fold (build2 (LSHIFT_EXPR, type, arg0,
6591 TREE_OPERAND (arg1, 1)));
6592 if (TREE_CODE (arg0) == LSHIFT_EXPR
6593 && integer_onep (TREE_OPERAND (arg0, 0)))
6594 return fold (build2 (LSHIFT_EXPR, type, arg1,
6595 TREE_OPERAND (arg0, 1)));
6597 if (TREE_CODE (arg1) == INTEGER_CST
6598 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6599 fold_convert (type, arg1),
6600 code, NULL_TREE)))
6601 return fold_convert (type, tem);
6606 /* Maybe fold x * 0 to 0. The expressions aren't the same
6607 when x is NaN, since x * 0 is also NaN. Nor are they the
6608 same in modes with signed zeros, since multiplying a
6609 negative value by 0 gives -0, not +0. */
6610 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6611 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6612 && real_zerop (arg1))
6613 return omit_one_operand (type, arg1, arg0);
6614 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6615 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6616 && real_onep (arg1))
6617 return non_lvalue (fold_convert (type, arg0));
6619 /* Transform x * -1.0 into -x. */
6620 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6621 && real_minus_onep (arg1))
6622 return fold_convert (type, negate_expr (arg0));
6624 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6625 if (flag_unsafe_math_optimizations
6626 && TREE_CODE (arg0) == RDIV_EXPR
6627 && TREE_CODE (arg1) == REAL_CST
6628 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6630 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6631 arg1, 0);
6632 if (tem)
6633 return fold (build2 (RDIV_EXPR, type, tem,
6634 TREE_OPERAND (arg0, 1)));
6637 if (flag_unsafe_math_optimizations)
6639 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6640 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6642 /* Optimizations of root(...)*root(...). */
6643 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
6645 tree rootfn, arg, arglist;
6646 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6647 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6649 /* Optimize sqrt(x)*sqrt(x) as x. */
6650 if (BUILTIN_SQRT_P (fcode0)
6651 && operand_equal_p (arg00, arg10, 0)
6652 && ! HONOR_SNANS (TYPE_MODE (type)))
6653 return arg00;
6655 /* Optimize root(x)*root(y) as root(x*y). */
6656 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6657 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
6658 arglist = build_tree_list (NULL_TREE, arg);
6659 return build_function_call_expr (rootfn, arglist);
6662 /* Optimize expN(x)*expN(y) as expN(x+y). */
6663 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
6665 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6666 tree arg = build2 (PLUS_EXPR, type,
6667 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6668 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6669 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6670 return build_function_call_expr (expfn, arglist);
6673 /* Optimizations of pow(...)*pow(...). */
6674 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6675 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6676 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6678 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6679 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6680 1)));
6681 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6682 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6683 1)));
6685 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6686 if (operand_equal_p (arg01, arg11, 0))
6688 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6689 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
6690 tree arglist = tree_cons (NULL_TREE, fold (arg),
6691 build_tree_list (NULL_TREE,
6693 return build_function_call_expr (powfn, arglist);
6696 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6697 if (operand_equal_p (arg00, arg10, 0))
6699 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6700 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
6701 tree arglist = tree_cons (NULL_TREE, arg00,
6702 build_tree_list (NULL_TREE,
6704 return build_function_call_expr (powfn, arglist);
6708 /* Optimize tan(x)*cos(x) as sin(x). */
6709 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6710 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6711 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6712 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6713 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6714 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6715 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6716 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6718 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
6720 if (sinfn != NULL_TREE)
6721 return build_function_call_expr (sinfn,
6722 TREE_OPERAND (arg0, 1));
6725 /* Optimize x*pow(x,c) as pow(x,c+1). */
6726 if (fcode1 == BUILT_IN_POW
6727 || fcode1 == BUILT_IN_POWF
6728 || fcode1 == BUILT_IN_POWL)
6730 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6731 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6733 if (TREE_CODE (arg11) == REAL_CST
6734 && ! TREE_CONSTANT_OVERFLOW (arg11)
6735 && operand_equal_p (arg0, arg10, 0))
6737 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6741 c = TREE_REAL_CST (arg11);
6742 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6743 arg = build_real (type, c);
6744 arglist = build_tree_list (NULL_TREE, arg);
6745 arglist = tree_cons (NULL_TREE, arg0, arglist);
6746 return build_function_call_expr (powfn, arglist);
6750 /* Optimize pow(x,c)*x as pow(x,c+1). */
6751 if (fcode0 == BUILT_IN_POW
6752 || fcode0 == BUILT_IN_POWF
6753 || fcode0 == BUILT_IN_POWL)
6755 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6756 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6758 if (TREE_CODE (arg01) == REAL_CST
6759 && ! TREE_CONSTANT_OVERFLOW (arg01)
6760 && operand_equal_p (arg1, arg00, 0))
6762 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6766 c = TREE_REAL_CST (arg01);
6767 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6768 arg = build_real (type, c);
6769 arglist = build_tree_list (NULL_TREE, arg);
6770 arglist = tree_cons (NULL_TREE, arg1, arglist);
6771 return build_function_call_expr (powfn, arglist);
6775 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6777 && operand_equal_p (arg0, arg1, 0))
6779 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
6783 tree arg = build_real (type, dconst2);
6784 tree arglist = build_tree_list (NULL_TREE, arg);
6785 arglist = tree_cons (NULL_TREE, arg0, arglist);
6786 return build_function_call_expr (powfn, arglist);
6792 goto associate;
6794 case BIT_IOR_EXPR:
6795 if (integer_all_onesp (arg1))
6796 return omit_one_operand (type, arg1, arg0);
6797 if (integer_zerop (arg1))
6798 return non_lvalue (fold_convert (type, arg0));
6799 if (operand_equal_p (arg0, arg1, 0))
6800 return non_lvalue (fold_convert (type, arg0));
6801 t1 = distribute_bit_expr (code, type, arg0, arg1);
6802 if (t1 != NULL_TREE)
6803 return t1;
6805 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6807 This results in more efficient code for machines without a NAND
6808 instruction. Combine will canonicalize to the first form
6809 which will allow use of NAND instructions provided by the
6810 backend if they exist. */
6811 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6812 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6814 return fold (build1 (BIT_NOT_EXPR, type,
6815 build2 (BIT_AND_EXPR, type,
6816 TREE_OPERAND (arg0, 0),
6817 TREE_OPERAND (arg1, 0))));
6820 /* See if this can be simplified into a rotate first. If that
6821 is unsuccessful continue in the association code. */
6822 goto bit_rotate;
6824 case BIT_XOR_EXPR:
6825 if (integer_zerop (arg1))
6826 return non_lvalue (fold_convert (type, arg0));
6827 if (integer_all_onesp (arg1))
6828 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6829 if (operand_equal_p (arg0, arg1, 0))
6830 return omit_one_operand (type, integer_zero_node, arg0);
6832 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6833 with a constant, and the two constants have no bits in common,
6834 we should treat this as a BIT_IOR_EXPR since this may produce more
6835 simplifications. */
6836 if (TREE_CODE (arg0) == BIT_AND_EXPR
6837 && TREE_CODE (arg1) == BIT_AND_EXPR
6838 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6839 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6840 && integer_zerop (const_binop (BIT_AND_EXPR,
6841 TREE_OPERAND (arg0, 1),
6842 TREE_OPERAND (arg1, 1), 0)))
6844 code = BIT_IOR_EXPR;
6848 /* See if this can be simplified into a rotate first. If that
6849 is unsuccessful continue in the association code. */
6850 goto bit_rotate;
6852 case BIT_AND_EXPR:
6853 if (integer_all_onesp (arg1))
6854 return non_lvalue (fold_convert (type, arg0));
6855 if (integer_zerop (arg1))
6856 return omit_one_operand (type, arg1, arg0);
6857 if (operand_equal_p (arg0, arg1, 0))
6858 return non_lvalue (fold_convert (type, arg0));
6859 t1 = distribute_bit_expr (code, type, arg0, arg1);
6860 if (t1 != NULL_TREE)
6861 return t1;
6862 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6863 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6864 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6865 {
6866 unsigned int prec
6867 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6869 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6870 && (~TREE_INT_CST_LOW (arg1)
6871 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6872 return fold_convert (type, TREE_OPERAND (arg0, 0));
6875 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6877 This results in more efficient code for machines without a NOR
6878 instruction. Combine will canonicalize to the first form
6879 which will allow use of NOR instructions provided by the
6880 backend if they exist. */
6881 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6882 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6884 return fold (build1 (BIT_NOT_EXPR, type,
6885 build2 (BIT_IOR_EXPR, type,
6886 TREE_OPERAND (arg0, 0),
6887 TREE_OPERAND (arg1, 0))));
6888 }
6890 goto associate;
6892 case RDIV_EXPR:
6893 /* Don't touch a floating-point divide by zero unless the mode
6894 of the constant can represent infinity. */
6895 if (TREE_CODE (arg1) == REAL_CST
6896 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6897 && real_zerop (arg1))
6898 return t;
6900 /* (-A) / (-B) -> A / B */
6901 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6902 return fold (build2 (RDIV_EXPR, type,
6903 TREE_OPERAND (arg0, 0),
6904 negate_expr (arg1)));
6905 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6906 return fold (build2 (RDIV_EXPR, type,
6908 TREE_OPERAND (arg1, 0)));
6910 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6911 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6912 && real_onep (arg1))
6913 return non_lvalue (fold_convert (type, arg0));
6915 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6916 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6917 && real_minus_onep (arg1))
6918 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6920 /* If ARG1 is a constant, we can convert this to a multiply by the
6921 reciprocal. This does not have the same rounding properties,
6922 so only do this if -funsafe-math-optimizations. We can actually
6923 always safely do it if ARG1 is a power of two, but it's hard to
6924 tell if it is or not in a portable manner. */
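/* For example, x / 4.0 can become x * 0.25 whenever we optimize,
   since 0.25 is an exact reciprocal; x / 3.0 becomes x * (1.0/3.0)
   only under -funsafe-math-optimizations. */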
6925 if (TREE_CODE (arg1) == REAL_CST)
6927 if (flag_unsafe_math_optimizations
6928 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6929 arg1, 0)))
6930 return fold (build2 (MULT_EXPR, type, arg0, tem));
6931 /* Find the reciprocal if optimizing and the result is exact. */
6932 if (optimize)
6933 {
6934 REAL_VALUE_TYPE r;
6935 r = TREE_REAL_CST (arg1);
6936 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6938 tem = build_real (type, r);
6939 return fold (build2 (MULT_EXPR, type, arg0, tem));
6943 /* Convert A/B/C to A/(B*C). */
6944 if (flag_unsafe_math_optimizations
6945 && TREE_CODE (arg0) == RDIV_EXPR)
6946 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6947 fold (build2 (MULT_EXPR, type,
6948 TREE_OPERAND (arg0, 1), arg1))));
6950 /* Convert A/(B/C) to (A/B)*C. */
6951 if (flag_unsafe_math_optimizations
6952 && TREE_CODE (arg1) == RDIV_EXPR)
6953 return fold (build2 (MULT_EXPR, type,
6954 fold (build2 (RDIV_EXPR, type, arg0,
6955 TREE_OPERAND (arg1, 0))),
6956 TREE_OPERAND (arg1, 1)));
6958 /* Convert C1/(X*C2) into (C1/C2)/X. */
6959 if (flag_unsafe_math_optimizations
6960 && TREE_CODE (arg1) == MULT_EXPR
6961 && TREE_CODE (arg0) == REAL_CST
6962 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6964 tree tem = const_binop (RDIV_EXPR, arg0,
6965 TREE_OPERAND (arg1, 1), 0);
6966 if (tem)
6967 return fold (build2 (RDIV_EXPR, type, tem,
6968 TREE_OPERAND (arg1, 0)));
6971 if (flag_unsafe_math_optimizations)
6973 enum built_in_function fcode = builtin_mathfn_code (arg1);
6974 /* Optimize x/expN(y) into x*expN(-y). */
6975 if (BUILTIN_EXPONENT_P (fcode))
6977 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6978 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
6979 tree arglist = build_tree_list (NULL_TREE,
6980 fold_convert (type, arg));
6981 arg1 = build_function_call_expr (expfn, arglist);
6982 return fold (build2 (MULT_EXPR, type, arg0, arg1));
6985 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6986 if (fcode == BUILT_IN_POW
6987 || fcode == BUILT_IN_POWF
6988 || fcode == BUILT_IN_POWL)
6990 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6991 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6992 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6993 tree neg11 = fold_convert (type, negate_expr (arg11));
6994 tree arglist = tree_cons(NULL_TREE, arg10,
6995 build_tree_list (NULL_TREE, neg11));
6996 arg1 = build_function_call_expr (powfn, arglist);
6997 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7001 if (flag_unsafe_math_optimizations)
7003 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7004 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7006 /* Optimize sin(x)/cos(x) as tan(x). */
7007 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7008 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7009 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7010 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7011 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7013 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7015 if (tanfn != NULL_TREE)
7016 return build_function_call_expr (tanfn,
7017 TREE_OPERAND (arg0, 1));
7020 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7021 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7022 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7023 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7024 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7025 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7027 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7029 if (tanfn != NULL_TREE)
7031 tree tmp = TREE_OPERAND (arg0, 1);
7032 tmp = build_function_call_expr (tanfn, tmp);
7033 return fold (build2 (RDIV_EXPR, type,
7034 build_real (type, dconst1), tmp));
7038 /* Optimize pow(x,c)/x as pow(x,c-1). */
7039 if (fcode0 == BUILT_IN_POW
7040 || fcode0 == BUILT_IN_POWF
7041 || fcode0 == BUILT_IN_POWL)
7043 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7044 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7045 if (TREE_CODE (arg01) == REAL_CST
7046 && ! TREE_CONSTANT_OVERFLOW (arg01)
7047 && operand_equal_p (arg1, arg00, 0))
7049 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7053 c = TREE_REAL_CST (arg01);
7054 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7055 arg = build_real (type, c);
7056 arglist = build_tree_list (NULL_TREE, arg);
7057 arglist = tree_cons (NULL_TREE, arg1, arglist);
7058 return build_function_call_expr (powfn, arglist);
7064 case TRUNC_DIV_EXPR:
7065 case ROUND_DIV_EXPR:
7066 case FLOOR_DIV_EXPR:
7067 case CEIL_DIV_EXPR:
7068 case EXACT_DIV_EXPR:
7069 if (integer_onep (arg1))
7070 return non_lvalue (fold_convert (type, arg0));
7071 if (integer_zerop (arg1))
7072 return t;
7073 /* X / -1 is -X. */
7074 if (!TYPE_UNSIGNED (type)
7075 && TREE_CODE (arg1) == INTEGER_CST
7076 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7077 && TREE_INT_CST_HIGH (arg1) == -1)
7078 return fold_convert (type, negate_expr (arg0));
7080 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7081 operation, EXACT_DIV_EXPR.
7083 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7084 At one time others generated faster code, it's not clear if they do
7085 after the last round of changes to the DIV code in expmed.c. */
7086 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7087 && multiple_of_p (type, arg0, arg1))
7088 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7090 if (TREE_CODE (arg1) == INTEGER_CST
7091 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7092 code, NULL_TREE)))
7093 return fold_convert (type, tem);
7095 goto binary;
7097 case CEIL_MOD_EXPR:
7098 case FLOOR_MOD_EXPR:
7099 case ROUND_MOD_EXPR:
7100 case TRUNC_MOD_EXPR:
7101 if (integer_onep (arg1))
7102 return omit_one_operand (type, integer_zero_node, arg0);
7103 if (integer_zerop (arg1))
7104 return t;
7105 /* X % -1 is zero. */
7106 if (!TYPE_UNSIGNED (type)
7107 && TREE_CODE (arg1) == INTEGER_CST
7108 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7109 && TREE_INT_CST_HIGH (arg1) == -1)
7110 return omit_one_operand (type, integer_zero_node, arg0);
7112 if (TREE_CODE (arg1) == INTEGER_CST
7113 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7114 code, NULL_TREE)))
7115 return fold_convert (type, tem);
7117 goto binary;
7119 case LROTATE_EXPR:
7120 case RROTATE_EXPR:
7121 if (integer_all_onesp (arg0))
7122 return omit_one_operand (type, arg0, arg1);
7123 goto shift;
7125 case RSHIFT_EXPR:
7126 /* Optimize -1 >> x for arithmetic right shifts. */
7127 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7128 return omit_one_operand (type, arg0, arg1);
7129 /* ... fall through ... */
7131 case LSHIFT_EXPR:
7132 shift:
7133 if (integer_zerop (arg1))
7134 return non_lvalue (fold_convert (type, arg0));
7135 if (integer_zerop (arg0))
7136 return omit_one_operand (type, arg0, arg1);
7138 /* Since negative shift count is not well-defined,
7139 don't try to compute it in the compiler. */
7140 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7141 return t;
7142 /* Rewrite an LROTATE_EXPR by a constant into an
7143 RROTATE_EXPR by a new constant. */
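/* For example, in a 32-bit mode, rotating left by 3 yields the same
   value as rotating right by 32 - 3 == 29. */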
7144 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7146 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
7147 tem = fold_convert (TREE_TYPE (arg1), tem);
7148 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7149 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7152 /* If we have a rotate of a bit operation with the rotate count and
7153 the second operand of the bit operation both constant,
7154 permute the two operations. */
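/* For example, (x & C) rotated right by N becomes
   (x rotated right by N) & (C rotated right by N), and the rotate
   of the constant C folds at compile time. */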
7155 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7156 && (TREE_CODE (arg0) == BIT_AND_EXPR
7157 || TREE_CODE (arg0) == BIT_IOR_EXPR
7158 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7159 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7160 return fold (build2 (TREE_CODE (arg0), type,
7161 fold (build2 (code, type,
7162 TREE_OPERAND (arg0, 0), arg1)),
7163 fold (build2 (code, type,
7164 TREE_OPERAND (arg0, 1), arg1))));
7166 /* Two consecutive rotates adding up to the width of the mode can
7167 be ignored. */
7168 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7169 && TREE_CODE (arg0) == RROTATE_EXPR
7170 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7171 && TREE_INT_CST_HIGH (arg1) == 0
7172 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7173 && ((TREE_INT_CST_LOW (arg1)
7174 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7175 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7176 return TREE_OPERAND (arg0, 0);
7178 goto binary;
7180 case MIN_EXPR:
7181 if (operand_equal_p (arg0, arg1, 0))
7182 return omit_one_operand (type, arg0, arg1);
7183 if (INTEGRAL_TYPE_P (type)
7184 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7185 return omit_one_operand (type, arg1, arg0);
7186 goto associate;
7188 case MAX_EXPR:
7189 if (operand_equal_p (arg0, arg1, 0))
7190 return omit_one_operand (type, arg0, arg1);
7191 if (INTEGRAL_TYPE_P (type)
7192 && TYPE_MAX_VALUE (type)
7193 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7194 return omit_one_operand (type, arg1, arg0);
7195 goto associate;
7197 case TRUTH_NOT_EXPR:
7198 /* The argument to invert_truthvalue must have Boolean type. */
7199 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7200 arg0 = fold_convert (boolean_type_node, arg0);
7202 /* Note that the operand of this must be an int
7203 and its values must be 0 or 1.
7204 ("true" is a fixed value perhaps depending on the language,
7205 but we don't handle values other than 1 correctly yet.) */
7206 tem = invert_truthvalue (arg0);
7207 /* Avoid infinite recursion. */
7208 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7210 tem = fold_single_bit_test (code, arg0, arg1, type);
7215 return fold_convert (type, tem);
7217 case TRUTH_ANDIF_EXPR:
7218 /* Note that the operands of this must be ints
7219 and their values must be 0 or 1.
7220 ("true" is a fixed value perhaps depending on the language.) */
7221 /* If first arg is constant zero, return it. */
7222 if (integer_zerop (arg0))
7223 return fold_convert (type, arg0);
7224 case TRUTH_AND_EXPR:
7225 /* If either arg is constant true, drop it. */
7226 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7227 return non_lvalue (fold_convert (type, arg1));
7228 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7229 /* Preserve sequence points. */
7230 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7231 return non_lvalue (fold_convert (type, arg0));
7232 /* If second arg is constant zero, result is zero, but first arg
7233 must be evaluated. */
7234 if (integer_zerop (arg1))
7235 return omit_one_operand (type, arg1, arg0);
7236 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7237 case will be handled here. */
7238 if (integer_zerop (arg0))
7239 return omit_one_operand (type, arg0, arg1);
7242 /* We only do these simplifications if we are optimizing. */
7243 if (!optimize)
7244 return t;
7246 /* Check for things like (A || B) && (A || C). We can convert this
7247 to A || (B && C). Note that either operator can be any of the four
7248 truth and/or operations and the transformation will still be
7249 valid. Also note that we only care about order for the
7250 ANDIF and ORIF operators. If B contains side effects, this
7251 might change the truth-value of A. */
7252 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7253 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7254 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7255 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7256 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7257 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7259 tree a00 = TREE_OPERAND (arg0, 0);
7260 tree a01 = TREE_OPERAND (arg0, 1);
7261 tree a10 = TREE_OPERAND (arg1, 0);
7262 tree a11 = TREE_OPERAND (arg1, 1);
7263 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7264 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7265 && (code == TRUTH_AND_EXPR
7266 || code == TRUTH_OR_EXPR));
7268 if (operand_equal_p (a00, a10, 0))
7269 return fold (build2 (TREE_CODE (arg0), type, a00,
7270 fold (build2 (code, type, a01, a11))));
7271 else if (commutative && operand_equal_p (a00, a11, 0))
7272 return fold (build2 (TREE_CODE (arg0), type, a00,
7273 fold (build2 (code, type, a01, a10))));
7274 else if (commutative && operand_equal_p (a01, a10, 0))
7275 return fold (build2 (TREE_CODE (arg0), type, a01,
7276 fold (build2 (code, type, a00, a11))));
7278 /* This case is tricky because we must either have commutative
7279 operators or else A10 must not have side-effects. */
7281 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7282 && operand_equal_p (a01, a11, 0))
7283 return fold (build2 (TREE_CODE (arg0), type,
7284 fold (build2 (code, type, a00, a10)),
7288 /* See if we can build a range comparison. */
7289 if (0 != (tem = fold_range_test (t)))
7290 return tem;
7292 /* Check for the possibility of merging component references. If our
7293 lhs is another similar operation, try to merge its rhs with our
7294 rhs. Then try to merge our lhs and rhs. */
7295 if (TREE_CODE (arg0) == code
7296 && 0 != (tem = fold_truthop (code, type,
7297 TREE_OPERAND (arg0, 1), arg1)))
7298 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7300 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7301 return tem;
7303 return t;
7305 case TRUTH_ORIF_EXPR:
7306 /* Note that the operands of this must be ints
7307 and their values must be 0 or true.
7308 ("true" is a fixed value perhaps depending on the language.) */
7309 /* If first arg is constant true, return it. */
7310 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7311 return fold_convert (type, arg0);
7313 /* If either arg is constant zero, drop it. */
7314 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7315 return non_lvalue (fold_convert (type, arg1));
7316 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7317 /* Preserve sequence points. */
7318 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7319 return non_lvalue (fold_convert (type, arg0));
7320 /* If second arg is constant true, result is true, but we must
7321 evaluate first arg. */
7322 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7323 return omit_one_operand (type, arg1, arg0);
7324 Likewise for first arg, but note this only occurs here for
7325 TRUTH_OR_EXPR. */
7326 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7327 return omit_one_operand (type, arg0, arg1);
7330 case TRUTH_XOR_EXPR:
7331 /* If either arg is constant zero, drop it. */
7332 if (integer_zerop (arg0))
7333 return non_lvalue (fold_convert (type, arg1));
7334 if (integer_zerop (arg1))
7335 return non_lvalue (fold_convert (type, arg0));
7336 /* If either arg is constant true, this is a logical inversion. */
7337 if (integer_onep (arg0))
7338 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7339 if (integer_onep (arg1))
7340 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7341 /* Identical arguments cancel to zero. */
7342 if (operand_equal_p (arg0, arg1, 0))
7343 return omit_one_operand (type, integer_zero_node, arg0);
7344 return t;
7346 case EQ_EXPR:
7347 case NE_EXPR:
7348 case LT_EXPR:
7349 case GT_EXPR:
7350 case LE_EXPR:
7351 case GE_EXPR:
7352 /* If one arg is a real or integer constant, put it last. */
7353 if (tree_swap_operands_p (arg0, arg1, true))
7354 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7356 /* If this is an equality comparison of the address of a non-weak
7357 object against zero, then we know the result. */
7358 if ((code == EQ_EXPR || code == NE_EXPR)
7359 && TREE_CODE (arg0) == ADDR_EXPR
7360 && DECL_P (TREE_OPERAND (arg0, 0))
7361 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7362 && integer_zerop (arg1))
7363 return constant_boolean_node (code != EQ_EXPR, type);
7365 /* If this is an equality comparison of the address of two non-weak,
7366 unaliased symbols neither of which are extern (since we do not
7367 have access to attributes for externs), then we know the result. */
7368 if ((code == EQ_EXPR || code == NE_EXPR)
7369 && TREE_CODE (arg0) == ADDR_EXPR
7370 && DECL_P (TREE_OPERAND (arg0, 0))
7371 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7372 && ! lookup_attribute ("alias",
7373 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7374 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7375 && TREE_CODE (arg1) == ADDR_EXPR
7376 && DECL_P (TREE_OPERAND (arg1, 0))
7377 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7378 && ! lookup_attribute ("alias",
7379 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7380 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7381 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7382 ? code == EQ_EXPR : code != EQ_EXPR,
7385 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7387 tree targ0 = strip_float_extensions (arg0);
7388 tree targ1 = strip_float_extensions (arg1);
7389 tree newtype = TREE_TYPE (targ0);
7391 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7392 newtype = TREE_TYPE (targ1);
7394 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7395 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7396 return fold (build2 (code, type, fold_convert (newtype, targ0),
7397 fold_convert (newtype, targ1)));
7399 /* (-a) CMP (-b) -> b CMP a */
7400 if (TREE_CODE (arg0) == NEGATE_EXPR
7401 && TREE_CODE (arg1) == NEGATE_EXPR)
7402 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7403 TREE_OPERAND (arg0, 0)));
7405 if (TREE_CODE (arg1) == REAL_CST)
7407 REAL_VALUE_TYPE cst;
7408 cst = TREE_REAL_CST (arg1);
7410 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7411 if (TREE_CODE (arg0) == NEGATE_EXPR)
7412 return
7413 fold (build2 (swap_tree_comparison (code), type,
7414 TREE_OPERAND (arg0, 0),
7415 build_real (TREE_TYPE (arg1),
7416 REAL_VALUE_NEGATE (cst))));
7418 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7419 /* a CMP (-0) -> a CMP 0 */
7420 if (REAL_VALUE_MINUS_ZERO (cst))
7421 return fold (build2 (code, type, arg0,
7422 build_real (TREE_TYPE (arg1), dconst0)));
7424 /* x != NaN is always true, other ops are always false. */
7425 if (REAL_VALUE_ISNAN (cst)
7426 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7428 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7429 return omit_one_operand (type, tem, arg0);
7432 /* Fold comparisons against infinity. */
7433 if (REAL_VALUE_ISINF (cst))
7435 tem = fold_inf_compare (code, type, arg0, arg1);
7436 if (tem != NULL_TREE)
7441 /* If this is a comparison of a real constant with a PLUS_EXPR
7442 or a MINUS_EXPR of a real constant, we can convert it into a
7443 comparison with a revised real constant as long as no overflow
7444 occurs when unsafe_math_optimizations are enabled. */
7445 if (flag_unsafe_math_optimizations
7446 && TREE_CODE (arg1) == REAL_CST
7447 && (TREE_CODE (arg0) == PLUS_EXPR
7448 || TREE_CODE (arg0) == MINUS_EXPR)
7449 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7450 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7451 ? MINUS_EXPR : PLUS_EXPR,
7452 arg1, TREE_OPERAND (arg0, 1), 0))
7453 && ! TREE_CONSTANT_OVERFLOW (tem))
7454 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7456 /* Likewise, we can simplify a comparison of a real constant with
7457 a MINUS_EXPR whose first operand is also a real constant, i.e.
7458 (c1 - x) < c2 becomes x > c1-c2. */
7459 if (flag_unsafe_math_optimizations
7460 && TREE_CODE (arg1) == REAL_CST
7461 && TREE_CODE (arg0) == MINUS_EXPR
7462 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7463 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7465 && ! TREE_CONSTANT_OVERFLOW (tem))
7466 return fold (build2 (swap_tree_comparison (code), type,
7467 TREE_OPERAND (arg0, 1), tem));
7469 /* Fold comparisons against built-in math functions. */
7470 if (TREE_CODE (arg1) == REAL_CST
7471 && flag_unsafe_math_optimizations
7472 && ! flag_errno_math)
7474 enum built_in_function fcode = builtin_mathfn_code (arg0);
7476 if (fcode != END_BUILTINS)
7478 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7479 if (tem != NULL_TREE)
7485 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7486 if (TREE_CONSTANT (arg1)
7487 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7488 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7489 /* This optimization is invalid for ordered comparisons
7490 if CONST+INCR overflows or if foo+incr might overflow.
7491 This optimization is invalid for floating point due to rounding.
7492 For pointer types we assume overflow doesn't happen. */
7493 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7494 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7495 && (code == EQ_EXPR || code == NE_EXPR))))
7497 tree varop, newconst;
7499 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7501 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7502 arg1, TREE_OPERAND (arg0, 1)));
7503 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7504 TREE_OPERAND (arg0, 0),
7505 TREE_OPERAND (arg0, 1));
7509 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7510 arg1, TREE_OPERAND (arg0, 1)));
7511 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7512 TREE_OPERAND (arg0, 0),
7513 TREE_OPERAND (arg0, 1));
7517 /* If VAROP is a reference to a bitfield, we must mask
7518 the constant by the width of the field. */
7519 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7520 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7522 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7523 int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7524 tree folded_compare, shift;
7526 /* First check whether the comparison would come out
7527 always the same. If we don't do that we would
7528 change the meaning with the masking. */
7529 folded_compare = fold (build2 (code, type,
7530 TREE_OPERAND (varop, 0),
7532 if (integer_zerop (folded_compare)
7533 || integer_onep (folded_compare))
7534 return omit_one_operand (type, folded_compare, varop);
7536 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
7538 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7540 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7544 return fold (build2 (code, type, varop, newconst));
7547 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7548 This transformation affects the cases which are handled in later
7549 optimizations involving comparisons with non-negative constants. */
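/* For example, x >= 4 becomes x > 3 and x < 4 becomes x <= 3, so
   the code below only needs to recognize the GT/LE forms. */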
7550 if (TREE_CODE (arg1) == INTEGER_CST
7551 && TREE_CODE (arg0) != INTEGER_CST
7552 && tree_int_cst_sgn (arg1) > 0)
7553 {
7554 switch (code)
7555 {
7556 case GE_EXPR:
7557 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7558 return fold (build2 (GT_EXPR, type, arg0, arg1));
7560 case LT_EXPR:
7561 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7562 return fold (build2 (LE_EXPR, type, arg0, arg1));
7569 /* Comparisons with the highest or lowest possible integer of
7570 the specified size will have known values.
7572 This is quite similar to fold_relational_hi_lo; however, my
7573 attempts to share the code have been nothing but trouble.
7574 I give up for now. */
7576 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7578 if (TREE_CODE (arg1) == INTEGER_CST
7579 && ! TREE_CONSTANT_OVERFLOW (arg1)
7580 && width <= HOST_BITS_PER_WIDE_INT
7581 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7582 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7584 unsigned HOST_WIDE_INT signed_max;
7585 unsigned HOST_WIDE_INT max, min;
7587 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7589 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
7590 {
7591 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7592 min = 0;
7593 }
7594 else
7595 {
7596 max = signed_max;
7597 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7598 }
7600 if (TREE_INT_CST_HIGH (arg1) == 0
7601 && TREE_INT_CST_LOW (arg1) == max)
7602 switch (code)
7603 {
7604 case GT_EXPR:
7605 return omit_one_operand (type, integer_zero_node, arg0);
7607 case GE_EXPR:
7608 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7610 case LE_EXPR:
7611 return omit_one_operand (type, integer_one_node, arg0);
7613 case LT_EXPR:
7614 return fold (build2 (NE_EXPR, type, arg0, arg1));
7616 /* The GE_EXPR and LT_EXPR cases above are not normally
7617 reached because of previous transformations. */
7622 else if (TREE_INT_CST_HIGH (arg1) == 0
7623 && TREE_INT_CST_LOW (arg1) == max - 1)
7627 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7628 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7630 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7631 return fold (build2 (NE_EXPR, type, arg0, arg1));
7635 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7636 && TREE_INT_CST_LOW (arg1) == min)
7637 switch (code)
7638 {
7639 case LT_EXPR:
7640 return omit_one_operand (type, integer_zero_node, arg0);
7642 case LE_EXPR:
7643 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7645 case GE_EXPR:
7646 return omit_one_operand (type, integer_one_node, arg0);
7648 case GT_EXPR:
7649 return fold (build2 (NE_EXPR, type, arg0, arg1));
7654 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7655 && TREE_INT_CST_LOW (arg1) == min + 1)
7659 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7660 return fold (build2 (NE_EXPR, type, arg0, arg1));
7662 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7663 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7668 else if (!in_gimple_form
7669 && TREE_INT_CST_HIGH (arg1) == 0
7670 && TREE_INT_CST_LOW (arg1) == signed_max
7671 && TYPE_UNSIGNED (TREE_TYPE (arg1))
7672 /* signed_type does not work on pointer types. */
7673 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7675 /* The following case also applies to X < signed_max+1
7676 and X >= signed_max+1 because of previous transformations. */
7677 if (code == LE_EXPR || code == GT_EXPR)
7678 {
7679 tree st0, st1;
7680 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
7681 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
7682 return fold
7683 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7684 type, fold_convert (st0, arg0),
7685 fold_convert (st1, integer_zero_node)));
7691 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7692 a MINUS_EXPR of a constant, we can convert it into a comparison with
7693 a revised constant as long as no overflow occurs. */
7694 if ((code == EQ_EXPR || code == NE_EXPR)
7695 && TREE_CODE (arg1) == INTEGER_CST
7696 && (TREE_CODE (arg0) == PLUS_EXPR
7697 || TREE_CODE (arg0) == MINUS_EXPR)
7698 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7699 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7700 ? MINUS_EXPR : PLUS_EXPR,
7701 arg1, TREE_OPERAND (arg0, 1), 0))
7702 && ! TREE_CONSTANT_OVERFLOW (tem))
7703 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7705 /* Similarly for a NEGATE_EXPR. */
7706 else if ((code == EQ_EXPR || code == NE_EXPR)
7707 && TREE_CODE (arg0) == NEGATE_EXPR
7708 && TREE_CODE (arg1) == INTEGER_CST
7709 && 0 != (tem = negate_expr (arg1))
7710 && TREE_CODE (tem) == INTEGER_CST
7711 && ! TREE_CONSTANT_OVERFLOW (tem))
7712 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7714 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7715 for !=. Don't do this for ordered comparisons due to overflow. */
7716 else if ((code == NE_EXPR || code == EQ_EXPR)
7717 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7718 return fold (build2 (code, type,
7719 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7721 /* If we are widening one operand of an integer comparison,
7722 see if the other operand is similarly being widened. Perhaps we
7723 can do the comparison in the narrower type. */
7724 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7725 && TREE_CODE (arg0) == NOP_EXPR
7726 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7727 && (code == EQ_EXPR || code == NE_EXPR
7728 || TYPE_UNSIGNED (TREE_TYPE (arg0))
7729 == TYPE_UNSIGNED (TREE_TYPE (tem)))
7730 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7731 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7732 || (TREE_CODE (t1) == INTEGER_CST
7733 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7734 return fold (build2 (code, type, tem,
7735 fold_convert (TREE_TYPE (tem), t1)));
7737 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7738 constant, we can simplify it. */
7739 else if (TREE_CODE (arg1) == INTEGER_CST
7740 && (TREE_CODE (arg0) == MIN_EXPR
7741 || TREE_CODE (arg0) == MAX_EXPR)
7742 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7743 return optimize_minmax_comparison (t);
7745 /* If we are comparing an ABS_EXPR with a constant, we can
7746 convert all the cases into explicit comparisons, but they may
7747 well not be faster than doing the ABS and one comparison.
7748 But ABS (X) <= C is a range comparison, which becomes a subtraction
7749 and a comparison, and is probably faster. */
7750 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7751 && TREE_CODE (arg0) == ABS_EXPR
7752 && ! TREE_SIDE_EFFECTS (arg0)
7753 && (0 != (tem = negate_expr (arg1)))
7754 && TREE_CODE (tem) == INTEGER_CST
7755 && ! TREE_CONSTANT_OVERFLOW (tem))
7756 return fold (build2 (TRUTH_ANDIF_EXPR, type,
7757 build2 (GE_EXPR, type,
7758 TREE_OPERAND (arg0, 0), tem),
7759 build2 (LE_EXPR, type,
7760 TREE_OPERAND (arg0, 0), arg1)));
7762 /* If this is an EQ or NE comparison with zero and ARG0 is
7763 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7764 two operations, but the latter can be done in one less insn
7765 on machines that have only two-operand insns or on which a
7766 constant cannot be the first operand. */
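/* For example, ((1 << n) & b) == 0 becomes ((b >> n) & 1) == 0,
   which avoids materializing the shifted constant. */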
7767 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7768 && TREE_CODE (arg0) == BIT_AND_EXPR)
7770 tree arg00 = TREE_OPERAND (arg0, 0);
7771 tree arg01 = TREE_OPERAND (arg0, 1);
7772 if (TREE_CODE (arg00) == LSHIFT_EXPR
7773 && integer_onep (TREE_OPERAND (arg00, 0)))
7775 fold (build2 (code, type,
7776 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7777 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
7778 arg01, TREE_OPERAND (arg00, 1)),
7779 fold_convert (TREE_TYPE (arg0),
7782 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7783 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7785 fold (build2 (code, type,
7786 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7787 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
7788 arg00, TREE_OPERAND (arg01, 1)),
7789 fold_convert (TREE_TYPE (arg0),
7794 /* If this is an NE or EQ comparison of zero against the result of a
7795 signed MOD operation whose second operand is a power of 2, make
7796 the MOD operation unsigned since it is simpler and equivalent. */
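/* For example, for signed x, (x % 8) == 0 tests the same thing as
   ((unsigned) x % 8U) == 0: both hold exactly when the low three
   bits of x are clear. */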
7797 if ((code == NE_EXPR || code == EQ_EXPR)
7798 && integer_zerop (arg1)
7799 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
7800 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7801 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7802 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7803 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7804 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7806 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
7807 tree newmod = build2 (TREE_CODE (arg0), newtype,
7808 fold_convert (newtype,
7809 TREE_OPERAND (arg0, 0)),
7810 fold_convert (newtype,
7811 TREE_OPERAND (arg0, 1)));
7813 return build2 (code, type, newmod, fold_convert (newtype, arg1));
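/* Editorial illustration: for int x, x % 8 == 0 tests the same
   thing as (unsigned int) x % 8 == 0, since divisibility by a
   power of 2 depends only on the low-order bits; the unsigned
   form can then be expanded as a simple mask test such as
   (x & 7) == 0.  */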
7816 /* If this is an NE comparison of zero with an AND of one, remove the
7817 comparison since the AND will give the correct value. */
7818 if (code == NE_EXPR && integer_zerop (arg1)
7819 && TREE_CODE (arg0) == BIT_AND_EXPR
7820 && integer_onep (TREE_OPERAND (arg0, 1)))
7821 return fold_convert (type, arg0);
7823 /* If we have (A & C) == C where C is a power of 2, convert this into
7824 (A & C) != 0. Similarly for NE_EXPR. */
7825 if ((code == EQ_EXPR || code == NE_EXPR)
7826 && TREE_CODE (arg0) == BIT_AND_EXPR
7827 && integer_pow2p (TREE_OPERAND (arg0, 1))
7828 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7829 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7830 arg0, integer_zero_node));
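/* Editorial illustration: with the single-bit constant 0x10,
   (a & 0x10) == 0x10 becomes (a & 0x10) != 0, valid because the
   masked value can only be 0 or 0x10.  The != 0 form is the shape
   fold_single_bit_test below knows how to turn into shifts.  */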
7832 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7833 2, then fold the expression into shifts and logical operations. */
7834 tem = fold_single_bit_test (code, arg0, arg1, type);
7838 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7839 Similarly for NE_EXPR. */
7840 if ((code == EQ_EXPR || code == NE_EXPR)
7841 && TREE_CODE (arg0) == BIT_AND_EXPR
7842 && TREE_CODE (arg1) == INTEGER_CST
7843 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7846 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7847 arg1, build1 (BIT_NOT_EXPR,
7848 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7849 TREE_OPERAND (arg0, 1))));
7850 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7851 if (integer_nonzerop (dandnotc))
7852 return omit_one_operand (type, rslt, arg0);
7855 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7856 Similarly for NE_EXPR. */
7857 if ((code == EQ_EXPR || code == NE_EXPR)
7858 && TREE_CODE (arg0) == BIT_IOR_EXPR
7859 && TREE_CODE (arg1) == INTEGER_CST
7860 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7863 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
7864 TREE_OPERAND (arg0, 1),
7865 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7866 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7867 if (integer_nonzerop (candnotd))
7868 return omit_one_operand (type, rslt, arg0);
7871 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7872 and similarly for >= into !=. */
7873 if ((code == LT_EXPR || code == GE_EXPR)
7874 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7875 && TREE_CODE (arg1) == LSHIFT_EXPR
7876 && integer_onep (TREE_OPERAND (arg1, 0)))
7877 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7878 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7879 TREE_OPERAND (arg1, 1)),
7880 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7882 else if ((code == LT_EXPR || code == GE_EXPR)
7883 && TYPE_UNSIGNED (TREE_TYPE (arg0))
7884 && (TREE_CODE (arg1) == NOP_EXPR
7885 || TREE_CODE (arg1) == CONVERT_EXPR)
7886 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7887 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7889 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7890 fold_convert (TREE_TYPE (arg0),
7891 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7892 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7894 fold_convert (TREE_TYPE (arg0), integer_zero_node));
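/* Editorial illustration: for unsigned x and a shift count y with
   0 <= y < width, x < (1 << y) becomes (x >> y) == 0, since x is
   below 2**y exactly when no bit at position y or above is set;
   x >= (1 << y) likewise becomes (x >> y) != 0.  */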
7896 /* Simplify comparison of something with itself. (For IEEE
7897 floating-point, we can only do some of these simplifications.) */
7898 if (operand_equal_p (arg0, arg1, 0))
7903 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7904 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7905 return constant_boolean_node (1, type);
7910 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7911 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7912 return constant_boolean_node (1, type);
7913 return fold (build2 (EQ_EXPR, type, arg0, arg1));
7916 /* For NE, we can only do this simplification if integer
7917 or we don't honor IEEE floating point NaNs. */
7918 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7919 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7921 /* ... fall through ... */
7924 return constant_boolean_node (0, type);
7930 /* If we are comparing an expression that just has comparisons
7931 of two integer values, arithmetic expressions of those comparisons,
7932 and constants, we can simplify it. There are only three cases
7933 to check: the two values can either be equal, the first can be
7934 greater, or the second can be greater. Fold the expression for
7935 those three values. Since each value must be 0 or 1, we have
7936 eight possibilities, each of which corresponds to the constant 0
7937 or 1 or one of the six possible comparisons.
7939 This handles common cases like (a > b) == 0 but also handles
7940 expressions like ((x > y) - (y > x)) > 0, which supposedly
7941 occur in macroized code. */
7943 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7945 tree cval1 = 0, cval2 = 0;
7946 int save_p = 0;
7948 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7949 /* Don't handle degenerate cases here; they should already
7950 have been handled anyway. */
7951 && cval1 != 0 && cval2 != 0
7952 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7953 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7954 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7955 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7956 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7957 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7958 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7960 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7961 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7963 /* We can't just pass T to eval_subst in case cval1 or cval2
7964 was the same as ARG1. */
7967 = fold (build2 (code, type,
7968 eval_subst (arg0, cval1, maxval,
7972 = fold (build2 (code, type,
7973 eval_subst (arg0, cval1, maxval,
7977 = fold (build2 (code, type,
7978 eval_subst (arg0, cval1, minval,
7982 /* All three of these results should be 0 or 1. Confirm they
7983 are. Then use those values to select the proper code
7984 to return. */
7986 if ((integer_zerop (high_result)
7987 || integer_onep (high_result))
7988 && (integer_zerop (equal_result)
7989 || integer_onep (equal_result))
7990 && (integer_zerop (low_result)
7991 || integer_onep (low_result)))
7993 /* Make a 3-bit mask with the high-order bit being the
7994 value for `>', the next for `=', and the low for `<'. */
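/* Editorial worked example: folding (a > b) == 0 gives
   high_result = (1 == 0) = 0, equal_result = (0 == 0) = 1 and
   low_result = (0 == 0) = 1, so the mask is 0*4 + 2 + 1 = 3,
   which selects LE_EXPR; the whole expression folds to a <= b.  */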
7995 switch ((integer_onep (high_result) * 4)
7996 + (integer_onep (equal_result) * 2)
7997 + integer_onep (low_result))
8001 return omit_one_operand (type, integer_zero_node, arg0);
8022 return omit_one_operand (type, integer_one_node, arg0);
8025 tem = build2 (code, type, cval1, cval2);
8027 return save_expr (tem);
8034 /* If this is a comparison of a field, we may be able to simplify it. */
8035 if (((TREE_CODE (arg0) == COMPONENT_REF
8036 && lang_hooks.can_use_bit_fields_p ())
8037 || TREE_CODE (arg0) == BIT_FIELD_REF)
8038 && (code == EQ_EXPR || code == NE_EXPR)
8039 /* Handle the constant case even without -O
8040 to make sure the warnings are given. */
8041 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8043 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8048 /* If this is a comparison of complex values and either or both sides
8049 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8050 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8051 This may prevent needless evaluations. */
8052 if ((code == EQ_EXPR || code == NE_EXPR)
8053 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8054 && (TREE_CODE (arg0) == COMPLEX_EXPR
8055 || TREE_CODE (arg1) == COMPLEX_EXPR
8056 || TREE_CODE (arg0) == COMPLEX_CST
8057 || TREE_CODE (arg1) == COMPLEX_CST))
8059 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8060 tree real0, imag0, real1, imag1;
8062 arg0 = save_expr (arg0);
8063 arg1 = save_expr (arg1);
8064 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8065 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8066 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8067 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8069 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8072 fold (build2 (code, type, real0, real1)),
8073 fold (build2 (code, type, imag0, imag1))));
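/* Editorial illustration: if z is _Complex double and the other
   side is the COMPLEX_EXPR a + b*i, then z == a + b*i splits into
   real (z) == a && imag (z) == b, while != uses || instead; each
   half can then fold on its own.  */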
8076 /* Optimize comparisons of strlen vs zero to a compare of the
8077 first character of the string vs zero. To wit,
8078 strlen(ptr) == 0 => *ptr == 0
8079 strlen(ptr) != 0 => *ptr != 0
8080 Other cases should reduce to one of these two (or a constant)
8081 due to the return value of strlen being unsigned. */
8082 if ((code == EQ_EXPR || code == NE_EXPR)
8083 && integer_zerop (arg1)
8084 && TREE_CODE (arg0) == CALL_EXPR)
8086 tree fndecl = get_callee_fndecl (arg0);
8090 && DECL_BUILT_IN (fndecl)
8091 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8092 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8093 && (arglist = TREE_OPERAND (arg0, 1))
8094 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8095 && ! TREE_CHAIN (arglist))
8096 return fold (build2 (code, type,
8097 build1 (INDIRECT_REF, char_type_node,
8098 TREE_VALUE(arglist)),
8099 integer_zero_node));
8102 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8103 into a single range test. */
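/* Editorial worked example: for unsigned x, x / 4 == 3 holds
   exactly when 12 <= x <= 15, so fold_div_compare can replace the
   division by a single range test on x.  */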
8104 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8105 && TREE_CODE (arg1) == INTEGER_CST
8106 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8107 && !integer_zerop (TREE_OPERAND (arg0, 1))
8108 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8109 && !TREE_OVERFLOW (arg1))
8111 t1 = fold_div_compare (code, type, arg0, arg1);
8112 if (t1 != NULL_TREE)
8116 /* Both ARG0 and ARG1 are known to be constants at this point. */
8117 t1 = fold_relational_const (code, type, arg0, arg1);
8118 return (t1 == NULL_TREE ? t : t1);
8121 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8122 so all simple results must be passed through pedantic_non_lvalue. */
8123 if (TREE_CODE (arg0) == INTEGER_CST)
8125 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8126 /* Only optimize constant conditions when the selected branch
8127 has the same type as the COND_EXPR. This avoids optimizing
8128 away "c ? x : throw", where the throw has a void type. */
8129 if (! VOID_TYPE_P (TREE_TYPE (tem))
8130 || VOID_TYPE_P (type))
8131 return pedantic_non_lvalue (tem);
8134 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8135 return pedantic_omit_one_operand (type, arg1, arg0);
8137 /* If we have A op B ? A : C, we may be able to convert this to a
8138 simpler expression, depending on the operation and the values
8139 of B and C. Signed zeros prevent all of these transformations,
8140 for reasons given above each one. */
8142 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8143 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8144 arg1, TREE_OPERAND (arg0, 1))
8145 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8147 tree arg2 = TREE_OPERAND (t, 2);
8148 enum tree_code comp_code = TREE_CODE (arg0);
8152 /* If we have A op 0 ? A : -A, consider applying the following
8153 transformations:
8155 A == 0? A : -A same as -A
8156 A != 0? A : -A same as A
8157 A >= 0? A : -A same as abs (A)
8158 A > 0? A : -A same as abs (A)
8159 A <= 0? A : -A same as -abs (A)
8160 A < 0? A : -A same as -abs (A)
8162 None of these transformations work for modes with signed
8163 zeros. If A is +/-0, the first two transformations will
8164 change the sign of the result (from +0 to -0, or vice
8165 versa). The last four will fix the sign of the result,
8166 even though the original expressions could be positive or
8167 negative, depending on the sign of A.
8169 Note that all these transformations are correct if A is
8170 NaN, since the two alternatives (A and -A) are also NaNs. */
8171 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
8172 ? real_zerop (TREE_OPERAND (arg0, 1))
8173 : integer_zerop (TREE_OPERAND (arg0, 1)))
8174 && TREE_CODE (arg2) == NEGATE_EXPR
8175 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
8179 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
8180 tem = fold_convert (type, negate_expr (tem));
8181 return pedantic_non_lvalue (tem);
8183 return pedantic_non_lvalue (fold_convert (type, arg1));
8186 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8187 arg1 = fold_convert (lang_hooks.types.signed_type
8188 (TREE_TYPE (arg1)), arg1);
8189 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8190 return pedantic_non_lvalue (fold_convert (type, arg1));
8193 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8194 arg1 = fold_convert (lang_hooks.types.signed_type
8195 (TREE_TYPE (arg1)), arg1);
8196 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8197 arg1 = negate_expr (fold_convert (type, arg1));
8198 return pedantic_non_lvalue (arg1);
8203 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
8204 A == 0 ? A : 0 is always 0 unless A is -0. Note that
8205 both transformations are correct when A is NaN: A != 0
8206 is then true, and A == 0 is false. */
8208 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8210 if (comp_code == NE_EXPR)
8211 return pedantic_non_lvalue (fold_convert (type, arg1));
8212 else if (comp_code == EQ_EXPR)
8213 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8216 /* Try some transformations of A op B ? A : B.
8218 A == B? A : B same as B
8219 A != B? A : B same as A
8220 A >= B? A : B same as max (A, B)
8221 A > B? A : B same as max (B, A)
8222 A <= B? A : B same as min (A, B)
8223 A < B? A : B same as min (B, A)
8225 As above, these transformations don't work in the presence
8226 of signed zeros. For example, if A and B are zeros of
8227 opposite sign, the first two transformations will change
8228 the sign of the result. In the last four, the original
8229 expressions give different results for (A=+0, B=-0) and
8230 (A=-0, B=+0), but the transformed expressions do not.
8232 The first two transformations are correct if either A or B
8233 is a NaN. In the first transformation, the condition will
8234 be false, and B will indeed be chosen. In the case of the
8235 second transformation, the condition A != B will be true,
8236 and A will be chosen.
8238 The conversions to max() and min() are not correct if B is
8239 a number and A is not. The conditions in the original
8240 expressions will be false, so all four give B. The min()
8241 and max() versions would give a NaN instead. */
8242 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8243 arg2, TREE_OPERAND (arg0, 0)))
8245 tree comp_op0 = TREE_OPERAND (arg0, 0);
8246 tree comp_op1 = TREE_OPERAND (arg0, 1);
8247 tree comp_type = TREE_TYPE (comp_op0);
8249 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8250 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8260 return pedantic_non_lvalue (fold_convert (type, arg2));
8262 return pedantic_non_lvalue (fold_convert (type, arg1));
8265 /* In C++ a ?: expression can be an lvalue, so put the
8266 operand which will be used if they are equal first
8267 so that we can convert this back to the
8268 corresponding COND_EXPR. */
8269 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8270 return pedantic_non_lvalue (fold_convert
8271 (type, fold (build2 (MIN_EXPR, comp_type,
8272 (comp_code == LE_EXPR
8273 ? comp_op0 : comp_op1),
8274 (comp_code == LE_EXPR
8275 ? comp_op1 : comp_op0)))));
8279 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8280 return pedantic_non_lvalue (fold_convert
8281 (type, fold (build2 (MAX_EXPR, comp_type,
8282 (comp_code == GE_EXPR
8283 ? comp_op0 : comp_op1),
8284 (comp_code == GE_EXPR
8285 ? comp_op1 : comp_op0)))));
8292 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8293 we might still be able to simplify this. For example,
8294 if C1 is one less or one more than C2, this might have started
8295 out as a MIN or MAX and been transformed by this function.
8296 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8298 if (INTEGRAL_TYPE_P (type)
8299 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8300 && TREE_CODE (arg2) == INTEGER_CST)
8304 /* We can replace A with C1 in this case. */
8305 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8306 return fold (build3 (code, type, TREE_OPERAND (t, 0), arg1,
8307 TREE_OPERAND (t, 2)));
8310 /* If C1 is C2 + 1, this is min(A, C2). */
8311 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
8313 && operand_equal_p (TREE_OPERAND (arg0, 1),
8314 const_binop (PLUS_EXPR, arg2,
8315 integer_one_node, 0),
8317 return pedantic_non_lvalue
8318 (fold (build2 (MIN_EXPR, type, arg1, arg2)));
8322 /* If C1 is C2 - 1, this is min(A, C2). */
8323 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
8325 && operand_equal_p (TREE_OPERAND (arg0, 1),
8326 const_binop (MINUS_EXPR, arg2,
8327 integer_one_node, 0),
8329 return pedantic_non_lvalue
8330 (fold (build2 (MIN_EXPR, type, arg1, arg2)));
8334 /* If C1 is C2 - 1, this is max(A, C2). */
8335 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
8337 && operand_equal_p (TREE_OPERAND (arg0, 1),
8338 const_binop (MINUS_EXPR, arg2,
8339 integer_one_node, 0),
8341 return pedantic_non_lvalue
8342 (fold (build2 (MAX_EXPR, type, arg1, arg2)));
8346 /* If C1 is C2 + 1, this is max(A, C2). */
8347 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
8349 && operand_equal_p (TREE_OPERAND (arg0, 1),
8350 const_binop (PLUS_EXPR, arg2,
8351 integer_one_node, 0),
8353 return pedantic_non_lvalue
8354 (fold (build2 (MAX_EXPR, type, arg1, arg2)));
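/* Editorial worked example: x < 4 ? x : 3 matches the LT_EXPR
   case with C1 == 4 and C2 == 3; since C1 is C2 + 1 it becomes
   MIN (x, 3).  Likewise x > 2 ? x : 3 matches the GT_EXPR case
   with C1 == C2 - 1 and becomes MAX (x, 3).  */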
8363 /* If the second operand is simpler than the third, swap them
8364 since that produces better jump optimization results. */
8365 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8366 TREE_OPERAND (t, 2), false))
8368 /* See if this can be inverted. If it can't, possibly because
8369 it was a floating-point inequality comparison, don't do
8370 anything. */
8371 tem = invert_truthvalue (arg0);
8373 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8374 return fold (build3 (code, type, tem,
8375 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8378 /* Convert A ? 1 : 0 to simply A. */
8379 if (integer_onep (TREE_OPERAND (t, 1))
8380 && integer_zerop (TREE_OPERAND (t, 2))
8381 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8382 call to fold will try to move the conversion inside
8383 a COND, which will recurse. In that case, the COND_EXPR
8384 is probably the best choice, so leave it alone. */
8385 && type == TREE_TYPE (arg0))
8386 return pedantic_non_lvalue (arg0);
8388 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8389 over COND_EXPR in cases such as floating point comparisons. */
8390 if (integer_zerop (TREE_OPERAND (t, 1))
8391 && integer_onep (TREE_OPERAND (t, 2))
8392 && truth_value_p (TREE_CODE (arg0)))
8393 return pedantic_non_lvalue (fold_convert (type,
8394 invert_truthvalue (arg0)));
8396 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8397 operation is simply A & 2. */
8399 if (integer_zerop (TREE_OPERAND (t, 2))
8400 && TREE_CODE (arg0) == NE_EXPR
8401 && integer_zerop (TREE_OPERAND (arg0, 1))
8402 && integer_pow2p (arg1)
8403 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8404 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8405 arg1, OEP_ONLY_CONST))
8406 return pedantic_non_lvalue (fold_convert (type,
8407 TREE_OPERAND (arg0, 0)));
8409 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8410 if (integer_zerop (TREE_OPERAND (t, 2))
8411 && truth_value_p (TREE_CODE (arg0))
8412 && truth_value_p (TREE_CODE (arg1)))
8413 return pedantic_non_lvalue (fold (build2 (TRUTH_ANDIF_EXPR, type,
8416 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8417 if (integer_onep (TREE_OPERAND (t, 2))
8418 && truth_value_p (TREE_CODE (arg0))
8419 && truth_value_p (TREE_CODE (arg1)))
8421 /* Only perform transformation if ARG0 is easily inverted. */
8422 tem = invert_truthvalue (arg0);
8423 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8424 return pedantic_non_lvalue (fold (build2 (TRUTH_ORIF_EXPR, type,
8431 /* When pedantic, a compound expression can be neither an lvalue
8432 nor an integer constant expression. */
8433 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8435 /* Don't let (0, 0) be a null pointer constant. */
8436 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8437 : fold_convert (type, arg1);
8438 return pedantic_non_lvalue (tem);
8442 return build_complex (type, arg0, arg1);
8446 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8448 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8449 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8450 TREE_OPERAND (arg0, 1));
8451 else if (TREE_CODE (arg0) == COMPLEX_CST)
8452 return TREE_REALPART (arg0);
8453 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8454 return fold (build2 (TREE_CODE (arg0), type,
8455 fold (build1 (REALPART_EXPR, type,
8456 TREE_OPERAND (arg0, 0))),
8457 fold (build1 (REALPART_EXPR, type,
8458 TREE_OPERAND (arg0, 1)))));
8462 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8463 return fold_convert (type, integer_zero_node);
8464 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8465 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8466 TREE_OPERAND (arg0, 0));
8467 else if (TREE_CODE (arg0) == COMPLEX_CST)
8468 return TREE_IMAGPART (arg0);
8469 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8470 return fold (build2 (TREE_CODE (arg0), type,
8471 fold (build1 (IMAGPART_EXPR, type,
8472 TREE_OPERAND (arg0, 0))),
8473 fold (build1 (IMAGPART_EXPR, type,
8474 TREE_OPERAND (arg0, 1)))));
8477 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8478 appropriate. */
8479 case CLEANUP_POINT_EXPR:
8480 if (! has_cleanups (arg0))
8481 return TREE_OPERAND (t, 0);
8484 enum tree_code code0 = TREE_CODE (arg0);
8485 int kind0 = TREE_CODE_CLASS (code0);
8486 tree arg00 = TREE_OPERAND (arg0, 0);
8489 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8490 return fold (build1 (code0, type,
8491 fold (build1 (CLEANUP_POINT_EXPR,
8492 TREE_TYPE (arg00), arg00))));
8494 if (kind0 == '<' || kind0 == '2'
8495 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8496 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8497 || code0 == TRUTH_XOR_EXPR)
8499 arg01 = TREE_OPERAND (arg0, 1);
8501 if (TREE_CONSTANT (arg00)
8502 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8503 && ! has_cleanups (arg00)))
8504 return fold (build2 (code0, type, arg00,
8505 fold (build1 (CLEANUP_POINT_EXPR,
8506 TREE_TYPE (arg01), arg01))));
8508 if (TREE_CONSTANT (arg01))
8509 return fold (build2 (code0, type,
8510 fold (build1 (CLEANUP_POINT_EXPR,
8511 TREE_TYPE (arg00), arg00)),
8519 /* Check for a built-in function. */
8520 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8521 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8523 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8525 tree tmp = fold_builtin (t);
8533 } /* switch (code) */
8536 #ifdef ENABLE_FOLD_CHECKING
8539 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8540 static void fold_check_failed (tree, tree);
8541 void print_fold_checksum (tree);
8543 /* When --enable-checking=fold, compute a digest of expr before
8544 and after the actual fold call to verify that fold did not accidentally
8545 change the original expr. */
8552 unsigned char checksum_before[16], checksum_after[16];
8555 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8556 md5_init_ctx (&ctx);
8557 fold_checksum_tree (expr, &ctx, ht);
8558 md5_finish_ctx (&ctx, checksum_before);
8561 ret = fold_1 (expr);
8563 md5_init_ctx (&ctx);
8564 fold_checksum_tree (expr, &ctx, ht);
8565 md5_finish_ctx (&ctx, checksum_after);
8568 if (memcmp (checksum_before, checksum_after, 16))
8569 fold_check_failed (expr, ret);
8575 print_fold_checksum (tree expr)
8578 unsigned char checksum[16], cnt;
8581 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8582 md5_init_ctx (&ctx);
8583 fold_checksum_tree (expr, &ctx, ht);
8584 md5_finish_ctx (&ctx, checksum);
8586 for (cnt = 0; cnt < 16; ++cnt)
8587 fprintf (stderr, "%02x", checksum[cnt]);
8588 putc ('\n', stderr);
8592 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8594 internal_error ("fold check: original tree changed by fold");
8598 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8601 enum tree_code code;
8602 char buf[sizeof (struct tree_decl)];
8605 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8606 > sizeof (struct tree_decl)
8607 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8611 slot = htab_find_slot (ht, expr, INSERT);
8615 code = TREE_CODE (expr);
8616 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8618 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8619 memcpy (buf, expr, tree_size (expr));
8621 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8623 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8625 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8626 memcpy (buf, expr, tree_size (expr));
8628 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8630 else if (TREE_CODE_CLASS (code) == 't'
8631 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8633 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8634 memcpy (buf, expr, tree_size (expr));
8636 TYPE_POINTER_TO (expr) = NULL;
8637 TYPE_REFERENCE_TO (expr) = NULL;
8639 md5_process_bytes (expr, tree_size (expr), ctx);
8640 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8641 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8642 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8643 len = TREE_CODE_LENGTH (code);
8644 switch (TREE_CODE_CLASS (code))
8650 md5_process_bytes (TREE_STRING_POINTER (expr),
8651 TREE_STRING_LENGTH (expr), ctx);
8654 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8655 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8658 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8668 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8669 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8672 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8673 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8682 case SAVE_EXPR: len = 2; break;
8683 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8684 case RTL_EXPR: len = 0; break;
8685 case WITH_CLEANUP_EXPR: len = 2; break;
8694 for (i = 0; i < len; ++i)
8695 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8698 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8699 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8700 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8701 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8702 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8703 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8704 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8705 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8706 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8707 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8708 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8711 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8712 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8713 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8714 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8715 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8716 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8717 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8718 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8719 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8720 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8729 /* Perform constant folding and related simplification of initializer
8730 expression EXPR. This behaves identically to "fold" but ignores
8731 potential run-time traps and exceptions that fold must preserve. */
8734 fold_initializer (tree expr)
8736 int saved_signaling_nans = flag_signaling_nans;
8737 int saved_trapping_math = flag_trapping_math;
8738 int saved_trapv = flag_trapv;
8741 flag_signaling_nans = 0;
8742 flag_trapping_math = 0;
8743 flag_trapv = 0;
8745 result = fold (expr);
8747 flag_signaling_nans = saved_signaling_nans;
8748 flag_trapping_math = saved_trapping_math;
8749 flag_trapv = saved_trapv;
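/* Editorial illustration (behavior inferred from the flags saved
   and cleared above): in a static initializer such as
   double d = 1.0 / 0.0; this entry point may fold the division to
   +Inf even when -ftrapping-math would otherwise oblige fold to
   preserve the possibly-trapping operation.  */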
8754 /* Determine if first argument is a multiple of second argument. Return 0 if
8755 it is not, or if we cannot easily determine it to be.
8757 An example of the sort of thing we care about (at this point; this routine
8758 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8759 fold cases do now) is discovering that
8761 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8763 is a multiple of
8765 SAVE_EXPR (J * 8)
8767 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8769 This code also handles discovering that
8771 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8773 is a multiple of 8 so we don't have to worry about dealing with a
8774 possible remainder.
8776 Note that we *look* inside a SAVE_EXPR only to determine how it was
8777 calculated; it is not safe for fold to do much of anything else with the
8778 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8779 at run time. For example, the latter example above *cannot* be implemented
8780 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8781 evaluation time of the original SAVE_EXPR is not necessarily the same at
8782 the time the new expression is evaluated. The only optimization of this
8783 sort that would be valid is changing
8785 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8787 divided by 8 to
8789 SAVE_EXPR (I) * SAVE_EXPR (J)
8791 (where the same SAVE_EXPR (J) is used in the original and the
8792 transformed version). */
8795 multiple_of_p (tree type, tree top, tree bottom)
8797 if (operand_equal_p (top, bottom, 0))
8800 if (TREE_CODE (type) != INTEGER_TYPE)
8803 switch (TREE_CODE (top))
8806 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8807 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8811 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8812 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8815 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8819 op1 = TREE_OPERAND (top, 1);
8820 /* const_binop may not detect overflow correctly,
8821 so check for it explicitly here. */
8822 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8823 > TREE_INT_CST_LOW (op1)
8824 && TREE_INT_CST_HIGH (op1) == 0
8825 && 0 != (t1 = fold_convert (type,
8826 const_binop (LSHIFT_EXPR,
8829 && ! TREE_OVERFLOW (t1))
8830 return multiple_of_p (type, t1, bottom);
8835 /* Can't handle conversions from non-integral or wider integral type. */
8836 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8837 || (TYPE_PRECISION (type)
8838 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8841 /* ... fall through ... */
8844 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8847 if (TREE_CODE (bottom) != INTEGER_CST
8848 || (TYPE_UNSIGNED (type)
8849 && (tree_int_cst_sgn (top) < 0
8850 || tree_int_cst_sgn (bottom) < 0)))
8852 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8860 /* Return true if `t' is known to be non-negative. */
8863 tree_expr_nonnegative_p (tree t)
8865 switch (TREE_CODE (t))
8871 return tree_int_cst_sgn (t) >= 0;
8874 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8877 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8878 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8879 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8881 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8882 both unsigned and at least 2 bits shorter than the result. */
8883 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8884 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8885 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8887 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8888 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8889 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8890 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8892 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8893 TYPE_PRECISION (inner2)) + 1;
8894 return prec < TYPE_PRECISION (TREE_TYPE (t));
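/* Editorial worked example: adding two zero-extended unsigned
   chars in a 32-bit int yields at most 255 + 255 = 510, which
   needs 9 bits; since 9 < 32 the sign bit can never be set, so
   the sum is known to be non-negative.  */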
8900 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8902 /* x * x for floating point x is always non-negative. */
8903 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8905 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8906 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8909 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8910 both unsigned and the sum of their precisions is smaller than the precision of the result. */
8911 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8912 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8913 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8915 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8916 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8917 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
8918 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
8919 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8920 < TYPE_PRECISION (TREE_TYPE (t));
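/* Editorial worked example: multiplying two zero-extended
   unsigned chars yields at most 255 * 255 = 65025, which fits in
   8 + 8 = 16 bits; since 16 < 32 the product cannot reach the
   sign bit of a 32-bit int.  */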
8924 case TRUNC_DIV_EXPR:
8926 case FLOOR_DIV_EXPR:
8927 case ROUND_DIV_EXPR:
8928 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8929 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8931 case TRUNC_MOD_EXPR:
8933 case FLOOR_MOD_EXPR:
8934 case ROUND_MOD_EXPR:
8935 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8938 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8939 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8942 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8943 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8946 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8947 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8951 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8952 tree outer_type = TREE_TYPE (t);
8954 if (TREE_CODE (outer_type) == REAL_TYPE)
8956 if (TREE_CODE (inner_type) == REAL_TYPE)
8957 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8958 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8960 if (TYPE_UNSIGNED (inner_type))
8962 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8965 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8967 if (TREE_CODE (inner_type) == REAL_TYPE)
8968 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
8969 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8970 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8971 && TYPE_UNSIGNED (inner_type);
8977 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8978 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8980 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8982 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8983 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8985 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8986 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8988 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8990 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8992 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8993 case NON_LVALUE_EXPR:
8994 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8996 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8998 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
9002 tree fndecl = get_callee_fndecl (t);
9003 tree arglist = TREE_OPERAND (t, 1);
9005 && DECL_BUILT_IN (fndecl)
9006 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9007 switch (DECL_FUNCTION_CODE (fndecl))
9009 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9010 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9011 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9012 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9014 CASE_BUILTIN_F (BUILT_IN_ACOS)
9015 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9016 CASE_BUILTIN_F (BUILT_IN_CABS)
9017 CASE_BUILTIN_F (BUILT_IN_COSH)
9018 CASE_BUILTIN_F (BUILT_IN_ERFC)
9019 CASE_BUILTIN_F (BUILT_IN_EXP)
9020 CASE_BUILTIN_F (BUILT_IN_EXP10)
9021 CASE_BUILTIN_F (BUILT_IN_EXP2)
9022 CASE_BUILTIN_F (BUILT_IN_FABS)
9023 CASE_BUILTIN_F (BUILT_IN_FDIM)
9024 CASE_BUILTIN_F (BUILT_IN_FREXP)
9025 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9026 CASE_BUILTIN_F (BUILT_IN_POW10)
9027 CASE_BUILTIN_I (BUILT_IN_FFS)
9028 CASE_BUILTIN_I (BUILT_IN_PARITY)
9029 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9033 CASE_BUILTIN_F (BUILT_IN_SQRT)
9034 /* sqrt(-0.0) is -0.0. */
9035 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9037 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9039 CASE_BUILTIN_F (BUILT_IN_ASINH)
9040 CASE_BUILTIN_F (BUILT_IN_ATAN)
9041 CASE_BUILTIN_F (BUILT_IN_ATANH)
9042 CASE_BUILTIN_F (BUILT_IN_CBRT)
9043 CASE_BUILTIN_F (BUILT_IN_CEIL)
9044 CASE_BUILTIN_F (BUILT_IN_ERF)
9045 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9046 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9047 CASE_BUILTIN_F (BUILT_IN_FMOD)
9048 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9049 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9050 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9051 CASE_BUILTIN_F (BUILT_IN_LRINT)
9052 CASE_BUILTIN_F (BUILT_IN_LROUND)
9053 CASE_BUILTIN_F (BUILT_IN_MODF)
9054 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9055 CASE_BUILTIN_F (BUILT_IN_POW)
9056 CASE_BUILTIN_F (BUILT_IN_RINT)
9057 CASE_BUILTIN_F (BUILT_IN_ROUND)
9058 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9059 CASE_BUILTIN_F (BUILT_IN_SINH)
9060 CASE_BUILTIN_F (BUILT_IN_TANH)
9061 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9062 /* True if the 1st argument is nonnegative. */
9063 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9065 CASE_BUILTIN_F (BUILT_IN_FMAX)
9066 /* True if the 1st OR 2nd arguments are nonnegative. */
9067 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9068 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9070 CASE_BUILTIN_F (BUILT_IN_FMIN)
9071 /* True if the 1st AND 2nd arguments are nonnegative. */
9072 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9073 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9075 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9076 /* True if the 2nd argument is nonnegative. */
9077 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9081 #undef CASE_BUILTIN_F
9082 #undef CASE_BUILTIN_I
9086 /* ... fall through ... */
9089 if (truth_value_p (TREE_CODE (t)))
9090 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9094 /* We don't know the sign of `t', so be conservative and return false. */
9098 /* Return true when T is an address and is known to be nonzero.
9099 For floating point we further ensure that T is not denormal.
9100 Similar logic is present in nonzero_address in rtlanal.c. */
9103 tree_expr_nonzero_p (tree t)
9105 tree type = TREE_TYPE (t);
9107 /* Doing something useful for floating point would need more work. */
9108 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9111 switch (TREE_CODE (t))
9114 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9115 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9118 return !integer_zerop (t);
9121 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9123 /* In the presence of negative values it is hard
9124 to say anything definite. */
9125 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9126 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9128 /* One of the operands must be positive and the other non-negative. */
9129 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9130 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9135 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9137 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9138 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9144 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9145 tree outer_type = TREE_TYPE (t);
9147 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9148 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9153 /* Weak declarations may link to NULL. */
9154 if (DECL_P (TREE_OPERAND (t, 0)))
9155 return !DECL_WEAK (TREE_OPERAND (t, 0));
9156 /* Constants and all other cases are never weak. */
9160 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9161 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9164 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9165 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9168 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9170 /* When both operands are nonzero, then MAX must be too. */
9171 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9174 /* MAX where operand 0 is positive is positive. */
9175 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9177 /* MAX where operand 1 is positive is positive. */
9178 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9179 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9186 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9189 case NON_LVALUE_EXPR:
9190 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9193 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9194 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9202 /* Return true if `r' is known to be non-negative.
9203 Only handles constants at the moment. */
9206 rtl_expr_nonnegative_p (rtx r)
9208 switch (GET_CODE (r))
9211 return INTVAL (r) >= 0;
9214 if (GET_MODE (r) == VOIDmode)
9215 return CONST_DOUBLE_HIGH (r) >= 0;
9223 units = CONST_VECTOR_NUNITS (r);
9225 for (i = 0; i < units; ++i)
9227 elt = CONST_VECTOR_ELT (r, i);
9228 if (!rtl_expr_nonnegative_p (elt))
9237 /* These are always nonnegative. */
9246 /* See if we are applying CODE, a relational operator, to the highest
9247 or lowest possible integer of TYPE. If so, then the result is a
9248 compile-time constant. */
9251 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9256 enum tree_code code = *code_p;
9257 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
9259 if (TREE_CODE (op1) == INTEGER_CST
9260 && ! TREE_CONSTANT_OVERFLOW (op1)
9261 && width <= HOST_BITS_PER_WIDE_INT
9262 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9263 || POINTER_TYPE_P (TREE_TYPE (op1))))
9265 unsigned HOST_WIDE_INT signed_max;
9266 unsigned HOST_WIDE_INT max, min;
9268 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9270 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9272 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9278 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9281 if (TREE_INT_CST_HIGH (op1) == 0
9282 && TREE_INT_CST_LOW (op1) == max)
9286 return omit_one_operand (type, integer_zero_node, op0);
9292 return omit_one_operand (type, integer_one_node, op0);
9298 /* The GE_EXPR and LT_EXPR cases above are not normally
9299 reached because of previous transformations. */
9304 else if (TREE_INT_CST_HIGH (op1) == 0
9305 && TREE_INT_CST_LOW (op1) == max - 1)
9310 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9314 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9319 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9320 && TREE_INT_CST_LOW (op1) == min)
9324 return omit_one_operand (type, integer_zero_node, op0);
9331 return omit_one_operand (type, integer_one_node, op0);
9340 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9341 && TREE_INT_CST_LOW (op1) == min + 1)
9346 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9350 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9356 else if (TREE_INT_CST_HIGH (op1) == 0
9357 && TREE_INT_CST_LOW (op1) == signed_max
9358 && TYPE_UNSIGNED (TREE_TYPE (op1))
9359 /* signed_type does not work on pointer types. */
9360 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9362 /* The following case also applies to X < signed_max+1
9363 and X >= signed_max+1 because of previous transformations. */
9364 if (code == LE_EXPR || code == GT_EXPR)
9366 tree st0, st1, exp, retval;
9367 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9368 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
9370 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9372 fold_convert (st0, op0),
9373 fold_convert (st1, integer_zero_node));
9376 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9378 TREE_OPERAND (exp, 0),
9379 TREE_OPERAND (exp, 1));
9381 /* If we are in gimple form, then returning EXP would create
9382 non-gimple expressions. Clearing it is safe and ensures
9383 we do not allow a non-gimple expression to escape. */
9387 return (retval ? retval : exp);
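/* Editorial illustration: for unsigned int x on a 32-bit target,
   x <= 0x7fffffff becomes (int) x >= 0, and x > 0x7fffffff
   becomes (int) x < 0: an unsigned comparison against the signed
   maximum is just a sign-bit test in the corresponding signed
   type.  */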
9396 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9397 attempt to fold the expression to a constant without modifying TYPE,
9398 OP0 or OP1.
9400 If the expression could be simplified to a constant, then return
9401 the constant. If the expression would not be simplified to a
9402 constant, then return NULL_TREE.
9404 Note this is primarily designed to be called after gimplification
9405 of the tree structures and when at least one operand is a constant.
9406 As a result of those simplifying assumptions this routine is far
9407 simpler than the generic fold routine. */
9410 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9418 /* If this is a commutative operation, and ARG0 is a constant, move it
9419 to ARG1 to reduce the number of tests below. */
9420 if (commutative_tree_code (code)
9421 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9428 /* If either operand is a complex type, extract its real component. */
9429 if (TREE_CODE (op0) == COMPLEX_CST)
9430 subop0 = TREE_REALPART (op0);
9434 if (TREE_CODE (op1) == COMPLEX_CST)
9435 subop1 = TREE_REALPART (op1);
9439 /* Note if either argument is not a real or integer constant.
9440 With a few exceptions, simplification is limited to cases
9441 where both arguments are constants. */
9442 if ((TREE_CODE (subop0) != INTEGER_CST
9443 && TREE_CODE (subop0) != REAL_CST)
9444 || (TREE_CODE (subop1) != INTEGER_CST
9445 && TREE_CODE (subop1) != REAL_CST))
9451 /* (plus (address) (const_int)) is a constant. */
9452 if (TREE_CODE (op0) == PLUS_EXPR
9453 && TREE_CODE (op1) == INTEGER_CST
9454 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9455 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9456 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9458 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9460 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9461 const_binop (PLUS_EXPR, op1,
9462 TREE_OPERAND (op0, 1), 0));
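/* Editorial illustration: an expression that gimplification
   leaves in the shape (&obj + 4) + 8 is re-folded here to
   &obj + 12 by combining the two integer constants.  */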
9470 /* Both arguments are constants. Simplify. */
9471 tem = const_binop (code, op0, op1, 0);
9472 if (tem != NULL_TREE)
9474 /* The return value should always have the same type as
9475 the original expression. */
9476 if (TREE_TYPE (tem) != type)
9477 tem = fold_convert (type, tem);
9484 /* Fold &x - &x. This can happen from &x.foo - &x.
9485 This is unsafe for certain floats even in non-IEEE formats.
9486 In IEEE, it is unsafe because it does wrong for NaNs.
9487 Also note that operand_equal_p is always false if an
9488 operand is volatile. */
9489 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9490 return fold_convert (type, integer_zero_node);
9496 /* Special case multiplication or bitwise AND where one argument
9497 is zero. */
9498 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9499 return omit_one_operand (type, op1, op0);
9501 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9502 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9503 && real_zerop (op1))
9504 return omit_one_operand (type, op1, op0);
9509 /* Special case when we know the result will be all ones. */
9510 if (integer_all_onesp (op1))
9511 return omit_one_operand (type, op1, op0);
9515 case TRUNC_DIV_EXPR:
9516 case ROUND_DIV_EXPR:
9517 case FLOOR_DIV_EXPR:
9519 case EXACT_DIV_EXPR:
9520 case TRUNC_MOD_EXPR:
9521 case ROUND_MOD_EXPR:
9522 case FLOOR_MOD_EXPR:
9525 /* Division by zero is undefined. */
9526 if (integer_zerop (op1))
9529 if (TREE_CODE (op1) == REAL_CST
9530 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9531 && real_zerop (op1))
9537 if (INTEGRAL_TYPE_P (type)
9538 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9539 return omit_one_operand (type, op1, op0);
9544 if (INTEGRAL_TYPE_P (type)
9545 && TYPE_MAX_VALUE (type)
9546 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9547 return omit_one_operand (type, op1, op0);
9552 /* Optimize -1 >> x for arithmetic right shifts. */
9553 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
9554 return omit_one_operand (type, op0, op1);
9555 /* ... fall through ... */
9558 if (integer_zerop (op0))
9559 return omit_one_operand (type, op0, op1);
9561 /* Since a negative shift count is not well-defined, don't
9562 try to compute it in the compiler. */
9563 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
9570 /* -1 rotated either direction by any amount is still -1. */
9571 if (integer_all_onesp (op0))
9572 return omit_one_operand (type, op0, op1);
9574 /* 0 rotated either direction by any amount is still zero. */
9575 if (integer_zerop (op0))
9576 return omit_one_operand (type, op0, op1);
9582 return build_complex (type, op0, op1);
9591 /* If one arg is a real or integer constant, put it last. */
9592 if ((TREE_CODE (op0) == INTEGER_CST
9593 && TREE_CODE (op1) != INTEGER_CST)
9594 || (TREE_CODE (op0) == REAL_CST
9595 && TREE_CODE (op1) != REAL_CST))
9602 code = swap_tree_comparison (code);
9605 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9606 This transformation affects the cases which are handled in later
9607 optimizations involving comparisons with non-negative constants. */
9608 if (TREE_CODE (op1) == INTEGER_CST
9609 && TREE_CODE (op0) != INTEGER_CST
9610 && tree_int_cst_sgn (op1) > 0)
9616 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9621 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9629 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
9636 return fold_relational_const (code, type, op0, op1);
9639 /* This could probably be handled. */
9642 case TRUTH_AND_EXPR:
9643 /* If second arg is constant zero, result is zero, but first arg
9644 must be evaluated. */
9645 if (integer_zerop (op1))
9646 return omit_one_operand (type, op1, op0);
9647 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
9648 case will be handled here. */
9649 if (integer_zerop (op0))
9650 return omit_one_operand (type, op0, op1);
9651 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9652 return constant_boolean_node (true, type);
9656 /* If second arg is constant true, result is true, but we must
9657 evaluate first arg. */
9658 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
9659 return omit_one_operand (type, op1, op0);
9660 /* Likewise for first arg, but note this only occurs here for
9661 TRUTH_OR_EXPR. */
9662 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
9663 return omit_one_operand (type, op0, op1);
9664 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9665 return constant_boolean_node (false, type);
9668 case TRUTH_XOR_EXPR:
9669 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
9671 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
9672 return constant_boolean_node (x, type);
9681 /* Given the components of a unary expression CODE, TYPE and OP0,
9682 attempt to fold the expression to a constant without modifying
9683 TYPE or OP0.
9685 If the expression could be simplified to a constant, then return
9686 the constant. If the expression would not be simplified to a
9687 constant, then return NULL_TREE.
9689 Note this is primarily designed to be called after gimplification
9690 of the tree structures and when op0 is a constant. As a result
9691 of those simplifying assumptions this routine is far simpler than
9692 the generic fold routine. */
9695 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
9698 /* Make sure we have a suitable constant argument. */
9699 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
9703 if (TREE_CODE (op0) == COMPLEX_CST)
9704 subop = TREE_REALPART (op0);
9708 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
9717 case FIX_TRUNC_EXPR:
9718 case FIX_FLOOR_EXPR:
9720 return fold_convert_const (code, type, op0);
9723 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
9724 return fold_negate_const (op0, type);
9729 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
9730 return fold_abs_const (op0, type);
9735 if (TREE_CODE (op0) == INTEGER_CST)
9736 return fold_not_const (op0, type);
9741 if (TREE_CODE (op0) == COMPLEX_CST)
9742 return TREE_REALPART (op0);
9747 if (TREE_CODE (op0) == COMPLEX_CST)
9748 return TREE_IMAGPART (op0);
9753 if (TREE_CODE (op0) == COMPLEX_CST
9754 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
9755 return build_complex (type, TREE_REALPART (op0),
9756 negate_expr (TREE_IMAGPART (op0)));
9764 /* If EXP represents referencing an element in a constant string
9765 (either via pointer arithmetic or array indexing), return the
9766 tree representing the value accessed, otherwise return NULL. */
9769 fold_read_from_constant_string (tree exp)
9771 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
9773 tree exp1 = TREE_OPERAND (exp, 0);
9777 if (TREE_CODE (exp) == INDIRECT_REF)
9779 string = string_constant (exp1, &index);
9783 tree domain = TYPE_DOMAIN (TREE_TYPE (exp1));
9784 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
9785 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
9787 /* Optimize the special-case of a zero lower bound.
9789 We convert the low_bound to sizetype to avoid some problems
9790 with constant folding. (E.g. suppose the lower bound is 1,
9791 and its mode is QI. Without the conversion, (ARRAY
9792 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9793 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
9794 if (! integer_zerop (low_bound))
9795 index = size_diffop (index, fold_convert (sizetype, low_bound));
9801 && TREE_CODE (string) == STRING_CST
9802 && TREE_CODE (index) == INTEGER_CST
9803 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
9804 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
9806 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
9807 return fold_convert (TREE_TYPE (exp),
9808 build_int_2 ((TREE_STRING_POINTER (string)
9809 [TREE_INT_CST_LOW (index)]), 0));
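/* Editorial illustration: given static const char s[] = "abc",
   a reference such as s[1] (an ARRAY_REF with constant index 1)
   folds to the character constant 'b', provided the index is
   within TREE_STRING_LENGTH and the elements are single bytes.  */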
9814 /* Return the tree for neg (ARG0) when ARG0 is known to be either
9815 an integer constant or real constant.
9817 TYPE is the type of the result. */
9820 fold_negate_const (tree arg0, tree type)
9824 if (TREE_CODE (arg0) == INTEGER_CST)
9826 unsigned HOST_WIDE_INT low;
9828 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
9829 TREE_INT_CST_HIGH (arg0),
9831 t = build_int_2 (low, high);
9832 TREE_TYPE (t) = type;
9834 = (TREE_OVERFLOW (arg0)
9835 | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
9836 TREE_CONSTANT_OVERFLOW (t)
9837 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
9839 else if (TREE_CODE (arg0) == REAL_CST)
9840 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
9841 #ifdef ENABLE_CHECKING
9849 /* Return the tree for abs (ARG0) when ARG0 is known to be either
9850 an integer constant or real constant.
9852 TYPE is the type of the result. */
9855 fold_abs_const (tree arg0, tree type)
9859 if (TREE_CODE (arg0) == INTEGER_CST)
9861 /* If the value is unsigned, then the absolute value is
9862 the same as the ordinary value. */
9863 if (TYPE_UNSIGNED (type))
9865 /* Similarly, if the value is non-negative. */
9866 else if (INT_CST_LT (integer_minus_one_node, arg0))
9868 /* If the value is negative, then the absolute value is
9869 its negation. */
9870 else
9871 {
9872 unsigned HOST_WIDE_INT low;
9874 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
9875 TREE_INT_CST_HIGH (arg0),
9877 t = build_int_2 (low, high);
9878 TREE_TYPE (t) = type;
9880 = (TREE_OVERFLOW (arg0)
9881 | force_fit_type (t, overflow));
9882 TREE_CONSTANT_OVERFLOW (t)
9883 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
9887 else if (TREE_CODE (arg0) == REAL_CST)
9889 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
9890 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
9894 #ifdef ENABLE_CHECKING
9902 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
9903 constant. TYPE is the type of the result. */
9906 fold_not_const (tree arg0, tree type)
9910 if (TREE_CODE (arg0) == INTEGER_CST)
9912 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
9913 ~ TREE_INT_CST_HIGH (arg0));
9914 TREE_TYPE (t) = type;
9915 force_fit_type (t, 0);
9916 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
9917 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
9919 #ifdef ENABLE_CHECKING
/* Given CODE, a relational operator, the target type, TYPE, and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem;
  int invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     will check below in the one case that it matters.  */

  tem = NULL_TREE;
  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if the arguments permit;
     otherwise leave TEM as NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	tem = build_int_2 (tree_int_cst_equal (op0, op1), 0);
      else
	tem = build_int_2 ((TYPE_UNSIGNED (TREE_TYPE (op0))
			    ? INT_CST_LT_UNSIGNED (op0, op1)
			    : INT_CST_LT (op0, op1)),
			   0);
    }

  else if (code == EQ_EXPR && !TREE_SIDE_EFFECTS (op0)
	   && integer_zerop (op1) && tree_expr_nonzero_p (op0))
    tem = build_int_2 (0, 0);

  /* Two real constants can be compared explicitly.  */
  else if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      /* If either operand is a NaN, the result is false with two
	 exceptions: First, an NE_EXPR is true on NaNs, but that case
	 is already handled correctly since we will be inverting the
	 result for NE_EXPR.  Second, if we had inverted a LE_EXPR
	 or a GE_EXPR into a LT_EXPR, we must return true so that it
	 will be inverted into false.  */

      if (REAL_VALUE_ISNAN (TREE_REAL_CST (op0))
	  || REAL_VALUE_ISNAN (TREE_REAL_CST (op1)))
	tem = build_int_2 (invert && code == LT_EXPR, 0);

      else if (code == EQ_EXPR)
	tem = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (op0),
					      TREE_REAL_CST (op1)),
			   0);
      else
	tem = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (op0),
					     TREE_REAL_CST (op1)),
			   0);
    }

  if (tem == NULL_TREE)
    return NULL_TREE;

  if (invert)
    TREE_INT_CST_LOW (tem) ^= 1;

  TREE_TYPE (tem) = type;
  if (TREE_CODE (type) == BOOLEAN_TYPE)
    return lang_hooks.truthvalue_conversion (tem);
  return tem;
}
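/* For example, folding 2 > 3 on INTEGER_CSTs swaps the operands, tests
   3 < 2 with INT_CST_LT, and returns the constant 0, while any ordered
   comparison of REAL_CSTs involving a NaN collapses to false (or to true
   for NE_EXPR, via the inversion bookkeeping above).  Non-constant
   operands fall through and produce NULL_TREE.  */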
/* Build an expression for the address of T with type PTRTYPE.  Folds
   away INDIRECT_REF to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  if (TREE_CODE (t) == INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
	t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (TREE_CODE (base) == COMPONENT_REF
	     || TREE_CODE (base) == ARRAY_REF)
	base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
	TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

/* Build an expression for the address of T, using the natural pointer
   type of T.  */

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
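/* For example, asking for the address of the INDIRECT_REF *p folds
   straight back to p (with a NOP_EXPR cast if the pointer types differ)
   rather than building the degenerate tree &*p; in every other case an
   ADDR_EXPR is built, and the underlying base, found by stripping
   COMPONENT_REFs and ARRAY_REFs, is marked TREE_ADDRESSABLE so that
   later passes keep it in memory.  */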
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (lang_hooks.types_compatible_p (type, optype))
	return op;
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
	return build2 (ARRAY_REF, type, op, size_zero_node);
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  subtype = TREE_TYPE (sub);
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      sub = build_fold_indirect_ref (sub);
      return build2 (ARRAY_REF, type, sub, size_zero_node);
    }

  return build1 (INDIRECT_REF, type, t);
}
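/* For example, *&x simplifies to x when the two types are compatible,
   and *(foo *)&fooarray becomes fooarray[0] as the inline comments note;
   an indirection that matches none of the patterns falls through to a
   plain INDIRECT_REF node.  */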

#include "gt-fold-const.h"