1 /* Fold a constant sub-tree into a single node for the C compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision etc. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
47 #include "coretypes.h"
58 #include "langhooks.h"
61 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
62 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
63 static bool negate_mathfn_p (enum built_in_function);
64 static bool negate_expr_p (tree);
65 static tree negate_expr (tree);
66 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
67 static tree associate_trees (tree, tree, enum tree_code, tree);
68 static tree int_const_binop (enum tree_code, tree, tree, int);
69 static tree const_binop (enum tree_code, tree, tree, int);
70 static hashval_t size_htab_hash (const void *);
71 static int size_htab_eq (const void *, const void *);
72 static tree fold_convert_const (enum tree_code, tree, tree);
73 static tree fold_convert (tree, tree);
74 static enum tree_code invert_tree_comparison (enum tree_code);
75 static enum tree_code swap_tree_comparison (enum tree_code);
76 static int comparison_to_compcode (enum tree_code);
77 static enum tree_code compcode_to_comparison (int);
78 static int truth_value_p (enum tree_code);
79 static int operand_equal_for_comparison_p (tree, tree, tree);
80 static int twoval_comparison_p (tree, tree *, tree *, int *);
81 static tree eval_subst (tree, tree, tree, tree, tree);
82 static tree pedantic_omit_one_operand (tree, tree, tree);
83 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
84 static tree make_bit_field_ref (tree, tree, int, int, int);
85 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
86 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
87 enum machine_mode *, int *, int *,
89 static int all_ones_mask_p (tree, int);
90 static tree sign_bit_p (tree, tree);
91 static int simple_operand_p (tree);
92 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
93 static tree make_range (tree, int *, tree *, tree *);
94 static tree build_range_check (tree, tree, int, tree, tree);
95 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
97 static tree fold_range_test (tree);
98 static tree unextend (tree, int, int, tree);
99 static tree fold_truthop (enum tree_code, tree, tree, tree);
100 static tree optimize_minmax_comparison (tree);
101 static tree extract_muldiv (tree, tree, enum tree_code, tree);
102 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
103 static tree strip_compound_expr (tree, tree);
104 static int multiple_of_p (tree, tree, tree);
105 static tree constant_boolean_node (int, tree);
106 static int count_cond (tree, int);
107 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
109 static bool fold_real_zero_addition_p (tree, tree, int);
110 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
112 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
113 static bool reorder_operands_p (tree, tree);
114 static bool tree_swap_operands_p (tree, tree, bool);
116 /* The following constants represent a bit-based encoding of GCC's
117 comparison operators. This encoding simplifies transformations
118 on relational comparison operators, such as combining them under AND and OR. */
119 #define COMPCODE_FALSE 0
120 #define COMPCODE_LT 1
121 #define COMPCODE_EQ 2
122 #define COMPCODE_LE 3
123 #define COMPCODE_GT 4
124 #define COMPCODE_NE 5
125 #define COMPCODE_GE 6
126 #define COMPCODE_TRUE 7
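/* Editor's illustrative sketch, not part of the original source: each
   comparison is encoded as the OR of the LT (1), EQ (2) and GT (4) outcomes
   it allows, so combining two comparisons of the same operands is plain
   bit arithmetic.  */
#if 0 /* example only */
{
  int lt = comparison_to_compcode (LT_EXPR);          /* COMPCODE_LT == 1 */
  int eq = comparison_to_compcode (EQ_EXPR);          /* COMPCODE_EQ == 2 */
  int le = lt | eq;                                   /* COMPCODE_LE == 3 */
  enum tree_code code = compcode_to_comparison (le);  /* LE_EXPR */
}
#endif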
128 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
129 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
130 and SUM1. Then this yields nonzero if overflow occurred during the
131 addition.
133 Overflow occurs if A and B have the same sign, but A and SUM differ in
134 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
135 sign bit. */
136 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
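/* Editor's illustrative sketch, not part of the original source: if the
   4-bit wrap-around sum of 6 + 5 is -5, the addends agree in sign but the
   sum does not, so the macro reports overflow.  Only sign bits are
   inspected, so the test works at any width.  */
#if 0 /* example only */
{
  HOST_WIDE_INT a = 6, b = 5, sum = -5;  /* a wrapped two's complement sum */
  int overflowed = OVERFLOW_SUM_SIGN (a, b, sum);  /* nonzero */
}
#endif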
138 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
139 We do that by representing the two-word integer in 4 words, with only
140 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
141 number. The value of the word is LOWPART + HIGHPART * BASE. */
143 #define LOWPART(x) \
144 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
145 #define HIGHPART(x) \
146 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
147 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
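/* Editor's illustrative sketch, not part of the original source: assuming
   a 32-bit HOST_WIDE_INT, BASE is 1 << 16 and 0x12345678 splits into
   HIGHPART 0x1234 and LOWPART 0x5678; the reconstruction identity below
   holds at any width.  */
#if 0 /* example only */
{
  unsigned HOST_WIDE_INT x = 0x12345678;
  unsigned HOST_WIDE_INT rebuilt = LOWPART (x) + HIGHPART (x) * BASE;
  /* rebuilt == x always.  */
}
#endif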
149 /* Unpack a two-word integer into 4 words.
150 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
151 WORDS points to the array of HOST_WIDE_INTs. */
154 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
156 words[0] = LOWPART (low);
157 words[1] = HIGHPART (low);
158 words[2] = LOWPART (hi);
159 words[3] = HIGHPART (hi);
162 /* Pack an array of 4 words into a two-word integer.
163 WORDS points to the array of words.
164 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
167 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
170 *low = words[0] + words[1] * BASE;
171 *hi = words[2] + words[3] * BASE;
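/* Editor's illustrative sketch, not part of the original source: decode
   is the exact inverse of encode, so any doubleword value round-trips
   through the four-word representation unchanged.  */
#if 0 /* example only */
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  encode (words, 0x5678, -1);
  decode (words, &lo, &hi);
  /* Here lo == 0x5678 and hi == -1 again.  */
}
#endif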
174 /* Make the integer constant T valid for its type by setting to 0 or 1 all
175 the bits in the constant that don't belong in the type.
177 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
178 nonzero, a signed overflow has already occurred in calculating T, so
179 propagate it. */
182 force_fit_type (tree t, int overflow)
184 unsigned HOST_WIDE_INT low;
188 if (TREE_CODE (t) == REAL_CST)
190 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
191 Consider doing it via real_convert now. */
195 else if (TREE_CODE (t) != INTEGER_CST)
198 low = TREE_INT_CST_LOW (t);
199 high = TREE_INT_CST_HIGH (t);
201 if (POINTER_TYPE_P (TREE_TYPE (t))
202 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
205 prec = TYPE_PRECISION (TREE_TYPE (t));
207 /* First clear all bits that are beyond the type's precision. */
209 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
211 else if (prec > HOST_BITS_PER_WIDE_INT)
212 TREE_INT_CST_HIGH (t)
213 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
216 TREE_INT_CST_HIGH (t) = 0;
217 if (prec < HOST_BITS_PER_WIDE_INT)
218 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
221 /* Unsigned types do not suffer sign extension or overflow unless they
222 are a sizetype. */
223 if (TREE_UNSIGNED (TREE_TYPE (t))
224 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
225 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
228 /* If the value's sign bit is set, extend the sign. */
229 if (prec != 2 * HOST_BITS_PER_WIDE_INT
230 && (prec > HOST_BITS_PER_WIDE_INT
231 ? 0 != (TREE_INT_CST_HIGH (t)
233 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
234 : 0 != (TREE_INT_CST_LOW (t)
235 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
237 /* Value is negative:
238 set to 1 all the bits that are outside this type's precision. */
239 if (prec > HOST_BITS_PER_WIDE_INT)
240 TREE_INT_CST_HIGH (t)
241 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
244 TREE_INT_CST_HIGH (t) = -1;
245 if (prec < HOST_BITS_PER_WIDE_INT)
246 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
250 /* Return nonzero if signed overflow occurred. */
251 return
252 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
253 != 0);
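/* Editor's illustrative sketch, not part of the original source: fitting
   300 into an 8-bit unsigned type masks the value down to 300 & 0xFF == 44
   and reports no signed overflow (assumes unsigned_char_type_node has
   8-bit precision).  */
#if 0 /* example only */
{
  tree t;
  int ovf;

  t = build_int_2 (300, 0);
  TREE_TYPE (t) = unsigned_char_type_node;
  ovf = force_fit_type (t, 0);
  /* TREE_INT_CST_LOW (t) == 44, ovf == 0.  */
}
#endif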
256 /* Add two doubleword integers with doubleword result.
257 Each argument is given as two `HOST_WIDE_INT' pieces.
258 One argument is L1 and H1; the other, L2 and H2.
259 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
262 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
263 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
264 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
266 unsigned HOST_WIDE_INT l;
270 h = h1 + h2 + (l < l1);
274 return OVERFLOW_SUM_SIGN (h1, h2, h);
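/* Editor's illustrative sketch, not part of the original source: adding 1
   to a low word of all ones carries into the high word; the signs of the
   high words show no signed overflow, so 0 is returned.  */
#if 0 /* example only */
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;
  int ovf = add_double (~(unsigned HOST_WIDE_INT) 0, 0, 1, 0, &lo, &hi);
  /* lo == 0, hi == 1, ovf == 0.  */
}
#endif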
277 /* Negate a doubleword integer with doubleword result.
278 Return nonzero if the operation overflows, assuming it's signed.
279 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
280 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
283 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
284 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
290 return (*hv & h1) < 0;
300 /* Multiply two doubleword integers with doubleword result.
301 Return nonzero if the operation overflows, assuming it's signed.
302 Each argument is given as two `HOST_WIDE_INT' pieces.
303 One argument is L1 and H1; the other, L2 and H2.
304 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
307 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
308 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
309 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
311 HOST_WIDE_INT arg1[4];
312 HOST_WIDE_INT arg2[4];
313 HOST_WIDE_INT prod[4 * 2];
314 unsigned HOST_WIDE_INT carry;
316 unsigned HOST_WIDE_INT toplow, neglow;
317 HOST_WIDE_INT tophigh, neghigh;
319 encode (arg1, l1, h1);
320 encode (arg2, l2, h2);
322 memset (prod, 0, sizeof prod);
324 for (i = 0; i < 4; i++)
327 for (j = 0; j < 4; j++)
330 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
331 carry += arg1[i] * arg2[j];
332 /* Since prod[k] < 0xFFFF, this sum <= 0xFFFFFFFF. */
334 prod[k] = LOWPART (carry);
335 carry = HIGHPART (carry);
340 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
342 /* Check for overflow by calculating the top half of the answer in full;
343 it should agree with the low half's sign bit. */
344 decode (prod + 4, &toplow, &tophigh);
347 neg_double (l2, h2, &neglow, &neghigh);
348 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
352 neg_double (l1, h1, &neglow, &neghigh);
353 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
355 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
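/* Editor's illustrative sketch, not part of the original source: a small
   product leaves the discarded top half all zeros, agreeing with the low
   half's sign, so no overflow is reported.  */
#if 0 /* example only */
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;
  int ovf = mul_double (3, 0, 5, 0, &lo, &hi);
  /* lo == 15, hi == 0, ovf == 0.  */
}
#endif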
358 /* Shift the doubleword integer in L1, H1 left by COUNT places
359 keeping only PREC bits of result.
360 Shift right if COUNT is negative.
361 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
362 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
365 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
366 HOST_WIDE_INT count, unsigned int prec,
367 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
369 unsigned HOST_WIDE_INT signmask;
373 rshift_double (l1, h1, -count, prec, lv, hv, arith);
377 #ifdef SHIFT_COUNT_TRUNCATED
378 if (SHIFT_COUNT_TRUNCATED)
382 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
384 /* Shifting by the host word size is undefined according to the
385 ANSI standard, so we must handle this as a special case. */
389 else if (count >= HOST_BITS_PER_WIDE_INT)
391 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
396 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
397 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
401 /* Sign extend all bits that are beyond the precision. */
403 signmask = -((prec > HOST_BITS_PER_WIDE_INT
404 ? ((unsigned HOST_WIDE_INT) *hv
405 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
406 : (*lv >> (prec - 1))) & 1);
408 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
410 else if (prec >= HOST_BITS_PER_WIDE_INT)
412 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
413 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
418 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
419 *lv |= signmask << prec;
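/* Editor's illustrative sketch, not part of the original source: shifting
   the doubleword value 1 left by one full host word moves the bit from the
   low word into bit 0 of the high word.  */
#if 0 /* example only */
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;

  lshift_double (1, 0, HOST_BITS_PER_WIDE_INT,
                 2 * HOST_BITS_PER_WIDE_INT, &lo, &hi, 0);
  /* lo == 0, hi == 1.  */
}
#endif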
423 /* Shift the doubleword integer in L1, H1 right by COUNT places
424 keeping only PREC bits of result. COUNT must be positive.
425 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
426 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
429 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
430 HOST_WIDE_INT count, unsigned int prec,
431 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
434 unsigned HOST_WIDE_INT signmask;
437 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
440 #ifdef SHIFT_COUNT_TRUNCATED
441 if (SHIFT_COUNT_TRUNCATED)
445 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
447 /* Shifting by the host word size is undefined according to the
448 ANSI standard, so we must handle this as a special case. */
452 else if (count >= HOST_BITS_PER_WIDE_INT)
455 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
459 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
461 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
464 /* Zero / sign extend all bits that are beyond the precision. */
466 if (count >= (HOST_WIDE_INT) prec)
471 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
473 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
475 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
476 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
481 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
482 *lv |= signmask << (prec - count);
486 /* Rotate the doubleword integer in L1, H1 left by COUNT places
487 keeping only PREC bits of result.
488 Rotate right if COUNT is negative.
489 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
492 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
493 HOST_WIDE_INT count, unsigned int prec,
494 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
496 unsigned HOST_WIDE_INT s1l, s2l;
497 HOST_WIDE_INT s1h, s2h;
503 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
504 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
509 /* Rotate the doubleword integer in L1, H1 right by COUNT places
510 keeping only PREC bits of result. COUNT must be positive.
511 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
514 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
515 HOST_WIDE_INT count, unsigned int prec,
516 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
518 unsigned HOST_WIDE_INT s1l, s2l;
519 HOST_WIDE_INT s1h, s2h;
525 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
526 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
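/* Editor's illustrative sketch, not part of the original source: both
   rotates are composed from the two logical shifts above, so rotating left
   by one carries the doubleword's top bit around into bit 0.  */
#if 0 /* example only */
{
  unsigned HOST_WIDE_INT lo;
  HOST_WIDE_INT hi;
  HOST_WIDE_INT h1 = (HOST_WIDE_INT)
    ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));

  lrotate_double (0, h1, 1, 2 * HOST_BITS_PER_WIDE_INT, &lo, &hi);
  /* lo == 1, hi == 0.  */
}
#endif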
531 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
532 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
533 CODE is a tree code for a kind of division, one of
534 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
535 or EXACT_DIV_EXPR.
536 It controls how the quotient is rounded to an integer.
537 Return nonzero if the operation overflows.
538 UNS nonzero says do unsigned division. */
541 div_and_round_double (enum tree_code code, int uns,
542 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
543 HOST_WIDE_INT hnum_orig,
544 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
545 HOST_WIDE_INT hden_orig,
546 unsigned HOST_WIDE_INT *lquo,
547 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
551 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
552 HOST_WIDE_INT den[4], quo[4];
554 unsigned HOST_WIDE_INT work;
555 unsigned HOST_WIDE_INT carry = 0;
556 unsigned HOST_WIDE_INT lnum = lnum_orig;
557 HOST_WIDE_INT hnum = hnum_orig;
558 unsigned HOST_WIDE_INT lden = lden_orig;
559 HOST_WIDE_INT hden = hden_orig;
562 if (hden == 0 && lden == 0)
563 overflow = 1, lden = 1;
565 /* Calculate quotient sign and convert operands to unsigned. */
571 /* (minimum integer) / (-1) is the only overflow case. */
572 if (neg_double (lnum, hnum, &lnum, &hnum)
573 && ((HOST_WIDE_INT) lden & hden) == -1)
579 neg_double (lden, hden, &lden, &hden);
583 if (hnum == 0 && hden == 0)
584 { /* single precision */
586 /* This unsigned division rounds toward zero. */
592 { /* trivial case: dividend < divisor */
593 /* hden != 0 already checked. */
600 memset (quo, 0, sizeof quo);
602 memset (num, 0, sizeof num); /* to zero 9th element */
603 memset (den, 0, sizeof den);
605 encode (num, lnum, hnum);
606 encode (den, lden, hden);
608 /* Special code for when the divisor < BASE. */
609 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
611 /* hnum != 0 already checked. */
612 for (i = 4 - 1; i >= 0; i--)
614 work = num[i] + carry * BASE;
615 quo[i] = work / lden;
621 /* Full double precision division,
622 with thanks to Don Knuth's "Seminumerical Algorithms". */
623 int num_hi_sig, den_hi_sig;
624 unsigned HOST_WIDE_INT quo_est, scale;
626 /* Find the highest nonzero divisor digit. */
627 for (i = 4 - 1;; i--)
634 /* Ensure that the first digit of the divisor is at least BASE/2.
635 This is required by the quotient digit estimation algorithm. */
637 scale = BASE / (den[den_hi_sig] + 1);
639 { /* scale divisor and dividend */
641 for (i = 0; i <= 4 - 1; i++)
643 work = (num[i] * scale) + carry;
644 num[i] = LOWPART (work);
645 carry = HIGHPART (work);
650 for (i = 0; i <= 4 - 1; i++)
652 work = (den[i] * scale) + carry;
653 den[i] = LOWPART (work);
654 carry = HIGHPART (work);
655 if (den[i] != 0) den_hi_sig = i;
662 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
664 /* Guess the next quotient digit, quo_est, by dividing the first
665 two remaining dividend digits by the high-order divisor digit.
666 quo_est is never low and is at most 2 high. */
667 unsigned HOST_WIDE_INT tmp;
669 num_hi_sig = i + den_hi_sig + 1;
670 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
671 if (num[num_hi_sig] != den[den_hi_sig])
672 quo_est = work / den[den_hi_sig];
676 /* Refine quo_est so it's usually correct, and at most one high. */
677 tmp = work - quo_est * den[den_hi_sig];
679 && (den[den_hi_sig - 1] * quo_est
680 > (tmp * BASE + num[num_hi_sig - 2])))
683 /* Try QUO_EST as the quotient digit, by multiplying the
684 divisor by QUO_EST and subtracting from the remaining dividend.
685 Keep in mind that QUO_EST is the I - 1st digit. */
688 for (j = 0; j <= den_hi_sig; j++)
690 work = quo_est * den[j] + carry;
691 carry = HIGHPART (work);
692 work = num[i + j] - LOWPART (work);
693 num[i + j] = LOWPART (work);
694 carry += HIGHPART (work) != 0;
697 /* If quo_est was high by one, then num[i] went negative and
698 we need to correct things. */
699 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
702 carry = 0; /* add divisor back in */
703 for (j = 0; j <= den_hi_sig; j++)
705 work = num[i + j] + den[j] + carry;
706 carry = HIGHPART (work);
707 num[i + j] = LOWPART (work);
710 num[num_hi_sig] += carry;
713 /* Store the quotient digit. */
718 decode (quo, lquo, hquo);
721 /* If result is negative, make it so. */
723 neg_double (*lquo, *hquo, lquo, hquo);
725 /* compute trial remainder: rem = num - (quo * den) */
726 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
727 neg_double (*lrem, *hrem, lrem, hrem);
728 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
733 case TRUNC_MOD_EXPR: /* round toward zero */
734 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
738 case FLOOR_MOD_EXPR: /* round toward negative infinity */
739 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
742 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
750 case CEIL_MOD_EXPR: /* round toward positive infinity */
751 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
753 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
761 case ROUND_MOD_EXPR: /* round to closest integer */
763 unsigned HOST_WIDE_INT labs_rem = *lrem;
764 HOST_WIDE_INT habs_rem = *hrem;
765 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
766 HOST_WIDE_INT habs_den = hden, htwice;
768 /* Get absolute values. */
770 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
772 neg_double (lden, hden, &labs_den, &habs_den);
774 /* If (2 * abs (lrem) >= abs (lden)) */
775 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
776 labs_rem, habs_rem, <wice, &htwice);
778 if (((unsigned HOST_WIDE_INT) habs_den
779 < (unsigned HOST_WIDE_INT) htwice)
780 || (((unsigned HOST_WIDE_INT) habs_den
781 == (unsigned HOST_WIDE_INT) htwice)
782 && (labs_den < ltwice)))
786 add_double (*lquo, *hquo,
787 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
790 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
802 /* Compute true remainder: rem = num - (quo * den) */
803 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
804 neg_double (*lrem, *hrem, lrem, hrem);
805 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
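/* Editor's illustrative sketch, not part of the original source: the
   rounding codes differ only in the adjustment made above.  Dividing -7 by
   2 gives quotient -3, remainder -1 under TRUNC_DIV_EXPR, but quotient -4,
   remainder 1 under FLOOR_DIV_EXPR.  */
#if 0 /* example only */
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (FLOOR_DIV_EXPR, 0,
                        (unsigned HOST_WIDE_INT) -7, -1,  /* num == -7 */
                        2, 0,                             /* den == 2 */
                        &lquo, &hquo, &lrem, &hrem);
  /* Quotient == -4 (lquo == (unsigned HOST_WIDE_INT) -4, hquo == -1),
     remainder == 1.  */
}
#endif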
809 /* Return true if the built-in mathematical function specified by CODE
810 preserves the sign of its argument, i.e. -f(x) == f(-x). */
813 negate_mathfn_p (enum built_in_function code)
837 /* Determine whether an expression T can be cheaply negated using
838 the function negate_expr. */
841 negate_expr_p (tree t)
843 unsigned HOST_WIDE_INT val;
850 type = TREE_TYPE (t);
853 switch (TREE_CODE (t))
856 if (TREE_UNSIGNED (type) || ! flag_trapv)
859 /* Check that -CST will not overflow type. */
860 prec = TYPE_PRECISION (type);
861 if (prec > HOST_BITS_PER_WIDE_INT)
863 if (TREE_INT_CST_LOW (t) != 0)
865 prec -= HOST_BITS_PER_WIDE_INT;
866 val = TREE_INT_CST_HIGH (t);
869 val = TREE_INT_CST_LOW (t);
870 if (prec < HOST_BITS_PER_WIDE_INT)
871 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
872 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
879 return negate_expr_p (TREE_REALPART (t))
880 && negate_expr_p (TREE_IMAGPART (t));
883 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
885 /* -(A + B) -> (-B) - A. */
886 if (negate_expr_p (TREE_OPERAND (t, 1))
887 && reorder_operands_p (TREE_OPERAND (t, 0),
888 TREE_OPERAND (t, 1)))
890 /* -(A + B) -> (-A) - B. */
891 return negate_expr_p (TREE_OPERAND (t, 0));
894 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
895 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
896 && reorder_operands_p (TREE_OPERAND (t, 0),
897 TREE_OPERAND (t, 1));
900 if (TREE_UNSIGNED (TREE_TYPE (t)))
906 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
907 return negate_expr_p (TREE_OPERAND (t, 1))
908 || negate_expr_p (TREE_OPERAND (t, 0));
912 /* Negate -((double)float) as (double)(-float). */
913 if (TREE_CODE (type) == REAL_TYPE)
915 tree tem = strip_float_extensions (t);
917 return negate_expr_p (tem);
922 /* Negate -f(x) as f(-x). */
923 if (negate_mathfn_p (builtin_mathfn_code (t)))
924 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
933 /* Given T, an expression, return the negation of T. Allow for T to be
934 null, in which case return null. */
945 type = TREE_TYPE (t);
948 switch (TREE_CODE (t))
952 unsigned HOST_WIDE_INT low;
954 int overflow = neg_double (TREE_INT_CST_LOW (t),
955 TREE_INT_CST_HIGH (t),
957 tem = build_int_2 (low, high);
958 TREE_TYPE (tem) = type;
961 | force_fit_type (tem, overflow && !TREE_UNSIGNED (type)));
962 TREE_CONSTANT_OVERFLOW (tem)
963 = TREE_OVERFLOW (tem) | TREE_CONSTANT_OVERFLOW (t);
965 if (! TREE_OVERFLOW (tem)
966 || TREE_UNSIGNED (type)
972 tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
973 /* Two's complement FP formats, such as c4x, may overflow. */
974 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
975 return fold_convert (type, tem);
980 tree rpart = negate_expr (TREE_REALPART (t));
981 tree ipart = negate_expr (TREE_IMAGPART (t));
983 if ((TREE_CODE (rpart) == REAL_CST
984 && TREE_CODE (ipart) == REAL_CST)
985 || (TREE_CODE (rpart) == INTEGER_CST
986 && TREE_CODE (ipart) == INTEGER_CST))
987 return build_complex (type, rpart, ipart);
992 return fold_convert (type, TREE_OPERAND (t, 0));
995 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
997 /* -(A + B) -> (-B) - A. */
998 if (negate_expr_p (TREE_OPERAND (t, 1))
999 && reorder_operands_p (TREE_OPERAND (t, 0),
1000 TREE_OPERAND (t, 1)))
1001 return fold_convert (type,
1002 fold (build (MINUS_EXPR, TREE_TYPE (t),
1003 negate_expr (TREE_OPERAND (t, 1)),
1004 TREE_OPERAND (t, 0))));
1005 /* -(A + B) -> (-A) - B. */
1006 if (negate_expr_p (TREE_OPERAND (t, 0)))
1007 return fold_convert (type,
1008 fold (build (MINUS_EXPR, TREE_TYPE (t),
1009 negate_expr (TREE_OPERAND (t, 0)),
1010 TREE_OPERAND (t, 1))));
1015 /* - (A - B) -> B - A */
1016 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1017 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1018 return fold_convert (type,
1019 fold (build (MINUS_EXPR, TREE_TYPE (t),
1020 TREE_OPERAND (t, 1),
1021 TREE_OPERAND (t, 0))));
1025 if (TREE_UNSIGNED (TREE_TYPE (t)))
1031 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1033 tem = TREE_OPERAND (t, 1);
1034 if (negate_expr_p (tem))
1035 return fold_convert (type,
1036 fold (build (TREE_CODE (t), TREE_TYPE (t),
1037 TREE_OPERAND (t, 0),
1038 negate_expr (tem))));
1039 tem = TREE_OPERAND (t, 0);
1040 if (negate_expr_p (tem))
1041 return fold_convert (type,
1042 fold (build (TREE_CODE (t), TREE_TYPE (t),
1044 TREE_OPERAND (t, 1))));
1049 /* Convert -((double)float) into (double)(-float). */
1050 if (TREE_CODE (type) == REAL_TYPE)
1052 tem = strip_float_extensions (t);
1053 if (tem != t && negate_expr_p (tem))
1054 return fold_convert (type, negate_expr (tem));
1059 /* Negate -f(x) as f(-x). */
1060 if (negate_mathfn_p (builtin_mathfn_code (t))
1061 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1063 tree fndecl, arg, arglist;
1065 fndecl = get_callee_fndecl (t);
1066 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1067 arglist = build_tree_list (NULL_TREE, arg);
1068 return build_function_call_expr (fndecl, arglist);
1076 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1077 return fold_convert (type, tem);
1080 /* Split a tree IN into constant, literal and variable parts that could be
1081 combined with CODE to make IN. "constant" means an expression with
1082 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1083 commutative arithmetic operation. Store the constant part into *CONP,
1084 the literal in *LITP and return the variable part. If a part isn't
1085 present, set it to null. If the tree does not decompose in this way,
1086 return the entire tree as the variable part and the other parts as null.
1088 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1089 case, we negate an operand that was subtracted, unless it is a
1090 literal, in which case we use *MINUS_LITP instead.
1092 If NEGATE_P is true, we are negating all of IN, again except a literal
1093 for which we use *MINUS_LITP instead.
1095 If IN is itself a literal or constant, return it as appropriate.
1097 Note that we do not guarantee that any of the three values will be the
1098 same type as IN, but they will have the same signedness and mode. */
1101 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1102 tree *minus_litp, int negate_p)
1110 /* Strip any conversions that don't change the machine mode or signedness. */
1111 STRIP_SIGN_NOPS (in);
1113 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1115 else if (TREE_CODE (in) == code
1116 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1117 /* We can associate addition and subtraction together (even
1118 though the C standard doesn't say so) for integers because
1119 the value is not affected. For reals, the value might be
1120 affected, so we can't. */
1121 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1122 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1124 tree op0 = TREE_OPERAND (in, 0);
1125 tree op1 = TREE_OPERAND (in, 1);
1126 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1127 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1129 /* First see if either of the operands is a literal, then a constant. */
1130 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1131 *litp = op0, op0 = 0;
1132 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1133 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1135 if (op0 != 0 && TREE_CONSTANT (op0))
1136 *conp = op0, op0 = 0;
1137 else if (op1 != 0 && TREE_CONSTANT (op1))
1138 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1140 /* If we haven't dealt with either operand, this is not a case we can
1141 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1142 if (op0 != 0 && op1 != 0)
1147 var = op1, neg_var_p = neg1_p;
1149 /* Now do any needed negations. */
1151 *minus_litp = *litp, *litp = 0;
1153 *conp = negate_expr (*conp);
1155 var = negate_expr (var);
1157 else if (TREE_CONSTANT (in))
1165 *minus_litp = *litp, *litp = 0;
1166 else if (*minus_litp)
1167 *litp = *minus_litp, *minus_litp = 0;
1168 *conp = negate_expr (*conp);
1169 var = negate_expr (var);
1175 /* Re-associate trees split by the above function. T1 and T2 are either
1176 expressions to associate or null. Return the new expression, if any. If
1177 we build an operation, do it in TYPE and with CODE. */
1180 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1187 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1188 try to fold this since we will have infinite recursion. But do
1189 deal with any NEGATE_EXPRs. */
1190 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1191 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1193 if (code == PLUS_EXPR)
1195 if (TREE_CODE (t1) == NEGATE_EXPR)
1196 return build (MINUS_EXPR, type, fold_convert (type, t2),
1197 fold_convert (type, TREE_OPERAND (t1, 0)));
1198 else if (TREE_CODE (t2) == NEGATE_EXPR)
1199 return build (MINUS_EXPR, type, fold_convert (type, t1),
1200 fold_convert (type, TREE_OPERAND (t2, 0)));
1202 return build (code, type, fold_convert (type, t1),
1203 fold_convert (type, t2));
1206 return fold (build (code, type, fold_convert (type, t1),
1207 fold_convert (type, t2)));
1210 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1211 to produce a new constant.
1213 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1216 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1218 unsigned HOST_WIDE_INT int1l, int2l;
1219 HOST_WIDE_INT int1h, int2h;
1220 unsigned HOST_WIDE_INT low;
1222 unsigned HOST_WIDE_INT garbagel;
1223 HOST_WIDE_INT garbageh;
1225 tree type = TREE_TYPE (arg1);
1226 int uns = TREE_UNSIGNED (type);
1228 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1230 int no_overflow = 0;
1232 int1l = TREE_INT_CST_LOW (arg1);
1233 int1h = TREE_INT_CST_HIGH (arg1);
1234 int2l = TREE_INT_CST_LOW (arg2);
1235 int2h = TREE_INT_CST_HIGH (arg2);
1240 low = int1l | int2l, hi = int1h | int2h;
1244 low = int1l ^ int2l, hi = int1h ^ int2h;
1248 low = int1l & int2l, hi = int1h & int2h;
1254 /* It's unclear from the C standard whether shifts can overflow.
1255 The following code ignores overflow; perhaps a C standard
1256 interpretation ruling is needed. */
1257 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1265 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1270 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1274 neg_double (int2l, int2h, &low, &hi);
1275 add_double (int1l, int1h, low, hi, &low, &hi);
1276 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1280 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1283 case TRUNC_DIV_EXPR:
1284 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1285 case EXACT_DIV_EXPR:
1286 /* This is a shortcut for a common special case. */
1287 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1288 && ! TREE_CONSTANT_OVERFLOW (arg1)
1289 && ! TREE_CONSTANT_OVERFLOW (arg2)
1290 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1292 if (code == CEIL_DIV_EXPR)
1295 low = int1l / int2l, hi = 0;
1299 /* ... fall through ... */
1301 case ROUND_DIV_EXPR:
1302 if (int2h == 0 && int2l == 1)
1304 low = int1l, hi = int1h;
1307 if (int1l == int2l && int1h == int2h
1308 && ! (int1l == 0 && int1h == 0))
1313 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1314 &low, &hi, &garbagel, &garbageh);
1317 case TRUNC_MOD_EXPR:
1318 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1319 /* This is a shortcut for a common special case. */
1320 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1321 && ! TREE_CONSTANT_OVERFLOW (arg1)
1322 && ! TREE_CONSTANT_OVERFLOW (arg2)
1323 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1325 if (code == CEIL_MOD_EXPR)
1327 low = int1l % int2l, hi = 0;
1331 /* ... fall through ... */
1333 case ROUND_MOD_EXPR:
1334 overflow = div_and_round_double (code, uns,
1335 int1l, int1h, int2l, int2h,
1336 &garbagel, &garbageh, &low, &hi);
1342 low = (((unsigned HOST_WIDE_INT) int1h
1343 < (unsigned HOST_WIDE_INT) int2h)
1344 || (((unsigned HOST_WIDE_INT) int1h
1345 == (unsigned HOST_WIDE_INT) int2h)
1348 low = (int1h < int2h
1349 || (int1h == int2h && int1l < int2l));
1351 if (low == (code == MIN_EXPR))
1352 low = int1l, hi = int1h;
1354 low = int2l, hi = int2h;
1361 /* If this is for a sizetype, can be represented as one (signed)
1362 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1363 its results. */
1364 if (is_sizetype
1365 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1366 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1367 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1368 return size_int_type_wide (low, type);
1371 t = build_int_2 (low, hi);
1372 TREE_TYPE (t) = TREE_TYPE (arg1);
1377 ? (!uns || is_sizetype) && overflow
1378 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1380 | TREE_OVERFLOW (arg1)
1381 | TREE_OVERFLOW (arg2));
1383 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1384 So check if force_fit_type truncated the value. */
1386 && ! TREE_OVERFLOW (t)
1387 && (TREE_INT_CST_HIGH (t) != hi
1388 || TREE_INT_CST_LOW (t) != low))
1389 TREE_OVERFLOW (t) = 1;
1391 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1392 | TREE_CONSTANT_OVERFLOW (arg1)
1393 | TREE_CONSTANT_OVERFLOW (arg2));
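/* Editor's illustrative sketch, not part of the original source: folding
   the constant expression 2 + 3 down to a single INTEGER_CST.  */
#if 0 /* example only */
{
  tree two, three, five;

  two = build_int_2 (2, 0);
  three = build_int_2 (3, 0);
  TREE_TYPE (two) = TREE_TYPE (three) = integer_type_node;
  five = int_const_binop (PLUS_EXPR, two, three, 0);
  /* TREE_INT_CST_LOW (five) == 5 and no overflow flags are set.  */
}
#endif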
1397 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1398 constant. We assume ARG1 and ARG2 have the same data type, or at least
1399 are the same kind of constant and the same machine mode.
1401 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1404 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1409 if (TREE_CODE (arg1) == INTEGER_CST)
1410 return int_const_binop (code, arg1, arg2, notrunc);
1412 if (TREE_CODE (arg1) == REAL_CST)
1414 enum machine_mode mode;
1417 REAL_VALUE_TYPE value;
1420 d1 = TREE_REAL_CST (arg1);
1421 d2 = TREE_REAL_CST (arg2);
1423 type = TREE_TYPE (arg1);
1424 mode = TYPE_MODE (type);
1426 /* Don't perform operation if we honor signaling NaNs and
1427 either operand is a NaN. */
1428 if (HONOR_SNANS (mode)
1429 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1432 /* Don't perform operation if it would raise a division
1433 by zero exception. */
1434 if (code == RDIV_EXPR
1435 && REAL_VALUES_EQUAL (d2, dconst0)
1436 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1439 /* If either operand is a NaN, just return it. Otherwise, set up
1440 for floating-point trap; we return an overflow. */
1441 if (REAL_VALUE_ISNAN (d1))
1443 else if (REAL_VALUE_ISNAN (d2))
1446 REAL_ARITHMETIC (value, code, d1, d2);
1448 t = build_real (type, real_value_truncate (mode, value));
1451 = (force_fit_type (t, 0)
1452 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1453 TREE_CONSTANT_OVERFLOW (t)
1455 | TREE_CONSTANT_OVERFLOW (arg1)
1456 | TREE_CONSTANT_OVERFLOW (arg2);
1459 if (TREE_CODE (arg1) == COMPLEX_CST)
1461 tree type = TREE_TYPE (arg1);
1462 tree r1 = TREE_REALPART (arg1);
1463 tree i1 = TREE_IMAGPART (arg1);
1464 tree r2 = TREE_REALPART (arg2);
1465 tree i2 = TREE_IMAGPART (arg2);
1471 t = build_complex (type,
1472 const_binop (PLUS_EXPR, r1, r2, notrunc),
1473 const_binop (PLUS_EXPR, i1, i2, notrunc));
1477 t = build_complex (type,
1478 const_binop (MINUS_EXPR, r1, r2, notrunc),
1479 const_binop (MINUS_EXPR, i1, i2, notrunc));
1483 t = build_complex (type,
1484 const_binop (MINUS_EXPR,
1485 const_binop (MULT_EXPR,
1487 const_binop (MULT_EXPR,
1490 const_binop (PLUS_EXPR,
1491 const_binop (MULT_EXPR,
1493 const_binop (MULT_EXPR,
1501 = const_binop (PLUS_EXPR,
1502 const_binop (MULT_EXPR, r2, r2, notrunc),
1503 const_binop (MULT_EXPR, i2, i2, notrunc),
1506 t = build_complex (type,
1508 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1509 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1510 const_binop (PLUS_EXPR,
1511 const_binop (MULT_EXPR, r1, r2,
1513 const_binop (MULT_EXPR, i1, i2,
1516 magsquared, notrunc),
1518 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1519 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1520 const_binop (MINUS_EXPR,
1521 const_binop (MULT_EXPR, i1, r2,
1523 const_binop (MULT_EXPR, r1, i2,
1526 magsquared, notrunc));
1538 /* These are the hash table functions for the hash table of INTEGER_CST
1539 nodes of a sizetype. */
1541 /* Return the hash code for X, an INTEGER_CST. */
1544 size_htab_hash (const void *x)
1548 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1549 ^ htab_hash_pointer (TREE_TYPE (t))
1550 ^ (TREE_OVERFLOW (t) << 20));
1553 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1554 is the same as that given by *Y, also an INTEGER_CST tree node. */
1557 size_htab_eq (const void *x, const void *y)
1562 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1563 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1564 && TREE_TYPE (xt) == TREE_TYPE (yt)
1565 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1568 /* Return an INTEGER_CST whose low-order HOST_BITS_PER_WIDE_INT
1569 bits are given by NUMBER, of the sizetype represented by KIND. */
1572 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1574 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1577 /* Likewise, but the desired type is specified explicitly. */
1579 static GTY (()) tree new_const;
1580 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1584 size_int_type_wide (HOST_WIDE_INT number, tree type)
1590 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1591 new_const = make_node (INTEGER_CST);
1594 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1595 hash table, we return the value from the hash table. Otherwise, we
1596 place that in the hash table and make a new node for the next time. */
1597 TREE_INT_CST_LOW (new_const) = number;
1598 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1599 TREE_TYPE (new_const) = type;
1600 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1601 = force_fit_type (new_const, 0);
1603 slot = htab_find_slot (size_htab, new_const, INSERT);
1609 new_const = make_node (INTEGER_CST);
1613 return (tree) *slot;
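/* Editor's illustrative sketch, not part of the original source: because
   equal size constants are shared through the hash table, two requests for
   the same value return the same node and can be compared by pointer.  */
#if 0 /* example only */
{
  tree a = size_int_wide (4, SIZETYPE);
  tree b = size_int_wide (4, SIZETYPE);
  /* a == b: the second call found the first node in size_htab.  */
}
#endif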
1616 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1617 is a tree code. The type of the result is taken from the operands.
1618 Both must be the same integer type, and it must be a sizetype.
1619 If the operands are constant, so is the result. */
1622 size_binop (enum tree_code code, tree arg0, tree arg1)
1624 tree type = TREE_TYPE (arg0);
1626 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1627 || type != TREE_TYPE (arg1))
1630 /* Handle the special case of two integer constants faster. */
1631 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1633 /* And some specific cases even faster than that. */
1634 if (code == PLUS_EXPR && integer_zerop (arg0))
1636 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1637 && integer_zerop (arg1))
1639 else if (code == MULT_EXPR && integer_onep (arg0))
1642 /* Handle general case of two integer constants. */
1643 return int_const_binop (code, arg0, arg1, 0);
1646 if (arg0 == error_mark_node || arg1 == error_mark_node)
1647 return error_mark_node;
1649 return fold (build (code, type, arg0, arg1));
1652 /* Given two values, either both of sizetype or both of bitsizetype,
1653 compute the difference between the two values. Return the value
1654 in a signed type corresponding to the type of the operands.
1657 size_diffop (tree arg0, tree arg1)
1659 tree type = TREE_TYPE (arg0);
1662 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1663 || type != TREE_TYPE (arg1))
1666 /* If the type is already signed, just do the simple thing. */
1667 if (! TREE_UNSIGNED (type))
1668 return size_binop (MINUS_EXPR, arg0, arg1);
1670 ctype = (type == bitsizetype || type == ubitsizetype
1671 ? sbitsizetype : ssizetype);
1673 /* If either operand is not a constant, do the conversions to the signed
1674 type and subtract. The hardware will do the right thing with any
1675 overflow in the subtraction. */
1676 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1677 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1678 fold_convert (ctype, arg1));
1680 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1681 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1682 overflow) and negate (which can't either). Special-case a result
1683 of zero while we're here. */
1684 if (tree_int_cst_equal (arg0, arg1))
1685 return fold_convert (ctype, integer_zero_node);
1686 else if (tree_int_cst_lt (arg1, arg0))
1687 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1689 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1690 fold_convert (ctype, size_binop (MINUS_EXPR,
1695 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1696 type TYPE. If no simplification can be done return NULL_TREE. */
1699 fold_convert_const (enum tree_code code, tree type, tree arg1)
1704 if (TREE_TYPE (arg1) == type)
1707 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1709 if (TREE_CODE (arg1) == INTEGER_CST)
1711 /* If we would build a constant wider than GCC supports,
1712 leave the conversion unfolded. */
1713 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1716 /* If we are trying to make a sizetype for a small integer, use
1717 size_int to pick up cached types to reduce duplicate nodes. */
1718 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1719 && !TREE_CONSTANT_OVERFLOW (arg1)
1720 && compare_tree_int (arg1, 10000) < 0)
1721 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1723 /* Given an integer constant, make a new constant with the new type,
1724 appropriately sign-extended or truncated. */
1725 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1726 TREE_INT_CST_HIGH (arg1));
1727 TREE_TYPE (t) = type;
1728 /* Indicate an overflow if (1) ARG1 already overflowed,
1729 or (2) force_fit_type indicates an overflow.
1730 Tell force_fit_type that an overflow has already occurred
1731 if ARG1 is a too-large unsigned value and T is signed.
1732 But don't indicate an overflow if converting a pointer. */
1734 = ((force_fit_type (t,
1735 (TREE_INT_CST_HIGH (arg1) < 0
1736 && (TREE_UNSIGNED (type)
1737 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1738 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1739 || TREE_OVERFLOW (arg1));
1740 TREE_CONSTANT_OVERFLOW (t)
1741 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1744 else if (TREE_CODE (arg1) == REAL_CST)
1746 /* The following code implements the floating point to integer
1747 conversion rules required by the Java Language Specification,
1748 that IEEE NaNs are mapped to zero and values that overflow
1749 the target precision saturate, i.e. values greater than
1750 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1751 are mapped to INT_MIN. These semantics are allowed by the
1752 C and C++ standards that simply state that the behavior of
1753 FP-to-integer conversion is unspecified upon overflow. */
1755 HOST_WIDE_INT high, low;
1758 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1762 case FIX_TRUNC_EXPR:
1763 real_trunc (&r, VOIDmode, &x);
1767 real_ceil (&r, VOIDmode, &x);
1770 case FIX_FLOOR_EXPR:
1771 real_floor (&r, VOIDmode, &x);
1778 /* If R is NaN, return zero and show we have an overflow. */
1779 if (REAL_VALUE_ISNAN (r))
1786 /* See if R is less than the lower bound or greater than the
1787 upper bound. */
1791 tree lt = TYPE_MIN_VALUE (type);
1792 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1793 if (REAL_VALUES_LESS (r, l))
1796 high = TREE_INT_CST_HIGH (lt);
1797 low = TREE_INT_CST_LOW (lt);
1803 tree ut = TYPE_MAX_VALUE (type);
1806 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1807 if (REAL_VALUES_LESS (u, r))
1810 high = TREE_INT_CST_HIGH (ut);
1811 low = TREE_INT_CST_LOW (ut);
1817 REAL_VALUE_TO_INT (&low, &high, r);
1819 t = build_int_2 (low, high);
1820 TREE_TYPE (t) = type;
1822 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1823 TREE_CONSTANT_OVERFLOW (t)
1824 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1828 else if (TREE_CODE (type) == REAL_TYPE)
1830 if (TREE_CODE (arg1) == INTEGER_CST)
1831 return build_real_from_int_cst (type, arg1);
1832 if (TREE_CODE (arg1) == REAL_CST)
1834 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1836 /* We make a copy of ARG1 so that we don't modify an
1837 existing constant tree. */
1838 t = copy_node (arg1);
1839 TREE_TYPE (t) = type;
1843 t = build_real (type,
1844 real_value_truncate (TYPE_MODE (type),
1845 TREE_REAL_CST (arg1)));
1848 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1849 TREE_CONSTANT_OVERFLOW (t)
1850 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1857 /* Convert expression ARG to type TYPE. Used by the middle-end for
1858 simple conversions in preference to calling the front-end's convert. */
1861 fold_convert (tree type, tree arg)
1863 tree orig = TREE_TYPE (arg);
1869 if (TREE_CODE (arg) == ERROR_MARK
1870 || TREE_CODE (type) == ERROR_MARK
1871 || TREE_CODE (orig) == ERROR_MARK)
1872 return error_mark_node;
1874 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1875 return fold (build1 (NOP_EXPR, type, arg));
1877 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1879 if (TREE_CODE (arg) == INTEGER_CST)
1881 tem = fold_convert_const (NOP_EXPR, type, arg);
1882 if (tem != NULL_TREE)
1885 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1886 return fold (build1 (NOP_EXPR, type, arg));
1887 if (TREE_CODE (orig) == COMPLEX_TYPE)
1889 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1890 return fold_convert (type, tem);
1892 if (TREE_CODE (orig) == VECTOR_TYPE
1893 && GET_MODE_SIZE (TYPE_MODE (type))
1894 == GET_MODE_SIZE (TYPE_MODE (orig)))
1895 return fold (build1 (NOP_EXPR, type, arg));
1897 else if (TREE_CODE (type) == REAL_TYPE)
1899 if (TREE_CODE (arg) == INTEGER_CST)
1901 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1902 if (tem != NULL_TREE)
1905 else if (TREE_CODE (arg) == REAL_CST)
1907 tem = fold_convert_const (NOP_EXPR, type, arg);
1908 if (tem != NULL_TREE)
1912 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1913 return fold (build1 (FLOAT_EXPR, type, arg));
1914 if (TREE_CODE (orig) == REAL_TYPE)
1915 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1917 if (TREE_CODE (orig) == COMPLEX_TYPE)
1919 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1920 return fold_convert (type, tem);
1923 else if (TREE_CODE (type) == COMPLEX_TYPE)
1925 if (INTEGRAL_TYPE_P (orig)
1926 || POINTER_TYPE_P (orig)
1927 || TREE_CODE (orig) == REAL_TYPE)
1928 return build (COMPLEX_EXPR, type,
1929 fold_convert (TREE_TYPE (type), arg),
1930 fold_convert (TREE_TYPE (type), integer_zero_node));
1931 if (TREE_CODE (orig) == COMPLEX_TYPE)
1935 if (TREE_CODE (arg) == COMPLEX_EXPR)
1937 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1938 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1939 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1942 arg = save_expr (arg);
1943 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1944 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1945 rpart = fold_convert (TREE_TYPE (type), rpart);
1946 ipart = fold_convert (TREE_TYPE (type), ipart);
1947 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1950 else if (TREE_CODE (type) == VECTOR_TYPE)
1952 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1953 && GET_MODE_SIZE (TYPE_MODE (type))
1954 == GET_MODE_SIZE (TYPE_MODE (orig)))
1955 return fold (build1 (NOP_EXPR, type, arg));
1956 if (TREE_CODE (orig) == VECTOR_TYPE
1957 && GET_MODE_SIZE (TYPE_MODE (type))
1958 == GET_MODE_SIZE (TYPE_MODE (orig)))
1959 return fold (build1 (NOP_EXPR, type, arg));
1961 else if (VOID_TYPE_P (type))
1962 return fold (build1 (CONVERT_EXPR, type, arg));
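/* Editor's illustrative sketch, not part of the original source:
   converting an INTEGER_CST between integral types folds immediately to a
   new constant instead of building a NOP_EXPR.  */
#if 0 /* example only */
{
  tree c, l;

  c = build_int_2 (65, 0);
  TREE_TYPE (c) = integer_type_node;
  l = fold_convert (long_integer_type_node, c);
  /* l is an INTEGER_CST of type long int with value 65.  */
}
#endif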
1966 /* Return an expr equal to X but certainly not valid as an lvalue. */
1973 /* These things are certainly not lvalues. */
1974 if (TREE_CODE (x) == NON_LVALUE_EXPR
1975 || TREE_CODE (x) == INTEGER_CST
1976 || TREE_CODE (x) == REAL_CST
1977 || TREE_CODE (x) == STRING_CST
1978 || TREE_CODE (x) == ADDR_EXPR)
1981 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1982 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1986 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1987 Zero means allow extended lvalues. */
1989 int pedantic_lvalues;
1991 /* When pedantic, return an expr equal to X but certainly not valid as a
1992 pedantic lvalue. Otherwise, return X. */
1995 pedantic_non_lvalue (tree x)
1997 if (pedantic_lvalues)
1998 return non_lvalue (x);
2003 /* Given a tree comparison code, return the code that is the logical inverse
2004 of the given code. It is not safe to do this for floating-point
2005 comparisons, except for NE_EXPR and EQ_EXPR. */
2007 static enum tree_code
2008 invert_tree_comparison (enum tree_code code)
2029 /* Similar, but return the comparison that results if the operands are
2030 swapped. This is safe for floating-point. */
2032 static enum tree_code
2033 swap_tree_comparison (enum tree_code code)
2054 /* Convert a comparison tree code from an enum tree_code representation
2055 into a compcode bit-based encoding. This function is the inverse of
2056 compcode_to_comparison. */
2059 comparison_to_compcode (enum tree_code code)
2080 /* Convert a compcode bit-based encoding of a comparison operator back
2081 to GCC's enum tree_code representation. This function is the
2082 inverse of comparison_to_compcode. */
2084 static enum tree_code
2085 compcode_to_comparison (int code)
2106 /* Return nonzero if CODE is a tree code that represents a truth value. */
2109 truth_value_p (enum tree_code code)
2111 return (TREE_CODE_CLASS (code) == '<'
2112 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2113 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2114 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2117 /* Return nonzero if two operands (typically of the same tree node)
2118 are necessarily equal. If either argument has side-effects this
2119 function returns zero.
2121 If ONLY_CONST is nonzero, only return nonzero for constants.
2122 This function tests whether the operands are indistinguishable;
2123 it does not test whether they are equal using C's == operation.
2124 The distinction is important for IEEE floating point, because
2125 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2126 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2128 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2129 even though it may hold multiple values during a function.
2130 This is because a GCC tree node guarantees that nothing else is
2131 executed between the evaluation of its "operands" (which may often
2132 be evaluated in arbitrary order). Hence if the operands themselves
2133 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2134 same value in each operand/subexpression. Hence a zero value for
2135 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2136 If comparing arbitrary expression trees, such as from different
2137 statements, ONLY_CONST must usually be nonzero. */
2140 operand_equal_p (tree arg0, tree arg1, int only_const)
2144 /* If the two types don't have the same signedness, then we can't consider
2145 them equal. We must check this before the STRIP_NOPS calls
2146 because they may change the signedness of the arguments. */
2147 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2153 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2154 /* This is needed for conversions and for COMPONENT_REF.
2155 Might as well play it safe and always test this. */
2156 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2157 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2158 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2161 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2162 We don't care about side effects in that case because the SAVE_EXPR
2163 takes care of that for us. In all other cases, two expressions are
2164 equal if they have no side effects. If we have two identical
2165 expressions with side effects that should be treated the same due
2166 to the only side effects being identical SAVE_EXPR's, that will
2167 be detected in the recursive calls below. */
2168 if (arg0 == arg1 && ! only_const
2169 && (TREE_CODE (arg0) == SAVE_EXPR
2170 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2173 /* Next handle constant cases, those for which we can return 1 even
2174 if ONLY_CONST is set. */
2175 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2176 switch (TREE_CODE (arg0))
2179 return (! TREE_CONSTANT_OVERFLOW (arg0)
2180 && ! TREE_CONSTANT_OVERFLOW (arg1)
2181 && tree_int_cst_equal (arg0, arg1));
2184 return (! TREE_CONSTANT_OVERFLOW (arg0)
2185 && ! TREE_CONSTANT_OVERFLOW (arg1)
2186 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2187 TREE_REAL_CST (arg1)));
2193 if (TREE_CONSTANT_OVERFLOW (arg0)
2194 || TREE_CONSTANT_OVERFLOW (arg1))
2197 v1 = TREE_VECTOR_CST_ELTS (arg0);
2198 v2 = TREE_VECTOR_CST_ELTS (arg1);
2201 if (!operand_equal_p (v1, v2, only_const))
2203 v1 = TREE_CHAIN (v1);
2204 v2 = TREE_CHAIN (v2);
2211 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2213 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2217 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2218 && ! memcmp (TREE_STRING_POINTER (arg0),
2219 TREE_STRING_POINTER (arg1),
2220 TREE_STRING_LENGTH (arg0)));
2223 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2232 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2235 /* Two conversions are equal only if signedness and modes match. */
2236 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2237 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2238 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2241 return operand_equal_p (TREE_OPERAND (arg0, 0),
2242 TREE_OPERAND (arg1, 0), 0);
2246 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2247 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2251 /* For commutative ops, allow the other order. */
2252 return (commutative_tree_code (TREE_CODE (arg0))
2253 && operand_equal_p (TREE_OPERAND (arg0, 0),
2254 TREE_OPERAND (arg1, 1), 0)
2255 && operand_equal_p (TREE_OPERAND (arg0, 1),
2256 TREE_OPERAND (arg1, 0), 0));
2259 /* If either of the pointer (or reference) expressions we are
2260 dereferencing contain a side effect, these cannot be equal. */
2261 if (TREE_SIDE_EFFECTS (arg0)
2262 || TREE_SIDE_EFFECTS (arg1))
2265 switch (TREE_CODE (arg0))
2268 return operand_equal_p (TREE_OPERAND (arg0, 0),
2269 TREE_OPERAND (arg1, 0), 0);
2273 case ARRAY_RANGE_REF:
2274 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2275 TREE_OPERAND (arg1, 0), 0)
2276 && operand_equal_p (TREE_OPERAND (arg0, 1),
2277 TREE_OPERAND (arg1, 1), 0));
2280 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2281 TREE_OPERAND (arg1, 0), 0)
2282 && operand_equal_p (TREE_OPERAND (arg0, 1),
2283 TREE_OPERAND (arg1, 1), 0)
2284 && operand_equal_p (TREE_OPERAND (arg0, 2),
2285 TREE_OPERAND (arg1, 2), 0));
2291 switch (TREE_CODE (arg0))
2294 case TRUTH_NOT_EXPR:
2295 return operand_equal_p (TREE_OPERAND (arg0, 0),
2296 TREE_OPERAND (arg1, 0), 0);
2299 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2302 /* If the CALL_EXPRs call different functions, then they
2303 clearly can not be equal. */
2304 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2305 TREE_OPERAND (arg1, 0), 0))
2308 /* Only consider const functions equivalent. */
2309 fndecl = get_callee_fndecl (arg0);
2310 if (fndecl == NULL_TREE
2311 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2314 /* Now see if all the arguments are the same. operand_equal_p
2315 does not handle TREE_LIST, so we walk the operands here
2316 feeding them to operand_equal_p. */
2317 arg0 = TREE_OPERAND (arg0, 1);
2318 arg1 = TREE_OPERAND (arg1, 1);
2319 while (arg0 && arg1)
2321 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2324 arg0 = TREE_CHAIN (arg0);
2325 arg1 = TREE_CHAIN (arg1);
2328 /* If we get here and both argument lists are exhausted
2329 then the CALL_EXPRs are equal. */
2330 return ! (arg0 || arg1);
2337 /* Consider __builtin_sqrt equal to sqrt. */
2338 return TREE_CODE (arg0) == FUNCTION_DECL
2339 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2340 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2341 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
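/* Illustrative examples of the rules above (not exhaustive): the
   commutative case lets "a + b" match "b + a"; the built-in case
   treats "__builtin_sqrt (x)" and "sqrt (x)" as equal because both
   FUNCTION_DECLs carry the same built-in class and function code;
   but two textually identical calls to a non-const (non-ECF_CONST)
   function are never considered equal, since each call may return
   a different value.  */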
2348 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2349 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2351 When in doubt, return 0. */
2354 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2356 int unsignedp1, unsignedpo;
2357 tree primarg0, primarg1, primother;
2358 unsigned int correct_width;
2360 if (operand_equal_p (arg0, arg1, 0))
2363 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2364 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2367 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2368 and see if the inner values are the same. This removes any
2369 signedness comparison, which doesn't matter here. */
2370 primarg0 = arg0, primarg1 = arg1;
2371 STRIP_NOPS (primarg0);
2372 STRIP_NOPS (primarg1);
2373 if (operand_equal_p (primarg0, primarg1, 0))
2376 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2377 actual comparison operand, ARG0.
2379 First throw away any conversions to wider types
2380 already present in the operands. */
2382 primarg1 = get_narrower (arg1, &unsignedp1);
2383 primother = get_narrower (other, &unsignedpo);
2385 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2386 if (unsignedp1 == unsignedpo
2387 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2388 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2390 tree type = TREE_TYPE (arg0);
2392 /* Make sure shorter operand is extended the right way
2393 to match the longer operand. */
2394 primarg1 = fold_convert ((*lang_hooks.types.signed_or_unsigned_type)
2395 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2397 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
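/* Example: suppose "(int) c == (int) d" for two signed chars was
   shortened by shorten_compare to "c == (char) d".  Then with
   ARG0 = c, ARG1 = (int) c and OTHER = (int) d, the code above
   narrows ARG1 and OTHER back to c and d, sees that both were
   extended the same way and fit in the original width, and so
   recognizes ARG0 as the shortened form of ARG1.  */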
2404 /* See if ARG is an expression that is either a comparison or is performing
2405 arithmetic on comparisons. The comparisons must only be comparing
2406 two different values, which will be stored in *CVAL1 and *CVAL2; if
2407 they are nonzero it means that some operands have already been found.
2408 No variables may be used anywhere else in the expression except in the
2409 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2410 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2412 If this is true, return 1. Otherwise, return zero. */
2415 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2417 enum tree_code code = TREE_CODE (arg);
2418 char class = TREE_CODE_CLASS (code);
2420 /* We can handle some of the 'e' cases here. */
2421 if (class == 'e' && code == TRUTH_NOT_EXPR)
2423 else if (class == 'e'
2424 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2425 || code == COMPOUND_EXPR))
2428 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2429 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2431 /* If we've already found a CVAL1 or CVAL2, this expression is
2432 too complex to handle. */
2433 if (*cval1 || *cval2)
2443 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2446 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2447 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2448 cval1, cval2, save_p));
2454 if (code == COND_EXPR)
2455 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2456 cval1, cval2, save_p)
2457 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2458 cval1, cval2, save_p)
2459 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2460 cval1, cval2, save_p));
2464 /* First see if we can handle the first operand, then the second. For
2465 the second operand, we know *CVAL1 can't be zero. It must be that
2466 one side of the comparison is each of the values; test for the
2467 case where this isn't true by failing if the two operands are the same. */
2470 if (operand_equal_p (TREE_OPERAND (arg, 0),
2471 TREE_OPERAND (arg, 1), 0))
2475 *cval1 = TREE_OPERAND (arg, 0);
2476 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2478 else if (*cval2 == 0)
2479 *cval2 = TREE_OPERAND (arg, 0);
2480 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2485 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2487 else if (*cval2 == 0)
2488 *cval2 = TREE_OPERAND (arg, 1);
2489 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
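/* Example: for ARG = "a < b && (b >= a || a == b)" the walk above
   sets *CVAL1 = a and *CVAL2 = b, since every comparison in ARG
   mentions only those two values.  It fails for "a < b && b < c",
   which involves a third value, and for "a < a", whose two operands
   are equal.  */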
2501 /* ARG is a tree that is known to contain just arithmetic operations and
2502 comparisons. Evaluate the operations in the tree substituting NEW0 for
2503 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
2507 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2509 tree type = TREE_TYPE (arg);
2510 enum tree_code code = TREE_CODE (arg);
2511 char class = TREE_CODE_CLASS (code);
2513 /* We can handle some of the 'e' cases here. */
2514 if (class == 'e' && code == TRUTH_NOT_EXPR)
2516 else if (class == 'e'
2517 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2523 return fold (build1 (code, type,
2524 eval_subst (TREE_OPERAND (arg, 0),
2525 old0, new0, old1, new1)));
2528 return fold (build (code, type,
2529 eval_subst (TREE_OPERAND (arg, 0),
2530 old0, new0, old1, new1),
2531 eval_subst (TREE_OPERAND (arg, 1),
2532 old0, new0, old1, new1)));
2538 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2541 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2544 return fold (build (code, type,
2545 eval_subst (TREE_OPERAND (arg, 0),
2546 old0, new0, old1, new1),
2547 eval_subst (TREE_OPERAND (arg, 1),
2548 old0, new0, old1, new1),
2549 eval_subst (TREE_OPERAND (arg, 2),
2550 old0, new0, old1, new1)));
2554 /* Fall through - ??? */
2558 tree arg0 = TREE_OPERAND (arg, 0);
2559 tree arg1 = TREE_OPERAND (arg, 1);
2561 /* We need to check both for exact equality and tree equality. The
2562 former will be true if the operand has a side-effect. In that
2563 case, we know the operand occurred exactly once. */
2565 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2567 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2570 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2572 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2575 return fold (build (code, type, arg0, arg1));
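/* Example: eval_subst ("x < y || x == z", x, a, y, b) rebuilds the
   expression as "a < b || a == z" and folds it; the operand z,
   matching neither OLD0 nor OLD1, is left alone.  Since x and y
   appear only as comparison operands, the substitution is safe.  */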
2583 /* Return a tree for the case when the result of an expression is RESULT
2584 converted to TYPE and OMITTED was previously an operand of the expression
2585 but is now not needed (e.g., we folded OMITTED * 0).
2587 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2588 the conversion of RESULT to TYPE. */
2591 omit_one_operand (tree type, tree result, tree omitted)
2593 tree t = fold_convert (type, result);
2595 if (TREE_SIDE_EFFECTS (omitted))
2596 return build (COMPOUND_EXPR, type, omitted, t);
2598 return non_lvalue (t);
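/* Example: when fold turns "f () * 0" into 0, the call cannot simply
   be dropped if it has side effects, so
   omit_one_operand (type, integer_zero_node, call) yields the
   COMPOUND_EXPR "(f (), 0)".  For a side-effect-free OMITTED the
   result is just the converted RESULT, marked as a non-lvalue.  */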
2601 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2604 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2606 tree t = fold_convert (type, result);
2608 if (TREE_SIDE_EFFECTS (omitted))
2609 return build (COMPOUND_EXPR, type, omitted, t);
2611 return pedantic_non_lvalue (t);
2614 /* Return a simplified tree node for the truth-negation of ARG. This
2615 never alters ARG itself. We assume that ARG is an operation that
2616 returns a truth value (0 or 1). */
2619 invert_truthvalue (tree arg)
2621 tree type = TREE_TYPE (arg);
2622 enum tree_code code = TREE_CODE (arg);
2624 if (code == ERROR_MARK)
2627 /* If this is a comparison, we can simply invert it, except for
2628 floating-point non-equality comparisons, in which case we just
2629 enclose a TRUTH_NOT_EXPR around what we have. */
2631 if (TREE_CODE_CLASS (code) == '<')
2633 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2634 && !flag_unsafe_math_optimizations
2637 return build1 (TRUTH_NOT_EXPR, type, arg);
2639 return build (invert_tree_comparison (code), type,
2640 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2646 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2648 case TRUTH_AND_EXPR:
2649 return build (TRUTH_OR_EXPR, type,
2650 invert_truthvalue (TREE_OPERAND (arg, 0)),
2651 invert_truthvalue (TREE_OPERAND (arg, 1)));
2654 return build (TRUTH_AND_EXPR, type,
2655 invert_truthvalue (TREE_OPERAND (arg, 0)),
2656 invert_truthvalue (TREE_OPERAND (arg, 1)));
2658 case TRUTH_XOR_EXPR:
2659 /* Here we can invert either operand. We invert the first operand
2660 unless the second operand is a TRUTH_NOT_EXPR in which case our
2661 result is the XOR of the first operand with the inside of the
2662 negation of the second operand. */
2664 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2665 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2666 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2668 return build (TRUTH_XOR_EXPR, type,
2669 invert_truthvalue (TREE_OPERAND (arg, 0)),
2670 TREE_OPERAND (arg, 1));
2672 case TRUTH_ANDIF_EXPR:
2673 return build (TRUTH_ORIF_EXPR, type,
2674 invert_truthvalue (TREE_OPERAND (arg, 0)),
2675 invert_truthvalue (TREE_OPERAND (arg, 1)));
2677 case TRUTH_ORIF_EXPR:
2678 return build (TRUTH_ANDIF_EXPR, type,
2679 invert_truthvalue (TREE_OPERAND (arg, 0)),
2680 invert_truthvalue (TREE_OPERAND (arg, 1)));
2682 case TRUTH_NOT_EXPR:
2683 return TREE_OPERAND (arg, 0);
2686 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2687 invert_truthvalue (TREE_OPERAND (arg, 1)),
2688 invert_truthvalue (TREE_OPERAND (arg, 2)));
2691 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2692 invert_truthvalue (TREE_OPERAND (arg, 1)));
2694 case WITH_RECORD_EXPR:
2695 return build (WITH_RECORD_EXPR, type,
2696 invert_truthvalue (TREE_OPERAND (arg, 0)),
2697 TREE_OPERAND (arg, 1));
2699 case NON_LVALUE_EXPR:
2700 return invert_truthvalue (TREE_OPERAND (arg, 0));
2705 return build1 (TREE_CODE (arg), type,
2706 invert_truthvalue (TREE_OPERAND (arg, 0)));
2709 if (!integer_onep (TREE_OPERAND (arg, 1)))
2711 return build (EQ_EXPR, type, arg,
2712 fold_convert (type, integer_zero_node));
2715 return build1 (TRUTH_NOT_EXPR, type, arg);
2717 case CLEANUP_POINT_EXPR:
2718 return build1 (CLEANUP_POINT_EXPR, type,
2719 invert_truthvalue (TREE_OPERAND (arg, 0)));
2724 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2726 return build1 (TRUTH_NOT_EXPR, type, arg);
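/* Examples of the cases above: by De Morgan's laws, inverting
   "a && b" yields "!a || !b" and inverting "a || b" yields
   "!a && !b"; a comparison such as "a < b" becomes "a >= b"
   (except for floating-point non-equality comparisons, which are
   merely wrapped in a TRUTH_NOT_EXPR); and "c ? a : b" is inverted
   by inverting both arms.  */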
2729 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2730 operands are another bit-wise operation with a common input. If so,
2731 distribute the bit operations to save an operation and possibly two if
2732 constants are involved. For example, convert
2733 (A | B) & (A | C) into A | (B & C)
2734 Further simplification will occur if B and C are constants.
2736 If this optimization cannot be done, 0 will be returned. */
2739 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2744 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2745 || TREE_CODE (arg0) == code
2746 || (TREE_CODE (arg0) != BIT_AND_EXPR
2747 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2750 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2752 common = TREE_OPERAND (arg0, 0);
2753 left = TREE_OPERAND (arg0, 1);
2754 right = TREE_OPERAND (arg1, 1);
2756 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2758 common = TREE_OPERAND (arg0, 0);
2759 left = TREE_OPERAND (arg0, 1);
2760 right = TREE_OPERAND (arg1, 0);
2762 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2764 common = TREE_OPERAND (arg0, 1);
2765 left = TREE_OPERAND (arg0, 0);
2766 right = TREE_OPERAND (arg1, 1);
2768 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2770 common = TREE_OPERAND (arg0, 1);
2771 left = TREE_OPERAND (arg0, 0);
2772 right = TREE_OPERAND (arg1, 0);
2777 return fold (build (TREE_CODE (arg0), type, common,
2778 fold (build (code, type, left, right))));
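/* Example: with CODE == BIT_AND_EXPR, "(x | 3) & (x | 5)" has the
   common operand x, so this returns "x | (3 & 5)", and the inner
   fold reduces it further to "x | 1" -- one bit operation at run
   time instead of three.  */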
2781 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2782 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2785 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2788 tree result = build (BIT_FIELD_REF, type, inner,
2789 size_int (bitsize), bitsize_int (bitpos));
2791 TREE_UNSIGNED (result) = unsignedp;
2796 /* Optimize a bit-field compare.
2798 There are two cases: First is a compare against a constant and the
2799 second is a comparison of two items where the fields are at the same
2800 bit position relative to the start of a chunk (byte, halfword, word)
2801 large enough to contain it. In these cases we can avoid the shift
2802 implicit in bitfield extractions.
2804 For constants, we emit a compare of the shifted constant with the
2805 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2806 compared. For two fields at the same position, we do the ANDs with the
2807 similar mask and compare the result of the ANDs.
2809 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2810 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2811 are the left and right operands of the comparison, respectively.
2813 If the optimization described above can be done, we return the resulting
2814 tree. Otherwise we return zero. */
2817 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2820 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2821 tree type = TREE_TYPE (lhs);
2822 tree signed_type, unsigned_type;
2823 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2824 enum machine_mode lmode, rmode, nmode;
2825 int lunsignedp, runsignedp;
2826 int lvolatilep = 0, rvolatilep = 0;
2827 tree linner, rinner = NULL_TREE;
2831 /* Get all the information about the extractions being done. If the bit size
2832 is the same as the size of the underlying object, we aren't doing an
2833 extraction at all and so can do nothing. We also don't want to
2834 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2835 then will no longer be able to replace it. */
2836 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2837 &lunsignedp, &lvolatilep);
2838 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2839 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2844 /* If this is not a constant, we can only do something if bit positions,
2845 sizes, and signedness are the same. */
2846 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2847 &runsignedp, &rvolatilep);
2849 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2850 || lunsignedp != runsignedp || offset != 0
2851 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2855 /* See if we can find a mode to refer to this field. We should be able to,
2856 but fail if we can't. */
2857 nmode = get_best_mode (lbitsize, lbitpos,
2858 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2859 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2860 TYPE_ALIGN (TREE_TYPE (rinner))),
2861 word_mode, lvolatilep || rvolatilep);
2862 if (nmode == VOIDmode)
2865 /* Set signed and unsigned types of the precision of this mode for the shifts below. */
2867 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2868 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2870 /* Compute the bit position and size for the new reference and our offset
2871 within it. If the new reference is the same size as the original, we
2872 won't optimize anything, so return zero. */
2873 nbitsize = GET_MODE_BITSIZE (nmode);
2874 nbitpos = lbitpos & ~ (nbitsize - 1);
2876 if (nbitsize == lbitsize)
2879 if (BYTES_BIG_ENDIAN)
2880 lbitpos = nbitsize - lbitsize - lbitpos;
2882 /* Make the mask to be used against the extracted field. */
2883 mask = build_int_2 (~0, ~0);
2884 TREE_TYPE (mask) = unsigned_type;
2885 force_fit_type (mask, 0);
2886 mask = fold_convert (unsigned_type, mask);
2887 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2888 mask = const_binop (RSHIFT_EXPR, mask,
2889 size_int (nbitsize - lbitsize - lbitpos), 0);
2892 /* If not comparing with constant, just rework the comparison and return. */
2894 return build (code, compare_type,
2895 build (BIT_AND_EXPR, unsigned_type,
2896 make_bit_field_ref (linner, unsigned_type,
2897 nbitsize, nbitpos, 1),
2899 build (BIT_AND_EXPR, unsigned_type,
2900 make_bit_field_ref (rinner, unsigned_type,
2901 nbitsize, nbitpos, 1),
2904 /* Otherwise, we are handling the constant case. See if the constant is too
2905 big for the field. Warn and return a tree for 0 (false) if so. We do
2906 this not only for its own sake, but to avoid having to test for this
2907 error case below. If we didn't, we might generate wrong code.
2909 For unsigned fields, the constant shifted right by the field length should
2910 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
2915 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2916 fold_convert (unsigned_type, rhs),
2917 size_int (lbitsize), 0)))
2919 warning ("comparison is always %d due to width of bit-field",
2921 return fold_convert (compare_type,
2923 ? integer_one_node : integer_zero_node));
2928 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2929 size_int (lbitsize - 1), 0);
2930 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2932 warning ("comparison is always %d due to width of bit-field",
2934 return fold_convert (compare_type,
2936 ? integer_one_node : integer_zero_node));
2940 /* Single-bit compares should always be against zero. */
2941 if (lbitsize == 1 && ! integer_zerop (rhs))
2943 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2944 rhs = fold_convert (type, integer_zero_node);
2947 /* Make a new bitfield reference, shift the constant over the
2948 appropriate number of bits and mask it with the computed mask
2949 (in case this was a signed field). If we changed it, make a new one. */
2950 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2953 TREE_SIDE_EFFECTS (lhs) = 1;
2954 TREE_THIS_VOLATILE (lhs) = 1;
2957 rhs = fold (const_binop (BIT_AND_EXPR,
2958 const_binop (LSHIFT_EXPR,
2959 fold_convert (unsigned_type, rhs),
2960 size_int (lbitpos), 0),
2963 return build (code, compare_type,
2964 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
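/* Schematic example (the exact layout is target-dependent): given
   "struct s { unsigned a : 3; unsigned b : 5; } x;", the test
   "x.b == 5" can be done without extracting the field: load the
   byte containing both fields once and compare
   "(byte & mask) == (5 << shift)", where mask covers b's bits and
   both the mask and the shifted constant are computed here at
   compile time.  */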
2968 /* Subroutine for fold_truthop: decode a field reference.
2970 If EXP is a comparison reference, we return the innermost reference.
2972 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2973 set to the starting bit number.
2975 If the innermost field can be completely contained in a mode-sized
2976 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2978 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2979 otherwise it is not changed.
2981 *PUNSIGNEDP is set to the signedness of the field.
2983 *PMASK is set to the mask used. This is either contained in a
2984 BIT_AND_EXPR or derived from the width of the field.
2986 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2988 Return 0 if this is not a component reference or is one that we can't
2989 do anything with. */
2992 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2993 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2994 int *punsignedp, int *pvolatilep,
2995 tree *pmask, tree *pand_mask)
2997 tree outer_type = 0;
2999 tree mask, inner, offset;
3001 unsigned int precision;
3003 /* All the optimizations using this function assume integer fields.
3004 There are problems with FP fields since the type_for_size call
3005 below can fail for, e.g., XFmode. */
3006 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3009 /* We are interested in the bare arrangement of bits, so strip everything
3010 that doesn't affect the machine mode. However, record the type of the
3011 outermost expression if it may matter below. */
3012 if (TREE_CODE (exp) == NOP_EXPR
3013 || TREE_CODE (exp) == CONVERT_EXPR
3014 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3015 outer_type = TREE_TYPE (exp);
3018 if (TREE_CODE (exp) == BIT_AND_EXPR)
3020 and_mask = TREE_OPERAND (exp, 1);
3021 exp = TREE_OPERAND (exp, 0);
3022 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3023 if (TREE_CODE (and_mask) != INTEGER_CST)
3027 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3028 punsignedp, pvolatilep);
3029 if ((inner == exp && and_mask == 0)
3030 || *pbitsize < 0 || offset != 0
3031 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3034 /* If the number of bits in the reference is the same as the bitsize of
3035 the outer type, then the outer type gives the signedness. Otherwise
3036 (in case of a small bitfield) the signedness is unchanged. */
3037 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3038 *punsignedp = TREE_UNSIGNED (outer_type);
3040 /* Compute the mask to access the bitfield. */
3041 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
3042 precision = TYPE_PRECISION (unsigned_type);
3044 mask = build_int_2 (~0, ~0);
3045 TREE_TYPE (mask) = unsigned_type;
3046 force_fit_type (mask, 0);
3047 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3048 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3050 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3052 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3053 fold_convert (unsigned_type, and_mask), mask));
3056 *pand_mask = and_mask;
3060 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order bits. */
3064 all_ones_mask_p (tree mask, int size)
3066 tree type = TREE_TYPE (mask);
3067 unsigned int precision = TYPE_PRECISION (type);
3070 tmask = build_int_2 (~0, ~0);
3071 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
3072 force_fit_type (tmask, 0);
3074 tree_int_cst_equal (mask,
3075 const_binop (RSHIFT_EXPR,
3076 const_binop (LSHIFT_EXPR, tmask,
3077 size_int (precision - size),
3079 size_int (precision - size), 0));
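/* For instance, with SIZE == 8 in a 32-bit type this is intended to
   recognize the mask 0xff; callers use it to skip a BIT_AND_EXPR
   whose mask would not clear any bit of the extracted field
   anyway.  */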
3082 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3083 represents the sign bit of EXP's type. If EXP represents a sign
3084 or zero extension, also test VAL against the unextended type.
3085 The return value is the (sub)expression whose sign bit is VAL,
3086 or NULL_TREE otherwise. */
3089 sign_bit_p (tree exp, tree val)
3091 unsigned HOST_WIDE_INT mask_lo, lo;
3092 HOST_WIDE_INT mask_hi, hi;
3096 /* Tree EXP must have an integral type. */
3097 t = TREE_TYPE (exp);
3098 if (! INTEGRAL_TYPE_P (t))
3101 /* Tree VAL must be an integer constant. */
3102 if (TREE_CODE (val) != INTEGER_CST
3103 || TREE_CONSTANT_OVERFLOW (val))
3106 width = TYPE_PRECISION (t);
3107 if (width > HOST_BITS_PER_WIDE_INT)
3109 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3112 mask_hi = ((unsigned HOST_WIDE_INT) -1
3113 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3119 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3122 mask_lo = ((unsigned HOST_WIDE_INT) -1
3123 >> (HOST_BITS_PER_WIDE_INT - width));
3126 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3127 treat VAL as if it were unsigned. */
3128 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3129 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3132 /* Handle extension from a narrower type. */
3133 if (TREE_CODE (exp) == NOP_EXPR
3134 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3135 return sign_bit_p (TREE_OPERAND (exp, 0), val);
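/* Example: for a 16-bit EXP the sign bit is 0x8000, so that value of
   VAL succeeds directly; and if EXP is "(short) c" extending an
   8-bit c, the recursive call above also accepts VAL == 0x80, the
   sign bit of the narrower type.  */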
3140 /* Subroutine for fold_truthop: determine if an operand is simple enough
3141 to be evaluated unconditionally. */
3144 simple_operand_p (tree exp)
3146 /* Strip any conversions that don't change the machine mode. */
3147 while ((TREE_CODE (exp) == NOP_EXPR
3148 || TREE_CODE (exp) == CONVERT_EXPR)
3149 && (TYPE_MODE (TREE_TYPE (exp))
3150 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3151 exp = TREE_OPERAND (exp, 0);
3153 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3155 && ! TREE_ADDRESSABLE (exp)
3156 && ! TREE_THIS_VOLATILE (exp)
3157 && ! DECL_NONLOCAL (exp)
3158 /* Don't regard global variables as simple. They may be
3159 allocated in ways unknown to the compiler (shared memory,
3160 #pragma weak, etc). */
3161 && ! TREE_PUBLIC (exp)
3162 && ! DECL_EXTERNAL (exp)
3163 /* Loading a static variable is unduly expensive, but global
3164 registers aren't expensive. */
3165 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3168 /* The following functions are subroutines to fold_range_test and allow it to
3169 try to change a logical combination of comparisons into a range test.
3172 X == 2 || X == 3 || X == 4 || X == 5
3176 (unsigned) (X - 2) <= 3
3178 We describe each set of comparisons as being either inside or outside
3179 a range, using a variable named like IN_P, and then describe the
3180 range with a lower and upper bound. If one of the bounds is omitted,
3181 it represents either the highest or lowest value of the type.
3183 In the comments below, we represent a range by two numbers in brackets
3184 preceded by a "+" to designate being inside that range, or a "-" to
3185 designate being outside that range, so the condition can be inverted by
3186 flipping the prefix. An omitted bound is represented by a "-". For
3187 example, "- [-, 10]" means being outside the range starting at the lowest
3188 possible value and ending at 10, in other words, being greater than 10.
3189 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3192 We set up things so that the missing bounds are handled in a consistent
3193 manner so neither a missing bound nor "true" and "false" need to be
3194 handled using a special case. */
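/* As a further illustration of the notation: "X > 10" may be written
   either as - [-, 10] (outside the range from the type's minimum up
   to 10) or as + [11, -]; negating a test is just flipping the
   prefix, which is how TRUTH_NOT_EXPR is handled in make_range
   below.  */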
3196 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3197 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3198 and UPPER1_P are nonzero if the respective argument is an upper bound
3199 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3200 must be specified for a comparison. ARG1 will be converted to ARG0's
3201 type if both are specified. */
3204 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3205 tree arg1, int upper1_p)
3211 /* If neither arg represents infinity, do the normal operation.
3212 Else, if not a comparison, return infinity. Else handle the special
3213 comparison rules. Note that most of the cases below won't occur, but
3214 are handled for consistency. */
3216 if (arg0 != 0 && arg1 != 0)
3218 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3219 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3221 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3224 if (TREE_CODE_CLASS (code) != '<')
3227 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3228 for neither. In real maths, we cannot assume open ended ranges are
3229 the same. But, this is computer arithmetic, where numbers are finite.
3230 We can therefore make the transformation of any unbounded range with
3231 the value Z, Z being greater than any representable number. This permits
3232 us to treat unbounded ranges as equal. */
3233 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3234 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3238 result = sgn0 == sgn1;
3241 result = sgn0 != sgn1;
3244 result = sgn0 < sgn1;
3247 result = sgn0 <= sgn1;
3250 result = sgn0 > sgn1;
3253 result = sgn0 >= sgn1;
3259 return fold_convert (type, result ? integer_one_node : integer_zero_node);
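/* Example: range_binop (LT_EXPR, type, high0, 1, low1, 0) with both
   arguments omitted compares the substitute "infinities": SGN0 = 1
   for the missing upper bound, SGN1 = -1 for the missing lower
   bound, and since 1 < -1 is false the constant 0 is returned.
   When both bounds are present, the ordinary comparison is simply
   folded.  */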
3262 /* Given EXP, a logical expression, set the range it is testing into
3263 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3264 actually being tested. *PLOW and *PHIGH will be made of the same type
3265 as the returned expression. If EXP is not a comparison, we will most
3266 likely not be returning a useful value and range. */
3269 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3271 enum tree_code code;
3272 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3273 tree orig_type = NULL_TREE;
3275 tree low, high, n_low, n_high;
3277 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3278 and see if we can refine the range. Some of the cases below may not
3279 happen, but it doesn't seem worth worrying about this. We "continue"
3280 the outer loop when we've changed something; otherwise we "break"
3281 the switch, which will "break" the while. */
3284 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3288 code = TREE_CODE (exp);
3290 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3292 if (first_rtl_op (code) > 0)
3293 arg0 = TREE_OPERAND (exp, 0);
3294 if (TREE_CODE_CLASS (code) == '<'
3295 || TREE_CODE_CLASS (code) == '1'
3296 || TREE_CODE_CLASS (code) == '2')
3297 type = TREE_TYPE (arg0);
3298 if (TREE_CODE_CLASS (code) == '2'
3299 || TREE_CODE_CLASS (code) == '<'
3300 || (TREE_CODE_CLASS (code) == 'e'
3301 && TREE_CODE_LENGTH (code) > 1))
3302 arg1 = TREE_OPERAND (exp, 1);
3305 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3306 lose a cast by accident. */
3307 if (type != NULL_TREE && orig_type == NULL_TREE)
3312 case TRUTH_NOT_EXPR:
3313 in_p = ! in_p, exp = arg0;
3316 case EQ_EXPR: case NE_EXPR:
3317 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3318 /* We can only do something if the range is testing for zero
3319 and if the second operand is an integer constant. Note that
3320 saying something is "in" the range we make is done by
3321 complementing IN_P since it will be set in the initial case of
3322 being not equal to zero; "out" is leaving it alone. */
3323 if (low == 0 || high == 0
3324 || ! integer_zerop (low) || ! integer_zerop (high)
3325 || TREE_CODE (arg1) != INTEGER_CST)
3330 case NE_EXPR: /* - [c, c] */
3333 case EQ_EXPR: /* + [c, c] */
3334 in_p = ! in_p, low = high = arg1;
3336 case GT_EXPR: /* - [-, c] */
3337 low = 0, high = arg1;
3339 case GE_EXPR: /* + [c, -] */
3340 in_p = ! in_p, low = arg1, high = 0;
3342 case LT_EXPR: /* - [c, -] */
3343 low = arg1, high = 0;
3345 case LE_EXPR: /* + [-, c] */
3346 in_p = ! in_p, low = 0, high = arg1;
3354 /* If this is an unsigned comparison, we also know that EXP is
3355 greater than or equal to zero. We base the range tests we make
3356 on that fact, so we record it here so we can parse existing range tests. */
3358 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3360 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3361 1, fold_convert (type, integer_zero_node),
3365 in_p = n_in_p, low = n_low, high = n_high;
3367 /* If the high bound is missing, but we have a nonzero low
3368 bound, reverse the range so it goes from zero to the low bound
3370 if (high == 0 && low && ! integer_zerop (low))
3373 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3374 integer_one_node, 0);
3375 low = fold_convert (type, integer_zero_node);
3381 /* (-x) IN [a,b] -> x in [-b, -a] */
3382 n_low = range_binop (MINUS_EXPR, type,
3383 fold_convert (type, integer_zero_node),
3385 n_high = range_binop (MINUS_EXPR, type,
3386 fold_convert (type, integer_zero_node),
3388 low = n_low, high = n_high;
3394 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3395 fold_convert (type, integer_one_node));
3398 case PLUS_EXPR: case MINUS_EXPR:
3399 if (TREE_CODE (arg1) != INTEGER_CST)
3402 /* If EXP is signed, any overflow in the computation is undefined,
3403 so we don't worry about it so long as our computations on
3404 the bounds don't overflow. For unsigned, overflow is defined
3405 and this is exactly the right thing. */
3406 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3407 type, low, 0, arg1, 0);
3408 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3409 type, high, 1, arg1, 0);
3410 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3411 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3414 /* Check for an unsigned range which has wrapped around the maximum
3415 value thus making n_high < n_low, and normalize it. */
3416 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3418 low = range_binop (PLUS_EXPR, type, n_high, 0,
3419 integer_one_node, 0);
3420 high = range_binop (MINUS_EXPR, type, n_low, 0,
3421 integer_one_node, 0);
3423 /* If the range is of the form +/- [ x+1, x ], we won't
3424 be able to normalize it. But then, it represents the
3425 whole range or the empty set, so make it +/- [ -, - ]. */
3427 if (tree_int_cst_equal (n_low, low)
3428 && tree_int_cst_equal (n_high, high))
3434 low = n_low, high = n_high;
3439 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3440 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3443 if (! INTEGRAL_TYPE_P (type)
3444 || (low != 0 && ! int_fits_type_p (low, type))
3445 || (high != 0 && ! int_fits_type_p (high, type)))
3448 n_low = low, n_high = high;
3451 n_low = fold_convert (type, n_low);
3454 n_high = fold_convert (type, n_high);
3456 /* If we're converting from an unsigned to a signed type,
3457 we will be doing the comparison as unsigned. The tests above
3458 have already verified that LOW and HIGH are both positive.
3460 So we have to make sure that the original unsigned value will
3461 be interpreted as positive. */
3462 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3464 tree equiv_type = (*lang_hooks.types.type_for_mode)
3465 (TYPE_MODE (type), 1);
3468 /* A range without an upper bound is, naturally, unbounded.
3469 Since convert would have cropped a very large value, use
3470 the max value for the destination type. */
3472 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3473 : TYPE_MAX_VALUE (type);
3475 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3476 high_positive = fold (build (RSHIFT_EXPR, type,
3480 integer_one_node)));
3482 /* If the low bound is specified, "and" the range with the
3483 range for which the original unsigned value will be positive. */
3487 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3488 1, n_low, n_high, 1,
3489 fold_convert (type, integer_zero_node),
3493 in_p = (n_in_p == in_p);
3497 /* Otherwise, "or" the range with the range of the input
3498 that will be interpreted as negative. */
3499 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3500 0, n_low, n_high, 1,
3501 fold_convert (type, integer_zero_node),
3505 in_p = (in_p != n_in_p);
3510 low = n_low, high = n_high;
3520 /* If EXP is a constant, we can evaluate whether this is true or false. */
3521 if (TREE_CODE (exp) == INTEGER_CST)
3523 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3525 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3531 *pin_p = in_p, *plow = low, *phigh = high;
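/* Example: for EXP = "x > 10" with signed x, the comparison case
   above records the range - [-, 10] (*PIN_P = 0, *PLOW omitted,
   *PHIGH = 10) and returns x as the expression being tested;
   fold_range_test can later merge this with the range from another
   comparison of x.  */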
3535 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3536 type, TYPE, return an expression to test if EXP is in (or out of, depending
3537 on IN_P) the range. */
3540 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3542 tree etype = TREE_TYPE (exp);
3546 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3547 return invert_truthvalue (value);
3549 if (low == 0 && high == 0)
3550 return fold_convert (type, integer_one_node);
3553 return fold (build (LE_EXPR, type, exp, high));
3556 return fold (build (GE_EXPR, type, exp, low));
3558 if (operand_equal_p (low, high, 0))
3559 return fold (build (EQ_EXPR, type, exp, low));
3561 if (integer_zerop (low))
3563 if (! TREE_UNSIGNED (etype))
3565 etype = (*lang_hooks.types.unsigned_type) (etype);
3566 high = fold_convert (etype, high);
3567 exp = fold_convert (etype, exp);
3569 return build_range_check (type, exp, 1, 0, high);
3572 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3573 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3575 unsigned HOST_WIDE_INT lo;
3579 prec = TYPE_PRECISION (etype);
3580 if (prec <= HOST_BITS_PER_WIDE_INT)
3583 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3587 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3588 lo = (unsigned HOST_WIDE_INT) -1;
3591 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3593 if (TREE_UNSIGNED (etype))
3595 etype = (*lang_hooks.types.signed_type) (etype);
3596 exp = fold_convert (etype, exp);
3598 return fold (build (GT_EXPR, type, exp,
3599 fold_convert (etype, integer_zero_node)));
3603 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3604 && ! TREE_OVERFLOW (value))
3605 return build_range_check (type,
3606 fold (build (MINUS_EXPR, etype, exp, low)),
3607 1, fold_convert (etype, integer_zero_node),
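/* Example: a check of EXP against + [2, 5] reaches the final case,
   which rewrites it as a check of "EXP - 2" against + [0, 3]; the
   zero-low-bound case then converts to unsigned and produces
   "(unsigned) (EXP - 2) <= 3", a single comparison.  */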
3613 /* Given two ranges, see if we can merge them into one. Return 1 if we
3614 can, 0 if we can't. Set the output range into the specified parameters. */
3617 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3618 tree high0, int in1_p, tree low1, tree high1)
3626 int lowequal = ((low0 == 0 && low1 == 0)
3627 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3628 low0, 0, low1, 0)));
3629 int highequal = ((high0 == 0 && high1 == 0)
3630 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3631 high0, 1, high1, 1)));
3633 /* Make range 0 be the range that starts first, or ends last if they
3634 start at the same value. Swap them if it isn't. */
3635 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3638 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3639 high1, 1, high0, 1))))
3641 temp = in0_p, in0_p = in1_p, in1_p = temp;
3642 tem = low0, low0 = low1, low1 = tem;
3643 tem = high0, high0 = high1, high1 = tem;
3646 /* Now flag two cases, whether the ranges are disjoint or whether the
3647 second range is totally subsumed in the first. Note that the tests
3648 below are simplified by the ones above. */
3649 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3650 high0, 1, low1, 0));
3651 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3652 high1, 1, high0, 1));
3654 /* We now have four cases, depending on whether we are including or
3655 excluding the two ranges. */
3658 /* If they don't overlap, the result is false. If the second range
3659 is a subset it is the result. Otherwise, the range is from the start
3660 of the second to the end of the first. */
3662 in_p = 0, low = high = 0;
3664 in_p = 1, low = low1, high = high1;
3666 in_p = 1, low = low1, high = high0;
3669 else if (in0_p && ! in1_p)
3671 /* If they don't overlap, the result is the first range. If they are
3672 equal, the result is false. If the second range is a subset of the
3673 first, and the ranges begin at the same place, we go from just after
3674 the end of the first range to the end of the second. If the second
3675 range is not a subset of the first, or if it is a subset and both
3676 ranges end at the same place, the range starts at the start of the
3677 first range and ends just before the second range.
3678 Otherwise, we can't describe this as a single range. */
3680 in_p = 1, low = low0, high = high0;
3681 else if (lowequal && highequal)
3682 in_p = 0, low = high = 0;
3683 else if (subset && lowequal)
3685 in_p = 1, high = high0;
3686 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3687 integer_one_node, 0);
3689 else if (! subset || highequal)
3691 in_p = 1, low = low0;
3692 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3693 integer_one_node, 0);
3699 else if (! in0_p && in1_p)
3701 /* If they don't overlap, the result is the second range. If the second
3702 is a subset of the first, the result is false. Otherwise,
3703 the range starts just after the first range and ends at the
3704 end of the second. */
3706 in_p = 1, low = low1, high = high1;
3707 else if (subset || highequal)
3708 in_p = 0, low = high = 0;
3711 in_p = 1, high = high1;
3712 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3713 integer_one_node, 0);
3719 /* The case where we are excluding both ranges. Here the complex case
3720 is if they don't overlap. In that case, the only time we have a
3721 range is if they are adjacent. If the second is a subset of the
3722 first, the result is the first. Otherwise, the range to exclude
3723 starts at the beginning of the first range and ends at the end of the
3727 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3728 range_binop (PLUS_EXPR, NULL_TREE,
3730 integer_one_node, 1),
3732 in_p = 0, low = low0, high = high1;
3737 in_p = 0, low = low0, high = high0;
3739 in_p = 0, low = low0, high = high1;
3742 *pin_p = in_p, *plow = low, *phigh = high;
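/* Example: merging + [2, 5] with + [4, 9] takes the first branch
   above: the ranges overlap and neither subsumes the other, so the
   result runs from the start of the second range to the end of the
   first, + [4, 5].  */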
3746 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3747 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3750 /* EXP is some logical combination of boolean tests. See if we can
3751 merge it into some range test. Return the new tree if so. */
3754 fold_range_test (tree exp)
3756 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3757 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3758 int in0_p, in1_p, in_p;
3759 tree low0, low1, low, high0, high1, high;
3760 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3761 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3764 /* If this is an OR operation, invert both sides; we will invert
3765 again at the end. */
3767 in0_p = ! in0_p, in1_p = ! in1_p;
3769 /* If both expressions are the same, if we can merge the ranges, and we
3770 can build the range test, return it or it inverted. If one of the
3771 ranges is always true or always false, consider it to be the same
3772 expression as the other. */
3773 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3774 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3776 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3778 : rhs != 0 ? rhs : integer_zero_node,
3780 return or_op ? invert_truthvalue (tem) : tem;
3782 /* On machines where the branch cost is expensive, if this is a
3783 short-circuited branch and the underlying object on both sides
3784 is the same, make a non-short-circuit operation. */
3785 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3786 && lhs != 0 && rhs != 0
3787 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3788 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3789 && operand_equal_p (lhs, rhs, 0))
3791 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3792 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3793 which cases we can't do this. */
3794 if (simple_operand_p (lhs))
3795 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3796 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3797 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3798 TREE_OPERAND (exp, 1));
3800 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3801 && ! CONTAINS_PLACEHOLDER_P (lhs))
3803 tree common = save_expr (lhs);
3805 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3806 or_op ? ! in0_p : in0_p,
3808 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3809 or_op ? ! in1_p : in1_p,
3811 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3812 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3813 TREE_TYPE (exp), lhs, rhs);
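/* Example: for EXP = "x >= 2 && x <= 5" the two sides give the
   ranges + [2, -] and + [-, 5] over the same x; merge_ranges
   combines them into + [2, 5] and build_range_check then returns
   "(unsigned) (x - 2) <= 3".  */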
3820 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3821 bit value. Arrange things so the extra bits will be set to zero if and
3822 only if C is sign-extended to its full width. If MASK is nonzero,
3823 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3826 unextend (tree c, int p, int unsignedp, tree mask)
3828 tree type = TREE_TYPE (c);
3829 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3832 if (p == modesize || unsignedp)
3835 /* We work by getting just the sign bit into the low-order bit, then
3836 into the high-order bit, then sign-extend. We then XOR that value with C. */
3838 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3839 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3841 /* We must use a signed type in order to get an arithmetic right shift.
3842 However, we must also avoid introducing accidental overflows, so that
3843 a subsequent call to integer_zerop will work. Hence we must
3844 do the type conversion here. At this point, the constant is either
3845 zero or one, and the conversion to a signed type can never overflow.
3846 We could get an overflow if this conversion is done anywhere else. */
3847 if (TREE_UNSIGNED (type))
3848 temp = fold_convert ((*lang_hooks.types.signed_type) (type), temp);
3850 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3851 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3853 temp = const_binop (BIT_AND_EXPR, temp,
3854 fold_convert (TREE_TYPE (c), mask), 0);
3855 /* If necessary, convert the type back to match the type of C. */
3856 if (TREE_UNSIGNED (type))
3857 temp = fold_convert (type, temp);
3859 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
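/* Example: with P = 4 in an 8-bit type and no MASK, C = 0x0a (field
   bits 1010, sign bit set) gives temp = 0xf0, hence the result
   0x0a ^ 0xf0 = 0xfa; an already sign-extended C = 0xfa maps back
   to 0x0a.  The XOR thus leaves the extra bits zero exactly when C
   was sign-extended.  */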
3862 /* Find ways of folding logical expressions of LHS and RHS:
3863 Try to merge two comparisons to the same innermost item.
3864 Look for range tests like "ch >= '0' && ch <= '9'".
3865 Look for combinations of simple terms on machines with expensive branches
3866 and evaluate the RHS unconditionally.
3868 For example, if we have p->a == 2 && p->b == 4 and we can make an
3869 object large enough to span both A and B, we can do this with a comparison
3870 against the object ANDed with the a mask.
3872 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3873 operations to do this with one comparison.
3875 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3876 function and the one above.
3878 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3879 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3881 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
3884 We return the simplified tree or 0 if no optimization is possible. */
3887 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3889 /* If this is the "or" of two comparisons, we can do something if
3890 the comparisons are NE_EXPR. If this is the "and", we can do something
3891 if the comparisons are EQ_EXPR. I.e.,
3892 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3894 WANTED_CODE is this operation code. For single bit fields, we can
3895 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3896 comparison for one-bit fields. */
3898 enum tree_code wanted_code;
3899 enum tree_code lcode, rcode;
3900 tree ll_arg, lr_arg, rl_arg, rr_arg;
3901 tree ll_inner, lr_inner, rl_inner, rr_inner;
3902 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3903 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3904 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3905 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3906 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3907 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3908 enum machine_mode lnmode, rnmode;
3909 tree ll_mask, lr_mask, rl_mask, rr_mask;
3910 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3911 tree l_const, r_const;
3912 tree lntype, rntype, result;
3913 int first_bit, end_bit;
3916 /* Start by getting the comparison codes. Fail if anything is volatile.
3917 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3918 it were surrounded with a NE_EXPR. */
3920 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3923 lcode = TREE_CODE (lhs);
3924 rcode = TREE_CODE (rhs);
3926 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3927 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3929 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3930 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3932 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3935 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3936 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3938 ll_arg = TREE_OPERAND (lhs, 0);
3939 lr_arg = TREE_OPERAND (lhs, 1);
3940 rl_arg = TREE_OPERAND (rhs, 0);
3941 rr_arg = TREE_OPERAND (rhs, 1);
3943 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3944 if (simple_operand_p (ll_arg)
3945 && simple_operand_p (lr_arg)
3946 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3950 if (operand_equal_p (ll_arg, rl_arg, 0)
3951 && operand_equal_p (lr_arg, rr_arg, 0))
3953 int lcompcode, rcompcode;
3955 lcompcode = comparison_to_compcode (lcode);
3956 rcompcode = comparison_to_compcode (rcode);
3957 compcode = (code == TRUTH_AND_EXPR)
3958 ? lcompcode & rcompcode
3959 : lcompcode | rcompcode;
3961 else if (operand_equal_p (ll_arg, rr_arg, 0)
3962 && operand_equal_p (lr_arg, rl_arg, 0))
3964 int lcompcode, rcompcode;
3966 rcode = swap_tree_comparison (rcode);
3967 lcompcode = comparison_to_compcode (lcode);
3968 rcompcode = comparison_to_compcode (rcode);
3969 compcode = (code == TRUTH_AND_EXPR)
3970 ? lcompcode & rcompcode
3971 : lcompcode | rcompcode;
3976 if (compcode == COMPCODE_TRUE)
3977 return fold_convert (truth_type, integer_one_node);
3978 else if (compcode == COMPCODE_FALSE)
3979 return fold_convert (truth_type, integer_zero_node);
3980 else if (compcode != -1)
3981 return build (compcode_to_comparison (compcode),
3982 truth_type, ll_arg, lr_arg);
3985 /* If the RHS can be evaluated unconditionally and its operands are
3986 simple, it wins to evaluate the RHS unconditionally on machines
3987 with expensive branches. In this case, this isn't a comparison
3988 that can be merged. Avoid doing this if the RHS is a floating-point
3989 comparison since those can trap. */
3991 if (BRANCH_COST >= 2
3992 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3993 && simple_operand_p (rl_arg)
3994 && simple_operand_p (rr_arg))
3996 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3997 if (code == TRUTH_OR_EXPR
3998 && lcode == NE_EXPR && integer_zerop (lr_arg)
3999 && rcode == NE_EXPR && integer_zerop (rr_arg)
4000 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4001 return build (NE_EXPR, truth_type,
4002 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4006 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4007 if (code == TRUTH_AND_EXPR
4008 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4009 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4010 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4011 return build (EQ_EXPR, truth_type,
4012 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4016 return build (code, truth_type, lhs, rhs);
4020 /* See if the comparisons can be merged. Then get all the parameters for each side. */
4022 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4023 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4027 ll_inner = decode_field_reference (ll_arg,
4028 &ll_bitsize, &ll_bitpos, &ll_mode,
4029 &ll_unsignedp, &volatilep, &ll_mask,
4031 lr_inner = decode_field_reference (lr_arg,
4032 &lr_bitsize, &lr_bitpos, &lr_mode,
4033 &lr_unsignedp, &volatilep, &lr_mask,
4035 rl_inner = decode_field_reference (rl_arg,
4036 &rl_bitsize, &rl_bitpos, &rl_mode,
4037 &rl_unsignedp, &volatilep, &rl_mask,
4039 rr_inner = decode_field_reference (rr_arg,
4040 &rr_bitsize, &rr_bitpos, &rr_mode,
4041 &rr_unsignedp, &volatilep, &rr_mask,
4044 /* It must be true that the inner operation on the lhs of each
4045 comparison must be the same if we are to be able to do anything.
4046 Then see if we have constants. If not, the same must be true for the rhs's. */
4048 if (volatilep || ll_inner == 0 || rl_inner == 0
4049 || ! operand_equal_p (ll_inner, rl_inner, 0))
4052 if (TREE_CODE (lr_arg) == INTEGER_CST
4053 && TREE_CODE (rr_arg) == INTEGER_CST)
4054 l_const = lr_arg, r_const = rr_arg;
4055 else if (lr_inner == 0 || rr_inner == 0
4056 || ! operand_equal_p (lr_inner, rr_inner, 0))
4059 l_const = r_const = 0;
4061 /* If either comparison code is not correct for our logical operation,
4062 fail. However, we can convert a one-bit comparison against zero into
4063 the opposite comparison against that bit being set in the field. */
4065 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4066 if (lcode != wanted_code)
4068 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4070 /* Make the left operand unsigned, since we are only interested
4071 in the value of one bit. Otherwise we are doing the wrong
4080 /* This is analogous to the code for l_const above. */
4081 if (rcode != wanted_code)
4083 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4092 /* After this point all optimizations will generate bit-field
4093 references, which we might not want. */
4094 if (! (*lang_hooks.can_use_bit_fields_p) ())
4097 /* See if we can find a mode that contains both fields being compared on
4098 the left. If we can't, fail. Otherwise, update all constants and masks
4099 to be relative to a field of that size. */
4100 first_bit = MIN (ll_bitpos, rl_bitpos);
4101 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4102 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4103 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4105 if (lnmode == VOIDmode)
4108 lnbitsize = GET_MODE_BITSIZE (lnmode);
4109 lnbitpos = first_bit & ~ (lnbitsize - 1);
4110 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
4111 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4113 if (BYTES_BIG_ENDIAN)
4115 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4116 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4119 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4120 size_int (xll_bitpos), 0);
4121 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4122 size_int (xrl_bitpos), 0);
4126 l_const = fold_convert (lntype, l_const);
4127 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4128 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4129 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4130 fold (build1 (BIT_NOT_EXPR,
4134 warning ("comparison is always %d", wanted_code == NE_EXPR);
4136 return fold_convert (truth_type,
4137 wanted_code == NE_EXPR
4138 ? integer_one_node : integer_zero_node);
4143 r_const = fold_convert (lntype, r_const);
4144 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4145 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4146 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4147 fold (build1 (BIT_NOT_EXPR,
4151 warning ("comparison is always %d", wanted_code == NE_EXPR);
4153 return fold_convert (truth_type,
4154 wanted_code == NE_EXPR
4155 ? integer_one_node : integer_zero_node);
4159 /* If the right sides are not constant, do the same for it. Also,
4160 disallow this optimization if a size or signedness mismatch occurs
4161 between the left and right sides. */
4164 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4165 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4166 /* Make sure the two fields on the right
4167 correspond to the left without being swapped. */
4168 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4171 first_bit = MIN (lr_bitpos, rr_bitpos);
4172 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4173 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4174 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4176 if (rnmode == VOIDmode)
4179 rnbitsize = GET_MODE_BITSIZE (rnmode);
4180 rnbitpos = first_bit & ~ (rnbitsize - 1);
4181 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
4182 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4184 if (BYTES_BIG_ENDIAN)
4186 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4187 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4190 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4191 size_int (xlr_bitpos), 0);
4192 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4193 size_int (xrr_bitpos), 0);
4195 /* Make a mask that corresponds to both fields being compared.
4196 Do this for both items being compared. If the operands are the
4197 same size and the bits being compared are in the same position
4198 then we can do this by masking both and comparing the masked results. */
4200 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4201 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4202 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4204 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4205 ll_unsignedp || rl_unsignedp);
4206 if (! all_ones_mask_p (ll_mask, lnbitsize))
4207 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4209 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4210 lr_unsignedp || rr_unsignedp);
4211 if (! all_ones_mask_p (lr_mask, rnbitsize))
4212 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4214 return build (wanted_code, truth_type, lhs, rhs);
4217 /* There is still another way we can do something: If both pairs of
4218 fields being compared are adjacent, we may be able to make a wider
4219 field containing them both.
4221 Note that we still must mask the lhs/rhs expressions. Furthermore,
4222 the mask must be shifted to account for the shift done by
4223 make_bit_field_ref. */
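/* An illustrative example (hypothetical fields): with 8-bit fields `a'
   at bit 0 and `b' at bit 8 on both sides, `x.a == y.a && x.b == y.b'
   can be done as one 16-bit bit-field comparison covering both pairs. */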
4224 if ((ll_bitsize + ll_bitpos == rl_bitpos
4225 && lr_bitsize + lr_bitpos == rr_bitpos)
4226 || (ll_bitpos == rl_bitpos + rl_bitsize
4227 && lr_bitpos == rr_bitpos + rr_bitsize))
4231 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4232 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4233 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4234 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4236 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4237 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4238 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4239 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4241 /* Convert to the smaller type before masking out unwanted bits. */
4243 if (lntype != rntype)
4245 if (lnbitsize > rnbitsize)
4247 lhs = fold_convert (rntype, lhs);
4248 ll_mask = fold_convert (rntype, ll_mask);
4251 else if (lnbitsize < rnbitsize)
4253 rhs = fold_convert (lntype, rhs);
4254 lr_mask = fold_convert (lntype, lr_mask);
4259 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4260 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4262 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4263 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4265 return build (wanted_code, truth_type, lhs, rhs);
4271 /* Handle the case of comparisons with constants. If there is something in
4272 common between the masks, those bits of the constants must be the same.
4273 If not, the condition is always false. Test for this to avoid generating
4274 incorrect code below. */
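/* An illustrative example: in `(x & 3) == 1 && (x & 5) == 4' the masks
   share bit 0, which the constants require to be both 1 and 0, so the
   `and' is always false. */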
4275 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4276 if (! integer_zerop (result)
4277 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4278 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4280 if (wanted_code == NE_EXPR)
4282 warning ("`or' of unmatched not-equal tests is always 1");
4283 return fold_convert (truth_type, integer_one_node);
4287 warning ("`and' of mutually exclusive equal-tests is always 0");
4288 return fold_convert (truth_type, integer_zero_node);
4292 /* Construct the expression we will return. First get the component
4293 reference we will make. Unless the mask is all ones the width of
4294 that field, perform the mask operation. Then compare with the merged constant. */
4296 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4297 ll_unsignedp || rl_unsignedp);
4299 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4300 if (! all_ones_mask_p (ll_mask, lnbitsize))
4301 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4303 return build (wanted_code, truth_type, result,
4304 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4307 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant. */
4311 optimize_minmax_comparison (tree t)
4313 tree type = TREE_TYPE (t);
4314 tree arg0 = TREE_OPERAND (t, 0);
4315 enum tree_code op_code;
4316 tree comp_const = TREE_OPERAND (t, 1);
4318 int consts_equal, consts_lt;
4321 STRIP_SIGN_NOPS (arg0);
4323 op_code = TREE_CODE (arg0);
4324 minmax_const = TREE_OPERAND (arg0, 1);
4325 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4326 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4327 inner = TREE_OPERAND (arg0, 0);
4329 /* If something does not permit us to optimize, return the original tree. */
4330 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4331 || TREE_CODE (comp_const) != INTEGER_CST
4332 || TREE_CONSTANT_OVERFLOW (comp_const)
4333 || TREE_CODE (minmax_const) != INTEGER_CST
4334 || TREE_CONSTANT_OVERFLOW (minmax_const))
4337 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4338 and GT_EXPR, doing the rest with recursive calls using logical simplifications. */
4340 switch (TREE_CODE (t))
4342 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4344 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4348 fold (build (TRUTH_ORIF_EXPR, type,
4349 optimize_minmax_comparison
4350 (build (EQ_EXPR, type, arg0, comp_const)),
4351 optimize_minmax_comparison
4352 (build (GT_EXPR, type, arg0, comp_const))));
4355 if (op_code == MAX_EXPR && consts_equal)
4356 /* MAX (X, 0) == 0 -> X <= 0 */
4357 return fold (build (LE_EXPR, type, inner, comp_const));
4359 else if (op_code == MAX_EXPR && consts_lt)
4360 /* MAX (X, 0) == 5 -> X == 5 */
4361 return fold (build (EQ_EXPR, type, inner, comp_const));
4363 else if (op_code == MAX_EXPR)
4364 /* MAX (X, 0) == -1 -> false */
4365 return omit_one_operand (type, integer_zero_node, inner);
4367 else if (consts_equal)
4368 /* MIN (X, 0) == 0 -> X >= 0 */
4369 return fold (build (GE_EXPR, type, inner, comp_const));
4372 /* MIN (X, 0) == 5 -> false */
4373 return omit_one_operand (type, integer_zero_node, inner);
4376 /* MIN (X, 0) == -1 -> X == -1 */
4377 return fold (build (EQ_EXPR, type, inner, comp_const));
4380 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4381 /* MAX (X, 0) > 0 -> X > 0
4382 MAX (X, 0) > 5 -> X > 5 */
4383 return fold (build (GT_EXPR, type, inner, comp_const));
4385 else if (op_code == MAX_EXPR)
4386 /* MAX (X, 0) > -1 -> true */
4387 return omit_one_operand (type, integer_one_node, inner);
4389 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4390 /* MIN (X, 0) > 0 -> false
4391 MIN (X, 0) > 5 -> false */
4392 return omit_one_operand (type, integer_zero_node, inner);
4395 /* MIN (X, 0) > -1 -> X > -1 */
4396 return fold (build (GT_EXPR, type, inner, comp_const));
4403 /* T is an integer expression that is being multiplied, divided, or taken a
4404 modulus (CODE says which and what kind of divide or modulus) by a
4405 constant C. See if we can eliminate that operation by folding it with
4406 other operations already in T. WIDE_TYPE, if non-null, is a type that
4407 should be used for the computation if wider than our type.
4409 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4410 (X * 2) + (Y * 4). We must, however, be assured that either the original
4411 expression would not overflow or that overflow is undefined for the type
4412 in the language in question.
4414 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4415 the machine has a multiply-accumulate insn or that this is part of an
4416 addressing calculation.
4418 If we return a non-null expression, it is an equivalent form of the
4419 original computation, but need not be in the original type. */
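/* Further illustrations (subject to the same overflow caveats as above):
   dividing X * 6 by 3 yields X * 2, and (X << 3) / 4 is first treated as
   (X * 8) / 4 and then also yields X * 2. */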
4422 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4424 /* To avoid exponential search depth, refuse to allow recursion past
4425 three levels. Beyond that (1) it's highly unlikely that we'll find
4426 something interesting and (2) we've probably processed it before
4427 when we built the inner expression. */
4436 ret = extract_muldiv_1 (t, c, code, wide_type);
4443 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4445 tree type = TREE_TYPE (t);
4446 enum tree_code tcode = TREE_CODE (t);
4447 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4448 > GET_MODE_SIZE (TYPE_MODE (type)))
4449 ? wide_type : type);
4451 int same_p = tcode == code;
4452 tree op0 = NULL_TREE, op1 = NULL_TREE;
4454 /* Don't deal with constants of zero here; they confuse the code below. */
4455 if (integer_zerop (c))
4458 if (TREE_CODE_CLASS (tcode) == '1')
4459 op0 = TREE_OPERAND (t, 0);
4461 if (TREE_CODE_CLASS (tcode) == '2')
4462 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4464 /* Note that we need not handle conditional operations here since fold
4465 already handles those cases. So just do arithmetic here. */
4469 /* For a constant, we can always simplify if we are a multiply
4470 or (for divide and modulus) if it is a multiple of our constant. */
4471 if (code == MULT_EXPR
4472 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4473 return const_binop (code, fold_convert (ctype, t),
4474 fold_convert (ctype, c), 0);
4477 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4478 /* If op0 is an expression ... */
4479 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4480 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4481 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4482 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4483 /* ... and is unsigned, and its type is smaller than ctype,
4484 then we cannot pass through as widening. */
4485 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4486 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4487 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4488 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4489 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4490 /* ... or its type is larger than ctype,
4491 then we cannot pass through this truncation. */
4492 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4493 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4494 /* ... or signedness changes for division or modulus,
4495 then we cannot pass through this conversion. */
4496 || (code != MULT_EXPR
4497 && (TREE_UNSIGNED (ctype)
4498 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4501 /* Pass the constant down and see if we can make a simplification. If
4502 we can, replace this expression with the inner simplification for
4503 possible later conversion to our or some other type. */
4504 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4505 && TREE_CODE (t2) == INTEGER_CST
4506 && ! TREE_CONSTANT_OVERFLOW (t2)
4507 && (0 != (t1 = extract_muldiv (op0, t2, code,
4509 ? ctype : NULL_TREE))))
4513 case NEGATE_EXPR: case ABS_EXPR:
4514 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4515 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4518 case MIN_EXPR: case MAX_EXPR:
4519 /* If widening the type changes the signedness, then we can't perform
4520 this optimization as that changes the result. */
4521 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4524 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4525 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4526 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4528 if (tree_int_cst_sgn (c) < 0)
4529 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4531 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4532 fold_convert (ctype, t2)));
4536 case WITH_RECORD_EXPR:
4537 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4538 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4539 TREE_OPERAND (t, 1));
4542 case LSHIFT_EXPR: case RSHIFT_EXPR:
4543 /* If the second operand is constant, this is a multiplication
4544 or floor division by a power of two, so we can treat it that
4545 way unless the multiplier or divisor overflows. */
4546 if (TREE_CODE (op1) == INTEGER_CST
4547 /* const_binop may not detect overflow correctly,
4548 so check for it explicitly here. */
4549 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4550 && TREE_INT_CST_HIGH (op1) == 0
4551 && 0 != (t1 = fold_convert (ctype,
4552 const_binop (LSHIFT_EXPR,
4555 && ! TREE_OVERFLOW (t1))
4556 return extract_muldiv (build (tcode == LSHIFT_EXPR
4557 ? MULT_EXPR : FLOOR_DIV_EXPR,
4558 ctype, fold_convert (ctype, op0), t1),
4559 c, code, wide_type);
4562 case PLUS_EXPR: case MINUS_EXPR:
4563 /* See if we can eliminate the operation on both sides. If we can, we
4564 can return a new PLUS or MINUS. If we can't, the only remaining
4565 cases where we can do anything are if the second operand is a constant. */
4567 t1 = extract_muldiv (op0, c, code, wide_type);
4568 t2 = extract_muldiv (op1, c, code, wide_type);
4569 if (t1 != 0 && t2 != 0
4570 && (code == MULT_EXPR
4571 /* If not multiplication, we can only do this if both operands
4572 are divisible by c. */
4573 || (multiple_of_p (ctype, op0, c)
4574 && multiple_of_p (ctype, op1, c))))
4575 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4576 fold_convert (ctype, t2)));
4578 /* If this was a subtraction, negate OP1 and set it to be an addition.
4579 This simplifies the logic below. */
4580 if (tcode == MINUS_EXPR)
4581 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4583 if (TREE_CODE (op1) != INTEGER_CST)
4586 /* If either OP1 or C is negative, this optimization is not safe for
4587 some of the division and remainder types while for others we need
4588 to change the code. */
4589 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4591 if (code == CEIL_DIV_EXPR)
4592 code = FLOOR_DIV_EXPR;
4593 else if (code == FLOOR_DIV_EXPR)
4594 code = CEIL_DIV_EXPR;
4595 else if (code != MULT_EXPR
4596 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4600 /* If it's a multiply or a division/modulus operation of a multiple
4601 of our constant, do the operation and verify it doesn't overflow. */
4602 if (code == MULT_EXPR
4603 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4605 op1 = const_binop (code, fold_convert (ctype, op1),
4606 fold_convert (ctype, c), 0);
4607 /* We allow the constant to overflow with wrapping semantics. */
4609 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4615 /* If we have an unsigned type that is not a sizetype, we cannot widen
4616 the operation since it will change the result if the original
4617 computation overflowed. */
4618 if (TREE_UNSIGNED (ctype)
4619 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4623 /* If we were able to eliminate our operation from the first side,
4624 apply our operation to the second side and reform the PLUS. */
4625 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4626 return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4628 /* The last case is if we are a multiply. In that case, we can
4629 apply the distributive law to commute the multiply and addition
4630 if the multiplication of the constants doesn't overflow. */
4631 if (code == MULT_EXPR)
4632 return fold (build (tcode, ctype,
4633 fold (build (code, ctype,
4634 fold_convert (ctype, op0),
4635 fold_convert (ctype, c))),
4641 /* We have a special case here if we are doing something like
4642 (C * 8) % 4 since we know that's zero. */
4643 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4644 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4645 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4646 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4647 return omit_one_operand (type, integer_zero_node, op0);
4649 /* ... fall through ... */
4651 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4652 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4653 /* If we can extract our operation from the LHS, do so and return a
4654 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4655 do something only if the second operand is a constant. */
4657 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4658 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4659 fold_convert (ctype, op1)));
4660 else if (tcode == MULT_EXPR && code == MULT_EXPR
4661 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4662 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4663 fold_convert (ctype, t1)));
4664 else if (TREE_CODE (op1) != INTEGER_CST)
4667 /* If these are the same operation types, we can associate them
4668 assuming no overflow. */
4670 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4671 fold_convert (ctype, c), 0))
4672 && ! TREE_OVERFLOW (t1))
4673 return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4675 /* If these operations "cancel" each other, we have the main
4676 optimizations of this pass, which occur when either constant is a
4677 multiple of the other, in which case we replace this with either an
4678 operation of either CODE or TCODE.
4680 If we have an unsigned type that is not a sizetype, we cannot do
4681 this since it will change the result if the original computation overflowed. */
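/* An illustrative example: `(X * 12) / 4' cancels to `X * 3', and
   `(X /[exact] 4) * 12' likewise cancels to `X * 3', since in each case
   one constant is a multiple of the other. */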
4683 if ((! TREE_UNSIGNED (ctype)
4684 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4686 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4687 || (tcode == MULT_EXPR
4688 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4689 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4691 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4692 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4693 fold_convert (ctype,
4694 const_binop (TRUNC_DIV_EXPR,
4696 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4697 return fold (build (code, ctype, fold_convert (ctype, op0),
4698 fold_convert (ctype,
4699 const_binop (TRUNC_DIV_EXPR,
4711 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4712 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4713 that we may sometimes modify the tree. */
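/* An illustrative example: if T is `((void) S, X)', built only so that
   S would be evaluated, then strip_compound_expr (T, S) returns X. */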
4716 strip_compound_expr (tree t, tree s)
4718 enum tree_code code = TREE_CODE (t);
4720 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4721 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4722 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4723 return TREE_OPERAND (t, 1);
4725 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4726 don't bother handling any other types. */
4727 else if (code == COND_EXPR)
4729 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4730 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4731 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4733 else if (TREE_CODE_CLASS (code) == '1')
4734 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4735 else if (TREE_CODE_CLASS (code) == '<'
4736 || TREE_CODE_CLASS (code) == '2')
4738 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4739 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4745 /* Return a node which has the indicated constant VALUE (either 0 or
4746 1), and is of the indicated TYPE. */
4749 constant_boolean_node (int value, tree type)
4751 if (type == integer_type_node)
4752 return value ? integer_one_node : integer_zero_node;
4753 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4754 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4758 tree t = build_int_2 (value, 0);
4760 TREE_TYPE (t) = type;
4765 /* Utility function for the following routine, to see how complex a nesting of
4766 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4767 we don't care (to avoid spending too much time on complex expressions). */
4770 count_cond (tree expr, int lim)
4774 if (TREE_CODE (expr) != COND_EXPR)
4779 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4780 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4781 return MIN (lim, 1 + ctrue + cfalse);
4784 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4785 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4786 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4787 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4788 COND is the first argument to CODE; otherwise (as in the example
4789 given here), it is the second argument. TYPE is the type of the
4790 original expression. */
4793 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4794 tree cond, tree arg, int cond_first_p)
4796 tree test, true_value, false_value;
4797 tree lhs = NULL_TREE;
4798 tree rhs = NULL_TREE;
4799 /* In the end, we'll produce a COND_EXPR. Both arms of the
4800 conditional expression will be binary operations. The left-hand
4801 side of the expression to be executed if the condition is true
4802 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4803 of the expression to be executed if the condition is true will be
4804 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4805 but apply to the expression to be executed if the conditional is
4811 /* These are the codes to use for the left-hand side and right-hand
4812 side of the COND_EXPR. Normally, they are the same as CODE. */
4813 enum tree_code lhs_code = code;
4814 enum tree_code rhs_code = code;
4815 /* And these are the types of the expressions. */
4816 tree lhs_type = type;
4817 tree rhs_type = type;
4822 true_rhs = false_rhs = &arg;
4823 true_lhs = &true_value;
4824 false_lhs = &false_value;
4828 true_lhs = false_lhs = &arg;
4829 true_rhs = &true_value;
4830 false_rhs = &false_value;
4833 if (TREE_CODE (cond) == COND_EXPR)
4835 test = TREE_OPERAND (cond, 0);
4836 true_value = TREE_OPERAND (cond, 1);
4837 false_value = TREE_OPERAND (cond, 2);
4838 /* If this operand throws an exception, then it does not make
4839 sense to try to perform a logical or arithmetic operation
4840 involving it. Instead of building `a + throw 3' for example,
4841 we simply build `a, throw 3'. */
4842 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4846 lhs_code = COMPOUND_EXPR;
4847 lhs_type = void_type_node;
4852 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4856 rhs_code = COMPOUND_EXPR;
4857 rhs_type = void_type_node;
4865 tree testtype = TREE_TYPE (cond);
4867 true_value = fold_convert (testtype, integer_one_node);
4868 false_value = fold_convert (testtype, integer_zero_node);
4871 /* If ARG is complex we want to make sure we only evaluate it once. Though
4872 this is only required if it is volatile, it might be more efficient even
4873 if it is not. However, if we succeed in folding one part to a constant,
4874 we do not need to make this SAVE_EXPR. Since we do this optimization
4875 primarily to see if we do end up with a constant and this SAVE_EXPR
4876 interferes with later optimizations, suppressing it when we can is important.
4879 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4880 do so. Don't try to see if the result is a constant if an arm is a
4881 COND_EXPR since we get exponential behavior in that case. */
4883 if (saved_expr_p (arg))
4885 else if (lhs == 0 && rhs == 0
4886 && !TREE_CONSTANT (arg)
4887 && (*lang_hooks.decls.global_bindings_p) () == 0
4888 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4889 || TREE_SIDE_EFFECTS (arg)))
4891 if (TREE_CODE (true_value) != COND_EXPR)
4892 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4894 if (TREE_CODE (false_value) != COND_EXPR)
4895 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4897 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4898 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4900 arg = save_expr (arg);
4902 save = saved_expr_p (arg);
4907 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4909 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4911 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4913 /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4914 ahead of the COND_EXPR we made. Otherwise we would have it only
4915 evaluated in one branch, with the other branch using the result
4916 but missing the evaluation code. Beware that the save_expr call
4917 above might not return a SAVE_EXPR, so testing the TREE_CODE
4918 of ARG is not enough to decide here. */
4920 return build (COMPOUND_EXPR, type,
4921 fold_convert (void_type_node, arg),
4922 strip_compound_expr (test, arg));
4924 return fold_convert (type, test);
4928 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4930 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4931 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4932 ADDEND is the same as X.
4934 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4935 and finite. The problematic cases are when X is zero, and its mode
4936 has signed zeros. In the case of rounding towards -infinity,
4937 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4938 modes, X + 0 is not the same as X because -0 + 0 is 0. */
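/* Concretely: in default rounding, `x - 0.0' may be folded to `x' even
   with signed zeros, since (-0.0) - 0.0 is -0.0; but `x + 0.0' may not,
   since (-0.0) + 0.0 is +0.0. */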
4941 fold_real_zero_addition_p (tree type, tree addend, int negate)
4943 if (!real_zerop (addend))
4946 /* Don't allow the fold with -fsignaling-nans. */
4947 if (HONOR_SNANS (TYPE_MODE (type)))
4950 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4951 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4954 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4955 if (TREE_CODE (addend) == REAL_CST
4956 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4959 /* The mode has signed zeros, and we have to honor their sign.
4960 In this situation, there is only one case we can return true for.
4961 X - 0 is the same as X unless rounding towards -infinity is supported. */
4963 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4966 /* Subroutine of fold() that checks comparisons of built-in math
4967 functions against real constants.
4969 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4970 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4971 is the type of the result and ARG0 and ARG1 are the operands of the
4972 comparison. ARG1 must be a TREE_REAL_CST.
4974 The function returns the constant folded tree if a simplification
4975 can be made, and NULL_TREE otherwise. */
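/* An illustrative example: `sqrt (x) > 2.0' folds to `x > 4.0', while
   `sqrt (x) < -1.0' folds to constant false, since sqrt never returns
   a negative value. */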
4978 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4979 tree type, tree arg0, tree arg1)
4983 if (fcode == BUILT_IN_SQRT
4984 || fcode == BUILT_IN_SQRTF
4985 || fcode == BUILT_IN_SQRTL)
4987 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4988 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4990 c = TREE_REAL_CST (arg1);
4991 if (REAL_VALUE_NEGATIVE (c))
4993 /* sqrt(x) < y is always false, if y is negative. */
4994 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4995 return omit_one_operand (type,
4996 fold_convert (type, integer_zero_node),
4999 /* sqrt(x) > y is always true, if y is negative and we
5000 don't care about NaNs, i.e. negative values of x. */
5001 if (code == NE_EXPR || !HONOR_NANS (mode))
5002 return omit_one_operand (type,
5003 fold_convert (type, integer_one_node),
5006 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5007 return fold (build (GE_EXPR, type, arg,
5008 build_real (TREE_TYPE (arg), dconst0)));
5010 else if (code == GT_EXPR || code == GE_EXPR)
5014 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5015 real_convert (&c2, mode, &c2);
5017 if (REAL_VALUE_ISINF (c2))
5019 /* sqrt(x) > y is x == +Inf, when y is very large. */
5020 if (HONOR_INFINITIES (mode))
5021 return fold (build (EQ_EXPR, type, arg,
5022 build_real (TREE_TYPE (arg), c2)));
5024 /* sqrt(x) > y is always false, when y is very large
5025 and we don't care about infinities. */
5026 return omit_one_operand (type,
5027 fold_convert (type, integer_zero_node),
5031 /* sqrt(x) > c is the same as x > c*c. */
5032 return fold (build (code, type, arg,
5033 build_real (TREE_TYPE (arg), c2)));
5035 else if (code == LT_EXPR || code == LE_EXPR)
5039 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5040 real_convert (&c2, mode, &c2);
5042 if (REAL_VALUE_ISINF (c2))
5044 /* sqrt(x) < y is always true, when y is a very large
5045 value and we don't care about NaNs or Infinities. */
5046 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5047 return omit_one_operand (type,
5048 fold_convert (type, integer_one_node),
5051 /* sqrt(x) < y is x != +Inf when y is very large and we
5052 don't care about NaNs. */
5053 if (! HONOR_NANS (mode))
5054 return fold (build (NE_EXPR, type, arg,
5055 build_real (TREE_TYPE (arg), c2)));
5057 /* sqrt(x) < y is x >= 0 when y is very large and we
5058 don't care about Infinities. */
5059 if (! HONOR_INFINITIES (mode))
5060 return fold (build (GE_EXPR, type, arg,
5061 build_real (TREE_TYPE (arg), dconst0)));
5063 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5064 if ((*lang_hooks.decls.global_bindings_p) () != 0
5065 || CONTAINS_PLACEHOLDER_P (arg))
5068 arg = save_expr (arg);
5069 return fold (build (TRUTH_ANDIF_EXPR, type,
5070 fold (build (GE_EXPR, type, arg,
5071 build_real (TREE_TYPE (arg),
5073 fold (build (NE_EXPR, type, arg,
5074 build_real (TREE_TYPE (arg),
5078 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5079 if (! HONOR_NANS (mode))
5080 return fold (build (code, type, arg,
5081 build_real (TREE_TYPE (arg), c2)));
5083 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5084 if ((*lang_hooks.decls.global_bindings_p) () == 0
5085 && ! CONTAINS_PLACEHOLDER_P (arg))
5087 arg = save_expr (arg);
5088 return fold (build (TRUTH_ANDIF_EXPR, type,
5089 fold (build (GE_EXPR, type, arg,
5090 build_real (TREE_TYPE (arg),
5092 fold (build (code, type, arg,
5093 build_real (TREE_TYPE (arg),
5102 /* Subroutine of fold() that optimizes comparisons against Infinities,
5103 either +Inf or -Inf.
5105 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5106 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5107 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5109 The function returns the constant folded tree if a simplification
5110 can be made, and NULL_TREE otherwise. */
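/* An illustrative example: `x >= +Inf' folds to `x > DBL_MAX', `x < +Inf'
   folds to `x <= DBL_MAX', and a comparison against -Inf is handled by
   first swapping the sense of the comparison. */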
5113 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5115 enum machine_mode mode;
5116 REAL_VALUE_TYPE max;
5120 mode = TYPE_MODE (TREE_TYPE (arg0));
5122 /* For negative infinity swap the sense of the comparison. */
5123 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5125 code = swap_tree_comparison (code);
5130 /* x > +Inf is always false, if we ignore sNaNs. */
5131 if (HONOR_SNANS (mode))
5133 return omit_one_operand (type,
5134 fold_convert (type, integer_zero_node),
5138 /* x <= +Inf is always true, if we don't care about NaNs. */
5139 if (! HONOR_NANS (mode))
5140 return omit_one_operand (type,
5141 fold_convert (type, integer_one_node),
5144 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5145 if ((*lang_hooks.decls.global_bindings_p) () == 0
5146 && ! CONTAINS_PLACEHOLDER_P (arg0))
5148 arg0 = save_expr (arg0);
5149 return fold (build (EQ_EXPR, type, arg0, arg0));
5155 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5156 real_maxval (&max, neg, mode);
5157 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5158 arg0, build_real (TREE_TYPE (arg0), max)));
5161 /* x < +Inf is always equal to x <= DBL_MAX. */
5162 real_maxval (&max, neg, mode);
5163 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5164 arg0, build_real (TREE_TYPE (arg0), max)));
5167 /* x != +Inf is always equal to !(x > DBL_MAX). */
5168 real_maxval (&max, neg, mode);
5169 if (! HONOR_NANS (mode))
5170 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5171 arg0, build_real (TREE_TYPE (arg0), max)));
5172 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5173 arg0, build_real (TREE_TYPE (arg0), max)));
5174 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5183 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5184 equality/inequality test, then return a simplified form of
5185 the test using shifts and logical operations. Otherwise return
5186 NULL. TYPE is the desired result type. */
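/* An illustrative example: `(x & 8) != 0' becomes `(x >> 3) & 1', and
   `(x & 8) == 0' additionally flips the extracted bit with an XOR;
   if the mask is the sign bit, `(x & bit) != 0' becomes `x < 0'. */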
5189 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5192 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside operand 0. */
5194 if (code == TRUTH_NOT_EXPR)
5196 code = TREE_CODE (arg0);
5197 if (code != NE_EXPR && code != EQ_EXPR)
5200 /* Extract the arguments of the EQ/NE. */
5201 arg1 = TREE_OPERAND (arg0, 1);
5202 arg0 = TREE_OPERAND (arg0, 0);
5204 /* This requires us to invert the code. */
5205 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5208 /* If this is testing a single bit, we can optimize the test. */
5209 if ((code == NE_EXPR || code == EQ_EXPR)
5210 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5211 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5213 tree inner = TREE_OPERAND (arg0, 0);
5214 tree type = TREE_TYPE (arg0);
5215 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5216 enum machine_mode operand_mode = TYPE_MODE (type);
5218 tree signed_type, unsigned_type, intermediate_type;
5221 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5222 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5223 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5224 if (arg00 != NULL_TREE)
5226 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
5227 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5228 fold_convert (stype, arg00),
5229 fold_convert (stype, integer_zero_node)));
5232 /* At this point, we know that arg0 is not testing the sign bit. */
5233 if (TYPE_PRECISION (type) - 1 == bitnum)
5236 /* Otherwise we have (A & C) != 0 where C is a single bit,
5237 convert that into ((A >> C2) & 1), where C2 = log2(C).
5238 Similarly for (A & C) == 0. */
5240 /* If INNER is a right shift of a constant and it plus BITNUM does
5241 not overflow, adjust BITNUM and INNER. */
5242 if (TREE_CODE (inner) == RSHIFT_EXPR
5243 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5244 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5245 && bitnum < TYPE_PRECISION (type)
5246 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5247 bitnum - TYPE_PRECISION (type)))
5249 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5250 inner = TREE_OPERAND (inner, 0);
5253 /* If we are going to be able to omit the AND below, we must do our
5254 operations as unsigned. If we must use the AND, we have a choice.
5255 Normally unsigned is faster, but for some machines signed is. */
5256 #ifdef LOAD_EXTEND_OP
5257 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5262 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
5263 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
5264 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5265 inner = fold_convert (intermediate_type, inner);
5268 inner = build (RSHIFT_EXPR, intermediate_type,
5269 inner, size_int (bitnum));
5271 if (code == EQ_EXPR)
5272 inner = build (BIT_XOR_EXPR, intermediate_type,
5273 inner, integer_one_node);
5275 /* Put the AND last so it can combine with more things. */
5276 inner = build (BIT_AND_EXPR, intermediate_type,
5277 inner, integer_one_node);
5279 /* Make sure to return the proper type. */
5280 inner = fold_convert (result_type, inner);
5287 /* Check whether we are allowed to reorder operands arg0 and arg1,
5288 such that the evaluation of arg1 occurs before arg0. */
5291 reorder_operands_p (tree arg0, tree arg1)
5293 if (! flag_evaluation_order)
5295 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5297 return ! TREE_SIDE_EFFECTS (arg0)
5298 && ! TREE_SIDE_EFFECTS (arg1);
5301 /* Test whether it is preferable to swap two operands, ARG0 and
5302 ARG1, for example because ARG0 is an integer constant and ARG1
5303 isn't. If REORDER is true, only recommend swapping if we can
5304 evaluate the operands in reverse order. */
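/* An illustrative example: for `2 + x', ARG0 is an INTEGER_CST and ARG1
   is not, so this returns true and the canonicalization in fold below
   rewrites the sum as `x + 2'. */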
5307 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5309 STRIP_SIGN_NOPS (arg0);
5310 STRIP_SIGN_NOPS (arg1);
5312 if (TREE_CODE (arg1) == INTEGER_CST)
5314 if (TREE_CODE (arg0) == INTEGER_CST)
5317 if (TREE_CODE (arg1) == REAL_CST)
5319 if (TREE_CODE (arg0) == REAL_CST)
5322 if (TREE_CODE (arg1) == COMPLEX_CST)
5324 if (TREE_CODE (arg0) == COMPLEX_CST)
5327 if (TREE_CONSTANT (arg1))
5329 if (TREE_CONSTANT (arg0))
5335 if (reorder && flag_evaluation_order
5336 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5347 /* Perform constant folding and related simplification of EXPR.
5348 The related simplifications include x*1 => x, x*0 => 0, etc.,
5349 and application of the associative law.
5350 NOP_EXPR conversions may be removed freely (as long as we
5351 are careful not to change the C type of the overall expression)
5352 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5353 but we can constant-fold them if they have constant operands. */
5355 #ifdef ENABLE_FOLD_CHECKING
5356 # define fold(x) fold_1 (x)
5357 static tree fold_1 (tree);
5363 tree t = expr, orig_t;
5364 tree t1 = NULL_TREE;
5366 tree type = TREE_TYPE (expr);
5367 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5368 enum tree_code code = TREE_CODE (t);
5369 int kind = TREE_CODE_CLASS (code);
5371 /* WINS will be nonzero when the switch is done
5372 if all operands are constant. */
5375 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5376 Likewise for a SAVE_EXPR that's already been evaluated. */
5377 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5380 /* Return right away if a constant. */
5386 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5390 /* Special case for conversion ops that can have fixed point args. */
5391 arg0 = TREE_OPERAND (t, 0);
5393 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5395 STRIP_SIGN_NOPS (arg0);
5397 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5398 subop = TREE_REALPART (arg0);
5402 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5403 && TREE_CODE (subop) != REAL_CST)
5404 /* Note that TREE_CONSTANT isn't enough:
5405 static var addresses are constant but we can't
5406 do arithmetic on them. */
5409 else if (IS_EXPR_CODE_CLASS (kind))
5411 int len = first_rtl_op (code);
5413 for (i = 0; i < len; i++)
5415 tree op = TREE_OPERAND (t, i);
5419 continue; /* Valid for CALL_EXPR, at least. */
5421 if (kind == '<' || code == RSHIFT_EXPR)
5423 /* Signedness matters here. Perhaps we can refine this later. */
5425 STRIP_SIGN_NOPS (op);
5428 /* Strip any conversions that don't change the mode. */
5431 if (TREE_CODE (op) == COMPLEX_CST)
5432 subop = TREE_REALPART (op);
5436 if (TREE_CODE (subop) != INTEGER_CST
5437 && TREE_CODE (subop) != REAL_CST)
5438 /* Note that TREE_CONSTANT isn't enough:
5439 static var addresses are constant but we can't
5440 do arithmetic on them. */
5450 /* If this is a commutative operation, and ARG0 is a constant, move it
5451 to ARG1 to reduce the number of tests below. */
5452 if (commutative_tree_code (code)
5453 && tree_swap_operands_p (arg0, arg1, true))
5454 return fold (build (code, type, arg1, arg0));
5456 /* Now WINS is set as described above,
5457 ARG0 is the first operand of EXPR,
5458 and ARG1 is the second operand (if it has more than one operand).
5460 First check for cases where an arithmetic operation is applied to a
5461 compound, conditional, or comparison operation. Push the arithmetic
5462 operation inside the compound or conditional to see if any folding
5463 can then be done. Convert comparison to conditional for this purpose.
5465 This also optimizes non-constant cases that used to be done in expand_expr.
5467 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5468 one of the operands is a comparison and the other is a comparison, a
5469 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5470 code below would make the expression more complex. Change it to a
5471 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5472 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
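/* An illustrative example: `(a < b) & (c < d)' becomes the TRUTH_AND_EXPR
   of the two comparisons, and `(a < b) != (c < d)' becomes their
   TRUTH_XOR_EXPR. */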
5474 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5475 || code == EQ_EXPR || code == NE_EXPR)
5476 && ((truth_value_p (TREE_CODE (arg0))
5477 && (truth_value_p (TREE_CODE (arg1))
5478 || (TREE_CODE (arg1) == BIT_AND_EXPR
5479 && integer_onep (TREE_OPERAND (arg1, 1)))))
5480 || (truth_value_p (TREE_CODE (arg1))
5481 && (truth_value_p (TREE_CODE (arg0))
5482 || (TREE_CODE (arg0) == BIT_AND_EXPR
5483 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5485 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5486 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5490 if (code == EQ_EXPR)
5491 t = invert_truthvalue (t);
5496 if (TREE_CODE_CLASS (code) == '1')
5498 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5499 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5500 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5501 else if (TREE_CODE (arg0) == COND_EXPR)
5503 tree arg01 = TREE_OPERAND (arg0, 1);
5504 tree arg02 = TREE_OPERAND (arg0, 2);
5505 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5506 arg01 = fold (build1 (code, type, arg01));
5507 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5508 arg02 = fold (build1 (code, type, arg02));
5509 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5512 /* If this was a conversion, and all we did was to move into
5513 inside the COND_EXPR, bring it back out. But leave it if
5514 it is a conversion from integer to integer and the
5515 result precision is no wider than a word since such a
5516 conversion is cheap and may be optimized away by combine,
5517 while it couldn't if it were outside the COND_EXPR. Then return
5518 so we don't get into an infinite recursion loop taking the
5519 conversion out and then back in. */
5521 if ((code == NOP_EXPR || code == CONVERT_EXPR
5522 || code == NON_LVALUE_EXPR)
5523 && TREE_CODE (t) == COND_EXPR
5524 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5525 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5526 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5527 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5528 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5529 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5530 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5532 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5533 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5534 t = build1 (code, type,
5536 TREE_TYPE (TREE_OPERAND
5537 (TREE_OPERAND (t, 1), 0)),
5538 TREE_OPERAND (t, 0),
5539 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5540 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5543 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5544 return fold (build (COND_EXPR, type, arg0,
5545 fold (build1 (code, type, integer_one_node)),
5546 fold (build1 (code, type, integer_zero_node))));
5548 else if (TREE_CODE_CLASS (code) == '<'
5549 && TREE_CODE (arg0) == COMPOUND_EXPR)
5550 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5551 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5552 else if (TREE_CODE_CLASS (code) == '<'
5553 && TREE_CODE (arg1) == COMPOUND_EXPR)
5554 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5555 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5556 else if (TREE_CODE_CLASS (code) == '2'
5557 || TREE_CODE_CLASS (code) == '<')
5559 if (TREE_CODE (arg1) == COMPOUND_EXPR
5560 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5561 && ! TREE_SIDE_EFFECTS (arg0))
5562 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5563 fold (build (code, type,
5564 arg0, TREE_OPERAND (arg1, 1))));
5565 else if ((TREE_CODE (arg1) == COND_EXPR
5566 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5567 && TREE_CODE_CLASS (code) != '<'))
5568 && (TREE_CODE (arg0) != COND_EXPR
5569 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5570 && (! TREE_SIDE_EFFECTS (arg0)
5571 || ((*lang_hooks.decls.global_bindings_p) () == 0
5572 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5574 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5575 /*cond_first_p=*/0);
5576 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5577 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5578 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5579 else if ((TREE_CODE (arg0) == COND_EXPR
5580 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5581 && TREE_CODE_CLASS (code) != '<'))
5582 && (TREE_CODE (arg1) != COND_EXPR
5583 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5584 && (! TREE_SIDE_EFFECTS (arg1)
5585 || ((*lang_hooks.decls.global_bindings_p) () == 0
5586 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5588 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5589 /*cond_first_p=*/1);
5603 return fold (DECL_INITIAL (t));
5608 case FIX_TRUNC_EXPR:
5610 case FIX_FLOOR_EXPR:
5611 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5612 return TREE_OPERAND (t, 0);
5614 /* Handle cases of two conversions in a row. */
5615 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5616 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5618 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5619 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5620 tree final_type = TREE_TYPE (t);
5621 int inside_int = INTEGRAL_TYPE_P (inside_type);
5622 int inside_ptr = POINTER_TYPE_P (inside_type);
5623 int inside_float = FLOAT_TYPE_P (inside_type);
5624 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5625 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5626 int inter_int = INTEGRAL_TYPE_P (inter_type);
5627 int inter_ptr = POINTER_TYPE_P (inter_type);
5628 int inter_float = FLOAT_TYPE_P (inter_type);
5629 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5630 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5631 int final_int = INTEGRAL_TYPE_P (final_type);
5632 int final_ptr = POINTER_TYPE_P (final_type);
5633 int final_float = FLOAT_TYPE_P (final_type);
5634 unsigned int final_prec = TYPE_PRECISION (final_type);
5635 int final_unsignedp = TREE_UNSIGNED (final_type);
5637 /* In addition to the cases of two conversions in a row
5638 handled below, if we are converting something to its own
5639 type via an object of identical or wider precision, neither
5640 conversion is needed. */
5641 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5642 && ((inter_int && final_int) || (inter_float && final_float))
5643 && inter_prec >= final_prec)
5644 return fold (build1 (code, final_type,
5645 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5647 /* Likewise, if the intermediate and final types are either both
5648 float or both integer, we don't need the middle conversion if
5649 it is wider than the final type and doesn't change the signedness
5650 (for integers). Avoid this if the final type is a pointer
5651 since then we sometimes need the inner conversion. Likewise if
5652 the outer has a precision not equal to the size of its mode. */
5653 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5654 || (inter_float && inside_float))
5655 && inter_prec >= inside_prec
5656 && (inter_float || inter_unsignedp == inside_unsignedp)
5657 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5658 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5660 return fold (build1 (code, final_type,
5661 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5663 /* If we have a sign-extension of a zero-extended value, we can
5664 replace that by a single zero-extension. */
5665 if (inside_int && inter_int && final_int
5666 && inside_prec < inter_prec && inter_prec < final_prec
5667 && inside_unsignedp && !inter_unsignedp)
5668 return fold (build1 (code, final_type,
5669 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5671 /* Two conversions in a row are not needed unless:
5672 - some conversion is floating-point (overstrict for now), or
5673 - the intermediate type is narrower than both initial and final, or
5675 - the intermediate type and innermost type differ in signedness,
5676 and the outermost type is wider than the intermediate, or
5677 - the initial type is a pointer type and the precisions of the
5678 intermediate and final types differ, or
5679 - the final type is a pointer type and the precisions of the
5680 initial and intermediate types differ. */
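/* An illustrative example (assuming 8-bit char, 16-bit short, 32-bit int):
   none of the cases above applies to `(short) (int) c' with `c' a signed
   char, so the pair folds to the single conversion `(short) c'. */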
5681 if (! inside_float && ! inter_float && ! final_float
5682 && (inter_prec > inside_prec || inter_prec > final_prec)
5683 && ! (inside_int && inter_int
5684 && inter_unsignedp != inside_unsignedp
5685 && inter_prec < final_prec)
5686 && ((inter_unsignedp && inter_prec > inside_prec)
5687 == (final_unsignedp && final_prec > inter_prec))
5688 && ! (inside_ptr && inter_prec != final_prec)
5689 && ! (final_ptr && inside_prec != inter_prec)
5690 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5691 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5693 return fold (build1 (code, final_type,
5694 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5697 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5698 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5699 /* Detect assigning a bitfield. */
5700 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5701 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5703 /* Don't leave an assignment inside a conversion
5704 unless assigning a bitfield. */
5705 tree prev = TREE_OPERAND (t, 0);
5708 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5709 /* First do the assignment, then return converted constant. */
5710 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5715 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5716 constant (if x has a signed type, the sign bit cannot be set
5717 in c). This folds extension into the BIT_AND_EXPR. */
5718 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5719 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5720 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5721 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5723 tree and = TREE_OPERAND (t, 0);
5724 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5727 if (TREE_UNSIGNED (TREE_TYPE (and))
5728 || (TYPE_PRECISION (TREE_TYPE (t))
5729 <= TYPE_PRECISION (TREE_TYPE (and))))
5731 else if (TYPE_PRECISION (TREE_TYPE (and1))
5732 <= HOST_BITS_PER_WIDE_INT
5733 && host_integerp (and1, 1))
5735 unsigned HOST_WIDE_INT cst;
5737 cst = tree_low_cst (and1, 1);
5738 cst &= (HOST_WIDE_INT) -1
5739 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5740 change = (cst == 0);
5741 #ifdef LOAD_EXTEND_OP
5743 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5746 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5747 and0 = fold_convert (uns, and0);
5748 and1 = fold_convert (uns, and1);
5753 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5754 fold_convert (TREE_TYPE (t), and0),
5755 fold_convert (TREE_TYPE (t), and1)));
5758 tem = fold_convert_const (code, TREE_TYPE (t), arg0);
5759 return tem ? tem : t;
5761 case VIEW_CONVERT_EXPR:
5762 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5763 return build1 (VIEW_CONVERT_EXPR, type,
5764 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5768 if (TREE_CODE (arg0) == CONSTRUCTOR
5769 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5771 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5778 if (TREE_CONSTANT (t) != wins)
5782 TREE_CONSTANT (t) = wins;
5787 if (negate_expr_p (arg0))
5788 return negate_expr (arg0);
5794 if (TREE_CODE (arg0) == INTEGER_CST)
5796 /* If the value is unsigned, then the absolute value is
5797 the same as the ordinary value. */
5798 if (TREE_UNSIGNED (type))
5800 /* Similarly, if the value is non-negative. */
5801 else if (INT_CST_LT (integer_minus_one_node, arg0))
5803 /* If the value is negative, then the absolute value is its negation. */
5807 unsigned HOST_WIDE_INT low;
5809 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5810 TREE_INT_CST_HIGH (arg0),
5812 t = build_int_2 (low, high);
5813 TREE_TYPE (t) = type;
5815 = (TREE_OVERFLOW (arg0)
5816 | force_fit_type (t, overflow));
5817 TREE_CONSTANT_OVERFLOW (t)
5818 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5821 else if (TREE_CODE (arg0) == REAL_CST)
5823 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5824 t = build_real (type,
5825 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5828 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5829 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5830 /* Convert fabs((double)float) into (double)fabsf(float). */
5831 else if (TREE_CODE (arg0) == NOP_EXPR
5832 && TREE_CODE (type) == REAL_TYPE)
5834 tree targ0 = strip_float_extensions (arg0);
5836 return fold_convert (type, fold (build1 (ABS_EXPR,
5840 else if (tree_expr_nonnegative_p (arg0))
5845 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5846 return fold_convert (type, arg0);
5847 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5848 return build (COMPLEX_EXPR, type,
5849 TREE_OPERAND (arg0, 0),
5850 negate_expr (TREE_OPERAND (arg0, 1)));
5851 else if (TREE_CODE (arg0) == COMPLEX_CST)
5852 return build_complex (type, TREE_REALPART (arg0),
5853 negate_expr (TREE_IMAGPART (arg0)));
5854 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5855 return fold (build (TREE_CODE (arg0), type,
5856 fold (build1 (CONJ_EXPR, type,
5857 TREE_OPERAND (arg0, 0))),
5858 fold (build1 (CONJ_EXPR,
5859 type, TREE_OPERAND (arg0, 1)))));
5860 else if (TREE_CODE (arg0) == CONJ_EXPR)
5861 return TREE_OPERAND (arg0, 0);
5867 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5868 ~ TREE_INT_CST_HIGH (arg0));
5869 TREE_TYPE (t) = type;
5870 force_fit_type (t, 0);
5871 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5872 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5874 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5875 return TREE_OPERAND (arg0, 0);
5879 /* A + (-B) -> A - B */
5880 if (TREE_CODE (arg1) == NEGATE_EXPR)
5881 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5882 /* (-A) + B -> B - A */
5883 if (TREE_CODE (arg0) == NEGATE_EXPR)
5884 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5885 else if (! FLOAT_TYPE_P (type))
5887 if (integer_zerop (arg1))
5888 return non_lvalue (fold_convert (type, arg0));
5890 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5891 with a constant, and the two constants have no bits in common,
5892 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
5894 if (TREE_CODE (arg0) == BIT_AND_EXPR
5895 && TREE_CODE (arg1) == BIT_AND_EXPR
5896 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5897 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5898 && integer_zerop (const_binop (BIT_AND_EXPR,
5899 TREE_OPERAND (arg0, 1),
5900 TREE_OPERAND (arg1, 1), 0)))
5902 code = BIT_IOR_EXPR;
5906 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5907 (plus (plus (mult) (mult)) (foo)) so that we can
5908 take advantage of the factoring cases below. */
5909 if ((TREE_CODE (arg0) == PLUS_EXPR
5910 && TREE_CODE (arg1) == MULT_EXPR)
5911 || (TREE_CODE (arg1) == PLUS_EXPR
5912 && TREE_CODE (arg0) == MULT_EXPR))
5914 tree parg0, parg1, parg, marg;
5916 if (TREE_CODE (arg0) == PLUS_EXPR)
5917 parg = arg0, marg = arg1;
5919 parg = arg1, marg = arg0;
5920 parg0 = TREE_OPERAND (parg, 0);
5921 parg1 = TREE_OPERAND (parg, 1);
5925 if (TREE_CODE (parg0) == MULT_EXPR
5926 && TREE_CODE (parg1) != MULT_EXPR)
5927 return fold (build (PLUS_EXPR, type,
5928 fold (build (PLUS_EXPR, type,
5929 fold_convert (type, parg0),
5930 fold_convert (type, marg))),
5931 fold_convert (type, parg1)));
5932 if (TREE_CODE (parg0) != MULT_EXPR
5933 && TREE_CODE (parg1) == MULT_EXPR)
5934 return fold (build (PLUS_EXPR, type,
5935 fold (build (PLUS_EXPR, type,
5936 fold_convert (type, parg1),
5937 fold_convert (type, marg))),
5938 fold_convert (type, parg0)));
5941 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5943 tree arg00, arg01, arg10, arg11;
5944 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5946 /* (A * C) + (B * C) -> (A+B) * C.
5947 We are most concerned about the case where C is a constant,
5948 but other combinations show up during loop reduction. Since
5949 it is not difficult, try all four possibilities. */
5951 arg00 = TREE_OPERAND (arg0, 0);
5952 arg01 = TREE_OPERAND (arg0, 1);
5953 arg10 = TREE_OPERAND (arg1, 0);
5954 arg11 = TREE_OPERAND (arg1, 1);
5957 if (operand_equal_p (arg01, arg11, 0))
5958 same = arg01, alt0 = arg00, alt1 = arg10;
5959 else if (operand_equal_p (arg00, arg10, 0))
5960 same = arg00, alt0 = arg01, alt1 = arg11;
5961 else if (operand_equal_p (arg00, arg11, 0))
5962 same = arg00, alt0 = arg01, alt1 = arg10;
5963 else if (operand_equal_p (arg01, arg10, 0))
5964 same = arg01, alt0 = arg00, alt1 = arg11;
5966 /* No identical multiplicands; see if we can find a common
5967 power-of-two factor in non-power-of-two multiplies. This
5968 can help in multi-dimensional array access. */
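/* An illustrative example: `i * 12 + j * 4' shares no multiplicand, but
   4 is a power of two dividing 12, so the sum can be rewritten as
   `(i * 3 + j) * 4'. */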
5969 else if (TREE_CODE (arg01) == INTEGER_CST
5970 && TREE_CODE (arg11) == INTEGER_CST
5971 && TREE_INT_CST_HIGH (arg01) == 0
5972 && TREE_INT_CST_HIGH (arg11) == 0)
5974 HOST_WIDE_INT int01, int11, tmp;
5975 int01 = TREE_INT_CST_LOW (arg01);
5976 int11 = TREE_INT_CST_LOW (arg11);
5978 /* Move min of absolute values to int11. */
5979 if ((int01 >= 0 ? int01 : -int01)
5980 < (int11 >= 0 ? int11 : -int11))
5982 tmp = int01, int01 = int11, int11 = tmp;
5983 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5984 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5987 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5989 alt0 = fold (build (MULT_EXPR, type, arg00,
5990 build_int_2 (int01 / int11, 0)));
5997 return fold (build (MULT_EXPR, type,
5998 fold (build (PLUS_EXPR, type, alt0, alt1)),
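      /* For example, i * 12 + j * 4 has no identical multiplicand, but
         4 is a power of two dividing 12, so it becomes (i * 3 + j) * 4,
         a useful form for multi-dimensional array indexing.  */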
      else
        {
          /* See if ARG1 is zero and X + ARG1 reduces to X.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
            return non_lvalue (fold_convert (type, arg0));

          /* Likewise if the operands are reversed.  */
          if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
            return non_lvalue (fold_convert (type, arg1));

          /* Convert x+x into x*2.0.  */
          if (operand_equal_p (arg0, arg1, 0)
              && SCALAR_FLOAT_TYPE_P (type))
            return fold (build (MULT_EXPR, type, arg0,
                                build_real (type, dconst2)));

          /* Convert x*c+x into x*(c+1).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
              && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
            {
              REAL_VALUE_TYPE c;

              c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
              real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
              return fold (build (MULT_EXPR, type, arg1,
                                  build_real (type, c)));
            }

          /* Convert x+x*c into x*(c+1).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
              && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
            {
              REAL_VALUE_TYPE c;

              c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
              real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
              return fold (build (MULT_EXPR, type, arg0,
                                  build_real (type, c)));
            }

          /* Convert x*c1+x*c2 into x*(c1+c2).  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == MULT_EXPR
              && TREE_CODE (arg1) == MULT_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
              && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
              && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              REAL_VALUE_TYPE c1, c2;

              c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
              c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
              real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
              return fold (build (MULT_EXPR, type,
                                  TREE_OPERAND (arg0, 0),
                                  build_real (type, c1)));
            }
        }
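      /* For example, with -funsafe-math-optimizations, x*2.0 + x*3.0
         folds to x*5.0 by the transformations above.  */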
      /* (A << C1) + (A >> C2) is a rotate of A by C1 bits if A is
         unsigned and C1+C2 is the size of A.  */
      /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits if A is
         unsigned and Z is the size of A.  */
      {
        enum tree_code code0, code1;
        code0 = TREE_CODE (arg0);
        code1 = TREE_CODE (arg1);
        if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
             || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
            && operand_equal_p (TREE_OPERAND (arg0, 0),
                                TREE_OPERAND (arg1, 0), 0)
            && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
          {
            tree tree01, tree11;
            enum tree_code code01, code11;

            tree01 = TREE_OPERAND (arg0, 1);
            tree11 = TREE_OPERAND (arg1, 1);
            STRIP_NOPS (tree01);
            STRIP_NOPS (tree11);
            code01 = TREE_CODE (tree01);
            code11 = TREE_CODE (tree11);
            if (code01 == INTEGER_CST
                && code11 == INTEGER_CST
                && TREE_INT_CST_HIGH (tree01) == 0
                && TREE_INT_CST_HIGH (tree11) == 0
                && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
                    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
              return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
                            code0 == LSHIFT_EXPR ? tree01 : tree11);
            else if (code11 == MINUS_EXPR)
              {
                tree tree110, tree111;
                tree110 = TREE_OPERAND (tree11, 0);
                tree111 = TREE_OPERAND (tree11, 1);
                STRIP_NOPS (tree110);
                STRIP_NOPS (tree111);
                if (TREE_CODE (tree110) == INTEGER_CST
                    && 0 == compare_tree_int (tree110,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree01, tree111, 0))
                  return build ((code0 == LSHIFT_EXPR
                                 ? LROTATE_EXPR
                                 : RROTATE_EXPR),
                                type, TREE_OPERAND (arg0, 0), tree01);
              }
            else if (code01 == MINUS_EXPR)
              {
                tree tree010, tree011;
                tree010 = TREE_OPERAND (tree01, 0);
                tree011 = TREE_OPERAND (tree01, 1);
                STRIP_NOPS (tree010);
                STRIP_NOPS (tree011);
                if (TREE_CODE (tree010) == INTEGER_CST
                    && 0 == compare_tree_int (tree010,
                                              TYPE_PRECISION
                                              (TREE_TYPE (TREE_OPERAND
                                                          (arg0, 0))))
                    && operand_equal_p (tree11, tree011, 0))
                  return build ((code0 != LSHIFT_EXPR
                                 ? LROTATE_EXPR
                                 : RROTATE_EXPR),
                                type, TREE_OPERAND (arg0, 0), tree11);
              }
          }
      }
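      /* For example, if x is a 32-bit unsigned variable, both
         (x << 3) + (x >> 29) and (x << n) + (x >> (32 - n)) are
         recognized as rotates of x here.  */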
    associate:
      /* In most languages, we cannot associate operations on floats
         through parentheses.  Rather than remember where the parentheses
         were, we don't associate floats at all, unless the user has
         specified -funsafe-math-optimizations.  */

      if (! wins
          && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          tree var0, con0, lit0, minus_lit0;
          tree var1, con1, lit1, minus_lit1;

          /* Split both trees into variables, constants, and literals.  Then
             associate each group together, the constants with literals,
             then the result with variables.  This increases the chances of
             literals being recombined later and of generating relocatable
             expressions for the sum of a constant and literal.  */
          var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
          var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
                             code == MINUS_EXPR);

          /* Only do something if we found more than two objects.  Otherwise,
             nothing has changed and we risk infinite recursion.  */
          if (2 < ((var0 != 0) + (var1 != 0)
                   + (con0 != 0) + (con1 != 0)
                   + (lit0 != 0) + (lit1 != 0)
                   + (minus_lit0 != 0) + (minus_lit1 != 0)))
            {
              /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
              if (code == MINUS_EXPR)
                code = PLUS_EXPR;

              var0 = associate_trees (var0, var1, code, type);
              con0 = associate_trees (con0, con1, code, type);
              lit0 = associate_trees (lit0, lit1, code, type);
              minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);

              /* Preserve the MINUS_EXPR if the negative part of the literal is
                 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
                 unsigned constants are subtracted, like in the following
                 example: ((X*2 + 4) - 8U)/2.  */
              if (minus_lit0 && lit0)
                {
                  if (TREE_CODE (lit0) == INTEGER_CST
                      && TREE_CODE (minus_lit0) == INTEGER_CST
                      && tree_int_cst_lt (lit0, minus_lit0))
                    {
                      minus_lit0 = associate_trees (minus_lit0, lit0,
                                                    MINUS_EXPR, type);
                      lit0 = 0;
                    }
                  else
                    {
                      lit0 = associate_trees (lit0, minus_lit0,
                                              MINUS_EXPR, type);
                      minus_lit0 = 0;
                    }
                }
              if (minus_lit0)
                {
                  if (con0 == 0)
                    return fold_convert (type,
                                         associate_trees (var0, minus_lit0,
                                                          MINUS_EXPR, type));
                  else
                    {
                      con0 = associate_trees (con0, minus_lit0,
                                              MINUS_EXPR, type);
                      return fold_convert (type,
                                           associate_trees (var0, con0,
                                                            PLUS_EXPR, type));
                    }
                }

              con0 = associate_trees (con0, lit0, code, type);
              return fold_convert (type, associate_trees (var0, con0,
                                                          code, type));
            }
        }
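      /* For example, (x + 1) + (y + 2) is split into the variables x and y
         and the literals 1 and 2, and is regrouped as (x + y) + 3.  */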
    binary:
      if (wins)
        t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
        {
          /* The return value should always have
             the same type as the original expression.  */
          if (TREE_TYPE (t1) != TREE_TYPE (t))
            t1 = fold_convert (TREE_TYPE (t), t1);

          return t1;
        }
      return t;

    case MINUS_EXPR:
      /* A - (-B) -> A + B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
        return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
          && (FLOAT_TYPE_P (type)
              || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
          && negate_expr_p (arg1)
          && reorder_operands_p (arg0, arg1))
        return fold (build (MINUS_EXPR, type, negate_expr (arg1),
                            TREE_OPERAND (arg0, 0)));
      if (! FLOAT_TYPE_P (type))
        {
          if (! wins && integer_zerop (arg0))
            return negate_expr (fold_convert (type, arg1));
          if (integer_zerop (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Fold A - (A & B) into ~B & A.  */
          if (!TREE_SIDE_EFFECTS (arg0)
              && TREE_CODE (arg1) == BIT_AND_EXPR)
            {
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
                return fold (build (BIT_AND_EXPR, type,
                                    fold (build1 (BIT_NOT_EXPR, type,
                                                  TREE_OPERAND (arg1, 0))),
                                    arg0));
              if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
                return fold (build (BIT_AND_EXPR, type,
                                    fold (build1 (BIT_NOT_EXPR, type,
                                                  TREE_OPERAND (arg1, 1))),
                                    arg0));
            }

          /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
             any power of 2 minus 1.  */
          if (TREE_CODE (arg0) == BIT_AND_EXPR
              && TREE_CODE (arg1) == BIT_AND_EXPR
              && operand_equal_p (TREE_OPERAND (arg0, 0),
                                  TREE_OPERAND (arg1, 0), 0))
            {
              tree mask0 = TREE_OPERAND (arg0, 1);
              tree mask1 = TREE_OPERAND (arg1, 1);
              tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));

              if (operand_equal_p (tem, mask1, 0))
                {
                  tem = fold (build (BIT_XOR_EXPR, type,
                                     TREE_OPERAND (arg0, 0), mask1));
                  return fold (build (MINUS_EXPR, type, tem, mask1));
                }
            }
        }
      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
        return non_lvalue (fold_convert (type, arg0));

      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
         ARG0 is zero and X + ARG0 reduces to X, since that would mean
         (-ARG1 + ARG0) reduces to -ARG1.  */
      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
        return negate_expr (fold_convert (type, arg1));

      /* Fold &x - &x.  This can happen from &x.foo - &x.
         This is unsafe for certain floats even in non-IEEE formats.
         In IEEE, it is unsafe because it does wrong for NaNs.
         Also note that operand_equal_p is always false if an operand
         is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && operand_equal_p (arg0, arg1, 0))
        return fold_convert (type, integer_zero_node);

      /* A - B -> A + (-B) if B is easily negatable.  */
      if (!wins && negate_expr_p (arg1)
          && (FLOAT_TYPE_P (type)
              || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
        return fold (build (PLUS_EXPR, type, arg0, negate_expr (arg1)));

      if (TREE_CODE (arg0) == MULT_EXPR
          && TREE_CODE (arg1) == MULT_EXPR
          && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
        {
          /* (A * C) - (B * C) -> (A-B) * C.  */
          if (operand_equal_p (TREE_OPERAND (arg0, 1),
                               TREE_OPERAND (arg1, 1), 0))
            return fold (build (MULT_EXPR, type,
                                fold (build (MINUS_EXPR, type,
                                             TREE_OPERAND (arg0, 0),
                                             TREE_OPERAND (arg1, 0))),
                                TREE_OPERAND (arg0, 1)));
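          /* For example, a * c - b * c becomes (a - b) * c here, and
             x * 7 - x * 3 becomes x * (7 - 3) just below.  */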
          /* (A * C1) - (A * C2) -> A * (C1-C2).  */
          if (operand_equal_p (TREE_OPERAND (arg0, 0),
                               TREE_OPERAND (arg1, 0), 0))
            return fold (build (MULT_EXPR, type,
                                TREE_OPERAND (arg0, 0),
                                fold (build (MINUS_EXPR, type,
                                             TREE_OPERAND (arg0, 1),
                                             TREE_OPERAND (arg1, 1)))));
        }

      goto associate;
    case MULT_EXPR:
      /* (-A) * (-B) -> A * B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold (build (MULT_EXPR, type,
                            TREE_OPERAND (arg0, 0),
                            negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold (build (MULT_EXPR, type,
                            negate_expr (arg0),
                            TREE_OPERAND (arg1, 0)));

      if (! FLOAT_TYPE_P (type))
        {
          if (integer_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          if (integer_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* (a * (1 << b)) is (a << b)  */
          if (TREE_CODE (arg1) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg1, 0)))
            return fold (build (LSHIFT_EXPR, type, arg0,
                                TREE_OPERAND (arg1, 1)));
          if (TREE_CODE (arg0) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (arg0, 0)))
            return fold (build (LSHIFT_EXPR, type, arg1,
                                TREE_OPERAND (arg0, 1)));
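          /* For example, i * (1 << n) is rewritten as i << n, turning
             the multiplication into a single shift.  */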
          if (TREE_CODE (arg1) == INTEGER_CST
              && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
                                             fold_convert (type, arg1),
                                             code, NULL_TREE)))
            return fold_convert (type, tem);
        }
      else
        {
          /* Maybe fold x * 0 to 0.  The expressions aren't the same
             when x is NaN, since x * 0 is also NaN.  Nor are they the
             same in modes with signed zeros, since multiplying a
             negative value by 0 gives -0, not +0.  */
          if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_zerop (arg1))
            return omit_one_operand (type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_onep (arg1))
            return non_lvalue (fold_convert (type, arg0));

          /* Transform x * -1.0 into -x.  */
          if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
              && real_minus_onep (arg1))
            return fold (build1 (NEGATE_EXPR, type, arg0));

          /* Convert (C1/X)*C2 into (C1*C2)/X.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg0) == RDIV_EXPR
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
            {
              tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
                                      arg1, 0);
              if (tem)
                return fold (build (RDIV_EXPR, type, tem,
                                    TREE_OPERAND (arg0, 1)));
            }
          if (flag_unsafe_math_optimizations)
            {
              enum built_in_function fcode0 = builtin_mathfn_code (arg0);
              enum built_in_function fcode1 = builtin_mathfn_code (arg1);

              /* Optimizations of sqrt(...)*sqrt(...).  */
              if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
                  || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
                  || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
                {
                  tree sqrtfn, arg, arglist;
                  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
                  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));

                  /* Optimize sqrt(x)*sqrt(x) as x.  */
                  if (operand_equal_p (arg00, arg10, 0)
                      && ! HONOR_SNANS (TYPE_MODE (type)))
                    return arg00;

                  /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y).  */
                  sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  arg = fold (build (MULT_EXPR, type, arg00, arg10));
                  arglist = build_tree_list (NULL_TREE, arg);
                  return build_function_call_expr (sqrtfn, arglist);
                }

              /* Optimize expN(x)*expN(y) as expN(x+y).  */
              if (fcode0 == fcode1
                  && (fcode0 == BUILT_IN_EXP
                      || fcode0 == BUILT_IN_EXPF
                      || fcode0 == BUILT_IN_EXPL
                      || fcode0 == BUILT_IN_EXP2
                      || fcode0 == BUILT_IN_EXP2F
                      || fcode0 == BUILT_IN_EXP2L
                      || fcode0 == BUILT_IN_EXP10
                      || fcode0 == BUILT_IN_EXP10F
                      || fcode0 == BUILT_IN_EXP10L
                      || fcode0 == BUILT_IN_POW10
                      || fcode0 == BUILT_IN_POW10F
                      || fcode0 == BUILT_IN_POW10L))
                {
                  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  tree arg = build (PLUS_EXPR, type,
                                    TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                    TREE_VALUE (TREE_OPERAND (arg1, 1)));
                  tree arglist = build_tree_list (NULL_TREE, fold (arg));
                  return build_function_call_expr (expfn, arglist);
                }
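              /* For example, exp (x) * exp (y) becomes exp (x + y), and
                 likewise exp2 (x) * exp2 (y) becomes exp2 (x + y), under
                 -funsafe-math-optimizations.  */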
              /* Optimizations of pow(...)*pow(...).  */
              if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
                  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
                  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
                {
                  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
                  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
                                                                     1)));
                  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
                  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
                                                                     1)));

                  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
                  if (operand_equal_p (arg01, arg11, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                      tree arg = build (MULT_EXPR, type, arg00, arg10);
                      tree arglist = tree_cons (NULL_TREE, fold (arg),
                                                build_tree_list (NULL_TREE,
                                                                 arg01));
                      return build_function_call_expr (powfn, arglist);
                    }

                  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
                  if (operand_equal_p (arg00, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                      tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
                      tree arglist = tree_cons (NULL_TREE, arg00,
                                                build_tree_list (NULL_TREE,
                                                                 arg));
                      return build_function_call_expr (powfn, arglist);
                    }
                }

              /* Optimize tan(x)*cos(x) as sin(x).  */
              if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
                   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
                   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
                   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
                   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
                   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
                  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
                {
                  tree sinfn;

                  switch (fcode0)
                    {
                    case BUILT_IN_TAN:
                    case BUILT_IN_COS:
                      sinfn = implicit_built_in_decls[BUILT_IN_SIN];
                      break;
                    case BUILT_IN_TANF:
                    case BUILT_IN_COSF:
                      sinfn = implicit_built_in_decls[BUILT_IN_SINF];
                      break;
                    case BUILT_IN_TANL:
                    case BUILT_IN_COSL:
                      sinfn = implicit_built_in_decls[BUILT_IN_SINL];
                      break;
                    default:
                      sinfn = NULL_TREE;
                    }

                  if (sinfn != NULL_TREE)
                    return build_function_call_expr (sinfn,
                                                     TREE_OPERAND (arg0, 1));
                }
              /* Optimize x*pow(x,c) as pow(x,c+1).  */
              if (fcode1 == BUILT_IN_POW
                  || fcode1 == BUILT_IN_POWF
                  || fcode1 == BUILT_IN_POWL)
                {
                  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
                  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
                                                                     1)));
                  if (TREE_CODE (arg11) == REAL_CST
                      && ! TREE_CONSTANT_OVERFLOW (arg11)
                      && operand_equal_p (arg0, arg10, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg, arglist;

                      c = TREE_REAL_CST (arg11);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      arglist = build_tree_list (NULL_TREE, arg);
                      arglist = tree_cons (NULL_TREE, arg0, arglist);
                      return build_function_call_expr (powfn, arglist);
                    }
                }

              /* Optimize pow(x,c)*x as pow(x,c+1).  */
              if (fcode0 == BUILT_IN_POW
                  || fcode0 == BUILT_IN_POWF
                  || fcode0 == BUILT_IN_POWL)
                {
                  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
                  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
                                                                     1)));
                  if (TREE_CODE (arg01) == REAL_CST
                      && ! TREE_CONSTANT_OVERFLOW (arg01)
                      && operand_equal_p (arg1, arg00, 0))
                    {
                      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                      REAL_VALUE_TYPE c;
                      tree arg, arglist;

                      c = TREE_REAL_CST (arg01);
                      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
                      arg = build_real (type, c);
                      arglist = build_tree_list (NULL_TREE, arg);
                      arglist = tree_cons (NULL_TREE, arg1, arglist);
                      return build_function_call_expr (powfn, arglist);
                    }
                }

              /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
              if (! optimize_size
                  && operand_equal_p (arg0, arg1, 0))
                {
                  tree powfn = NULL_TREE;

                  if (type == double_type_node)
                    powfn = implicit_built_in_decls[BUILT_IN_POW];
                  else if (type == float_type_node)
                    powfn = implicit_built_in_decls[BUILT_IN_POWF];
                  else if (type == long_double_type_node)
                    powfn = implicit_built_in_decls[BUILT_IN_POWL];

                  if (powfn)
                    {
                      tree arg = build_real (type, dconst2);
                      tree arglist = build_tree_list (NULL_TREE, arg);
                      arglist = tree_cons (NULL_TREE, arg0, arglist);
                      return build_function_call_expr (powfn, arglist);
                    }
                }
            }
        }
      goto associate;
    case BIT_IOR_EXPR:
    bit_ior:
      if (integer_all_onesp (arg1))
        return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;

      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).

         This results in more efficient code for machines without a NAND
         instruction.  Combine will canonicalize to the first form
         which will allow use of NAND instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold (build1 (BIT_NOT_EXPR, type,
                               build (BIT_AND_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      TREE_OPERAND (arg1, 0))));
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_XOR_EXPR:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_all_onesp (arg1))
        return fold (build1 (BIT_NOT_EXPR, type, arg0));

      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
         with a constant, and the two constants have no bits in common,
         we should treat this as a BIT_IOR_EXPR since this may produce more
         simplifications.  */
      if (TREE_CODE (arg0) == BIT_AND_EXPR
          && TREE_CODE (arg1) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
          && integer_zerop (const_binop (BIT_AND_EXPR,
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (arg1, 1), 0)))
        {
          code = BIT_IOR_EXPR;
          goto bit_ior;
        }

      /* See if this can be simplified into a rotate first.  If that
         is unsuccessful continue in the association code.  */
      goto bit_rotate;

    case BIT_AND_EXPR:
      if (integer_all_onesp (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
        return t1;
      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
          && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
        {
          unsigned int prec
            = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));

          if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
              && (~TREE_INT_CST_LOW (arg1)
                  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
            return fold_convert (type, TREE_OPERAND (arg0, 0));
        }

      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).

         This results in more efficient code for machines without a NOR
         instruction.  Combine will canonicalize to the first form
         which will allow use of NOR instructions provided by the
         backend if they exist.  */
      if (TREE_CODE (arg0) == BIT_NOT_EXPR
          && TREE_CODE (arg1) == BIT_NOT_EXPR)
        {
          return fold (build1 (BIT_NOT_EXPR, type,
                               build (BIT_IOR_EXPR, type,
                                      TREE_OPERAND (arg0, 0),
                                      TREE_OPERAND (arg1, 0))));
        }

      goto associate;
    case RDIV_EXPR:
      /* Don't touch a floating-point divide by zero unless the mode
         of the constant can represent infinity.  */
      if (TREE_CODE (arg1) == REAL_CST
          && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
          && real_zerop (arg1))
        return t;

      /* (-A) / (-B) -> A / B */
      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
        return fold (build (RDIV_EXPR, type,
                            TREE_OPERAND (arg0, 0),
                            negate_expr (arg1)));
      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
        return fold (build (RDIV_EXPR, type,
                            negate_expr (arg0),
                            TREE_OPERAND (arg1, 0)));

      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));

      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
          && real_minus_onep (arg1))
        return non_lvalue (fold_convert (type, negate_expr (arg0)));

      /* If ARG1 is a constant, we can convert this to a multiply by the
         reciprocal.  This does not have the same rounding properties,
         so only do this if -funsafe-math-optimizations.  We can actually
         always safely do it if ARG1 is a power of two, but it's hard to
         tell if it is or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (flag_unsafe_math_optimizations
              && 0 != (tem = const_binop (code, build_real (type, dconst1),
                                          arg1, 0)))
            return fold (build (MULT_EXPR, type, arg0, tem));
          /* Find the reciprocal if optimizing and the result is exact.  */
          if (optimize)
            {
              REAL_VALUE_TYPE r;
              r = TREE_REAL_CST (arg1);
              if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
                {
                  tem = build_real (type, r);
                  return fold (build (MULT_EXPR, type, arg0, tem));
                }
            }
        }
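      /* For example, x / 8.0 becomes x * 0.125: always under
         -funsafe-math-optimizations, and even without it when optimizing,
         since 0.125 is exactly representable.  */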
      /* Convert A/B/C to A/(B*C).  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg0) == RDIV_EXPR)
        return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
                            fold (build (MULT_EXPR, type,
                                         TREE_OPERAND (arg0, 1), arg1))));

      /* Convert A/(B/C) to (A/B)*C.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == RDIV_EXPR)
        return fold (build (MULT_EXPR, type,
                            fold (build (RDIV_EXPR, type, arg0,
                                         TREE_OPERAND (arg1, 0))),
                            TREE_OPERAND (arg1, 1)));

      /* Convert C1/(X*C2) into (C1/C2)/X.  */
      if (flag_unsafe_math_optimizations
          && TREE_CODE (arg1) == MULT_EXPR
          && TREE_CODE (arg0) == REAL_CST
          && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
        {
          tree tem = const_binop (RDIV_EXPR, arg0,
                                  TREE_OPERAND (arg1, 1), 0);
          if (tem)
            return fold (build (RDIV_EXPR, type, tem,
                                TREE_OPERAND (arg1, 0)));
        }
      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode = builtin_mathfn_code (arg1);
          /* Optimize x/expN(y) into x*expN(-y).  */
          if (fcode == BUILT_IN_EXP
              || fcode == BUILT_IN_EXPF
              || fcode == BUILT_IN_EXPL
              || fcode == BUILT_IN_EXP2
              || fcode == BUILT_IN_EXP2F
              || fcode == BUILT_IN_EXP2L
              || fcode == BUILT_IN_EXP10
              || fcode == BUILT_IN_EXP10F
              || fcode == BUILT_IN_EXP10L
              || fcode == BUILT_IN_POW10
              || fcode == BUILT_IN_POW10F
              || fcode == BUILT_IN_POW10L)
            {
              tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
              tree arg = build1 (NEGATE_EXPR, type,
                                 TREE_VALUE (TREE_OPERAND (arg1, 1)));
              tree arglist = build_tree_list (NULL_TREE, fold (arg));
              arg1 = build_function_call_expr (expfn, arglist);
              return fold (build (MULT_EXPR, type, arg0, arg1));
            }

          /* Optimize x/pow(y,z) into x*pow(y,-z).  */
          if (fcode == BUILT_IN_POW
              || fcode == BUILT_IN_POWF
              || fcode == BUILT_IN_POWL)
            {
              tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
              tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
              tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
              tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
              tree arglist = tree_cons (NULL_TREE, arg10,
                                        build_tree_list (NULL_TREE, neg11));
              arg1 = build_function_call_expr (powfn, arglist);
              return fold (build (MULT_EXPR, type, arg0, arg1));
            }
        }
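      /* For example, x / exp (y) becomes x * exp (-y), replacing the
         division with a cheaper multiplication.  */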
      if (flag_unsafe_math_optimizations)
        {
          enum built_in_function fcode0 = builtin_mathfn_code (arg0);
          enum built_in_function fcode1 = builtin_mathfn_code (arg1);

          /* Optimize sin(x)/cos(x) as tan(x).  */
          if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
               || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
               || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
              && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
            {
              tree tanfn;

              if (fcode0 == BUILT_IN_SIN)
                tanfn = implicit_built_in_decls[BUILT_IN_TAN];
              else if (fcode0 == BUILT_IN_SINF)
                tanfn = implicit_built_in_decls[BUILT_IN_TANF];
              else if (fcode0 == BUILT_IN_SINL)
                tanfn = implicit_built_in_decls[BUILT_IN_TANL];
              else
                tanfn = NULL_TREE;

              if (tanfn != NULL_TREE)
                return build_function_call_expr (tanfn,
                                                 TREE_OPERAND (arg0, 1));
            }

          /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
          if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
               || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
               || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
              && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
                                  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
            {
              tree tanfn;

              if (fcode0 == BUILT_IN_COS)
                tanfn = implicit_built_in_decls[BUILT_IN_TAN];
              else if (fcode0 == BUILT_IN_COSF)
                tanfn = implicit_built_in_decls[BUILT_IN_TANF];
              else if (fcode0 == BUILT_IN_COSL)
                tanfn = implicit_built_in_decls[BUILT_IN_TANL];
              else
                tanfn = NULL_TREE;

              if (tanfn != NULL_TREE)
                {
                  tree tmp = TREE_OPERAND (arg0, 1);
                  tmp = build_function_call_expr (tanfn, tmp);
                  return fold (build (RDIV_EXPR, type,
                                      build_real (type, dconst1),
                                      tmp));
                }
            }

          /* Optimize pow(x,c)/x as pow(x,c-1).  */
          if (fcode0 == BUILT_IN_POW
              || fcode0 == BUILT_IN_POWF
              || fcode0 == BUILT_IN_POWL)
            {
              tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
              tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
              if (TREE_CODE (arg01) == REAL_CST
                  && ! TREE_CONSTANT_OVERFLOW (arg01)
                  && operand_equal_p (arg1, arg00, 0))
                {
                  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
                  REAL_VALUE_TYPE c;
                  tree arg, arglist;

                  c = TREE_REAL_CST (arg01);
                  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
                  arg = build_real (type, c);
                  arglist = build_tree_list (NULL_TREE, arg);
                  arglist = tree_cons (NULL_TREE, arg1, arglist);
                  return build_function_call_expr (powfn, arglist);
                }
            }
        }
      goto binary;
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (integer_onep (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg1))
        return t;

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
         operation, EXACT_DIV_EXPR.

         Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
         At one time others generated faster code, it's not clear if they do
         after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
          && multiple_of_p (type, arg0, arg1))
        return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
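      /* For example, (8 * i) FLOOR_DIV 4 is known to divide exactly, so
         it is rewritten as (8 * i) EXACT_DIV 4, for which expmed.c can
         emit the cheapest division sequence.  */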
      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
                                         code, NULL_TREE)))
        return fold_convert (type, tem);

      goto binary;
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      if (integer_onep (arg1))
        return omit_one_operand (type, integer_zero_node, arg0);
      if (integer_zerop (arg1))
        return t;

      if (TREE_CODE (arg1) == INTEGER_CST
          && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
                                         code, NULL_TREE)))
        return fold_convert (type, tem);

      goto binary;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (integer_all_onesp (arg0))
        return omit_one_operand (type, arg0, arg1);
      goto shift;

    case RSHIFT_EXPR:
      /* Optimize -1 >> x for arithmetic right shifts.  */
      if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
        return omit_one_operand (type, arg0, arg1);
      /* ... fall through ...  */

    case LSHIFT_EXPR:
    shift:
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);

      /* Since negative shift count is not well-defined,
         don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
        return t;
      /* Rewrite an LROTATE_EXPR by a constant into an
         RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
        {
          tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
          tem = fold_convert (TREE_TYPE (arg1), tem);
          tem = const_binop (MINUS_EXPR, tem, arg1, 0);
          return fold (build (RROTATE_EXPR, type, arg0, tem));
        }

      /* If we have a rotate of a bit operation with the rotate count and
         the second operand of the bit operation both constant,
         permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == BIT_AND_EXPR
              || TREE_CODE (arg0) == BIT_IOR_EXPR
              || TREE_CODE (arg0) == BIT_XOR_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return fold (build (TREE_CODE (arg0), type,
                            fold (build (code, type,
                                         TREE_OPERAND (arg0, 0), arg1)),
                            fold (build (code, type,
                                         TREE_OPERAND (arg0, 1), arg1))));

      /* Two consecutive rotates adding up to the width of the mode can
         be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) == RROTATE_EXPR
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (arg1) == 0
          && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
          && ((TREE_INT_CST_LOW (arg1)
               + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
              == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
        return TREE_OPERAND (arg0, 0);

      goto binary;
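      /* For example, on a 32-bit type, rotating right by 8 and then by 24
         reproduces the original value, so the pair of rotates is removed.  */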
    case MIN_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
        return omit_one_operand (type, arg1, arg0);
      goto associate;

    case MAX_EXPR:
      if (operand_equal_p (arg0, arg1, 0))
        return omit_one_operand (type, arg0, arg1);
      if (INTEGRAL_TYPE_P (type)
          && TYPE_MAX_VALUE (type)
          && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
        return omit_one_operand (type, arg1, arg0);
      goto associate;
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
         and its values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language,
         but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
        {
          tem = fold_single_bit_test (code, arg0, arg1, type);
          if (tem)
            return tem;
          return t;
        }
      return fold_convert (type, tem);

    case TRUTH_ANDIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or 1.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant zero, return it.  */
      if (integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_AND_EXPR:
      /* If either arg is constant true, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant zero, result is zero, but first arg
         must be evaluated.  */
      if (integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
         case will be handled here.  */
      if (integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);
    truth_andor:
      /* We only do these simplifications if we are optimizing.  */
      if (!optimize)
        break;

      /* Check for things like (A || B) && (A || C).  We can convert this
         to A || (B && C).  Note that either operator can be any of the four
         truth and/or operations and the transformation will still be
         valid.  Also note that we only care about order for the
         ANDIF and ORIF operators.  If B contains side effects, this
         might change the truth-value of A.  */
      if (TREE_CODE (arg0) == TREE_CODE (arg1)
          && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
              || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
              || TREE_CODE (arg0) == TRUTH_AND_EXPR
              || TREE_CODE (arg0) == TRUTH_OR_EXPR)
          && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
        {
          tree a00 = TREE_OPERAND (arg0, 0);
          tree a01 = TREE_OPERAND (arg0, 1);
          tree a10 = TREE_OPERAND (arg1, 0);
          tree a11 = TREE_OPERAND (arg1, 1);
          int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
                              || TREE_CODE (arg0) == TRUTH_AND_EXPR)
                             && (code == TRUTH_AND_EXPR
                                 || code == TRUTH_OR_EXPR));

          if (operand_equal_p (a00, a10, 0))
            return fold (build (TREE_CODE (arg0), type, a00,
                                fold (build (code, type, a01, a11))));
          else if (commutative && operand_equal_p (a00, a11, 0))
            return fold (build (TREE_CODE (arg0), type, a00,
                                fold (build (code, type, a01, a10))));
          else if (commutative && operand_equal_p (a01, a10, 0))
            return fold (build (TREE_CODE (arg0), type, a01,
                                fold (build (code, type, a00, a11))));

          /* This case is tricky because we must either have commutative
             operators or else A10 must not have side-effects.  */

          else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
                   && operand_equal_p (a01, a11, 0))
            return fold (build (TREE_CODE (arg0), type,
                                fold (build (code, type, a00, a10)),
                                a01));
        }

      /* See if we can build a range comparison.  */
      if (0 != (tem = fold_range_test (t)))
        return tem;

      /* Check for the possibility of merging component references.  If our
         lhs is another similar operation, try to merge its rhs with our
         rhs.  Then try to merge our lhs and rhs.  */
      if (TREE_CODE (arg0) == code
          && 0 != (tem = fold_truthop (code, type,
                                       TREE_OPERAND (arg0, 1), arg1)))
        return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
        return tem;

      break;
    case TRUTH_ORIF_EXPR:
      /* Note that the operands of this must be ints
         and their values must be 0 or true.
         ("true" is a fixed value perhaps depending on the language.)  */
      /* If first arg is constant true, return it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return fold_convert (type, arg0);
    case TRUTH_OR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
          /* Preserve sequence points.  */
          && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
        return non_lvalue (fold_convert (type, arg0));
      /* If second arg is constant true, result is true, but we must
         evaluate first arg.  */
      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
        return omit_one_operand (type, arg1, arg0);
      /* Likewise for first arg, but note this only occurs here for
         TRUTH_OR_EXPR.  */
      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
        return omit_one_operand (type, arg0, arg1);
      goto truth_andor;

    case TRUTH_XOR_EXPR:
      /* If either arg is constant zero, drop it.  */
      if (integer_zerop (arg0))
        return non_lvalue (fold_convert (type, arg1));
      if (integer_zerop (arg1))
        return non_lvalue (fold_convert (type, arg0));
      /* If either arg is constant true, this is a logical inversion.  */
      if (integer_onep (arg0))
        return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
      if (integer_onep (arg1))
        return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
      return t;
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case GT_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* If one arg is a real or integer constant, put it last.  */
      if (tree_swap_operands_p (arg0, arg1, true))
        return fold (build (swap_tree_comparison (code), type, arg1, arg0));

      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
        {
          tree targ0 = strip_float_extensions (arg0);
          tree targ1 = strip_float_extensions (arg1);
          tree newtype = TREE_TYPE (targ0);

          if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
            newtype = TREE_TYPE (targ1);

          /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
          if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
            return fold (build (code, type, fold_convert (newtype, targ0),
                                fold_convert (newtype, targ1)));

          /* (-a) CMP (-b) -> b CMP a  */
          if (TREE_CODE (arg0) == NEGATE_EXPR
              && TREE_CODE (arg1) == NEGATE_EXPR)
            return fold (build (code, type, TREE_OPERAND (arg1, 0),
                                TREE_OPERAND (arg0, 0)));

          if (TREE_CODE (arg1) == REAL_CST)
            {
              REAL_VALUE_TYPE cst;
              cst = TREE_REAL_CST (arg1);

              /* (-a) CMP CST -> a swap(CMP) (-CST)  */
              if (TREE_CODE (arg0) == NEGATE_EXPR)
                return
                  fold (build (swap_tree_comparison (code), type,
                               TREE_OPERAND (arg0, 0),
                               build_real (TREE_TYPE (arg1),
                                           REAL_VALUE_NEGATE (cst))));

              /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
              /* a CMP (-0) -> a CMP 0  */
              if (REAL_VALUE_MINUS_ZERO (cst))
                return fold (build (code, type, arg0,
                                    build_real (TREE_TYPE (arg1), dconst0)));

              /* x != NaN is always true, other ops are always false.  */
              if (REAL_VALUE_ISNAN (cst)
                  && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
                {
                  t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
                  return omit_one_operand (type, fold_convert (type, t), arg0);
                }

              /* Fold comparisons against infinity.  */
              if (REAL_VALUE_ISINF (cst))
                {
                  tem = fold_inf_compare (code, type, arg0, arg1);
                  if (tem != NULL_TREE)
                    return tem;
                }
            }

          /* If this is a comparison of a real constant with a PLUS_EXPR
             or a MINUS_EXPR of a real constant, we can convert it into a
             comparison with a revised real constant as long as no overflow
             occurs when unsafe_math_optimizations are enabled.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == REAL_CST
              && (TREE_CODE (arg0) == PLUS_EXPR
                  || TREE_CODE (arg0) == MINUS_EXPR)
              && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
              && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                          ? MINUS_EXPR : PLUS_EXPR,
                                          arg1, TREE_OPERAND (arg0, 1), 0))
              && ! TREE_CONSTANT_OVERFLOW (tem))
            return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

          /* Likewise, we can simplify a comparison of a real constant with
             a MINUS_EXPR whose first operand is also a real constant, i.e.
             (c1 - x) < c2 becomes x > c1-c2.  */
          if (flag_unsafe_math_optimizations
              && TREE_CODE (arg1) == REAL_CST
              && TREE_CODE (arg0) == MINUS_EXPR
              && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
              && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
                                          arg1, 0))
              && ! TREE_CONSTANT_OVERFLOW (tem))
            return fold (build (swap_tree_comparison (code), type,
                                TREE_OPERAND (arg0, 1), tem));
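          /* For example, with -funsafe-math-optimizations,
             5.0 - x < 3.0 is simplified to x > 2.0.  */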
          /* Fold comparisons against built-in math functions.  */
          if (TREE_CODE (arg1) == REAL_CST
              && flag_unsafe_math_optimizations
              && ! flag_errno_math)
            {
              enum built_in_function fcode = builtin_mathfn_code (arg0);

              if (fcode != END_BUILTINS)
                {
                  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
                  if (tem != NULL_TREE)
                    return tem;
                }
            }
        }
      /* Convert foo++ == CONST into ++foo == CONST + INCR.
         First, see if one arg is constant; find the constant arg
         and the other one.  */
      {
        tree constop = 0, varop = NULL_TREE;
        int constopnum = -1;

        if (TREE_CONSTANT (arg1))
          constopnum = 1, constop = arg1, varop = arg0;
        if (TREE_CONSTANT (arg0))
          constopnum = 0, constop = arg0, varop = arg1;

        if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
          {
            /* This optimization is invalid for ordered comparisons
               if CONST+INCR overflows or if foo+incr might overflow.
               This optimization is invalid for floating point due to rounding.
               For pointer types we assume overflow doesn't happen.  */
            if (POINTER_TYPE_P (TREE_TYPE (varop))
                || (! FLOAT_TYPE_P (TREE_TYPE (varop))
                    && (code == EQ_EXPR || code == NE_EXPR)))
              {
                tree newconst
                  = fold (build (PLUS_EXPR, TREE_TYPE (varop),
                                 constop, TREE_OPERAND (varop, 1)));

                /* Do not overwrite the current varop to be a preincrement,
                   create a new node so that we won't confuse our caller who
                   might create trees and throw them away, reusing the
                   arguments that they passed to build.  This shows up in
                   the THEN or ELSE parts of ?: being postincrements.  */
                varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
                               TREE_OPERAND (varop, 0),
                               TREE_OPERAND (varop, 1));

                /* If VAROP is a reference to a bitfield, we must mask
                   the constant by the width of the field.  */
                if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
                    && DECL_BIT_FIELD (TREE_OPERAND
                                       (TREE_OPERAND (varop, 0), 1)))
                  {
                    int size
                      = TREE_INT_CST_LOW (DECL_SIZE
                                          (TREE_OPERAND
                                           (TREE_OPERAND (varop, 0), 1)));
                    tree mask, unsigned_type;
                    unsigned int precision;
                    tree folded_compare;

                    /* First check whether the comparison would come out
                       always the same.  If we don't do that we would
                       change the meaning with the masking.  */
                    if (constopnum == 0)
                      folded_compare = fold (build (code, type, constop,
                                                    TREE_OPERAND (varop, 0)));
                    else
                      folded_compare = fold (build (code, type,
                                                    TREE_OPERAND (varop, 0),
                                                    constop));
                    if (integer_zerop (folded_compare)
                        || integer_onep (folded_compare))
                      return omit_one_operand (type, folded_compare, varop);

                    unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
                    precision = TYPE_PRECISION (unsigned_type);
                    mask = build_int_2 (~0, ~0);
                    TREE_TYPE (mask) = unsigned_type;
                    force_fit_type (mask, 0);
                    mask = const_binop (RSHIFT_EXPR, mask,
                                        size_int (precision - size), 0);
                    newconst = fold (build (BIT_AND_EXPR,
                                            TREE_TYPE (varop), newconst,
                                            fold_convert (TREE_TYPE (varop),
                                                          mask)));
                  }

                t = build (code, type,
                           (constopnum == 0) ? newconst : varop,
                           (constopnum == 1) ? newconst : varop);
                return t;
              }
          }
        else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
          {
            if (POINTER_TYPE_P (TREE_TYPE (varop))
                || (! FLOAT_TYPE_P (TREE_TYPE (varop))
                    && (code == EQ_EXPR || code == NE_EXPR)))
              {
                tree newconst
                  = fold (build (MINUS_EXPR, TREE_TYPE (varop),
                                 constop, TREE_OPERAND (varop, 1)));

                /* Do not overwrite the current varop to be a predecrement,
                   create a new node so that we won't confuse our caller who
                   might create trees and throw them away, reusing the
                   arguments that they passed to build.  This shows up in
                   the THEN or ELSE parts of ?: being postdecrements.  */
                varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
                               TREE_OPERAND (varop, 0),
                               TREE_OPERAND (varop, 1));

                if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
                    && DECL_BIT_FIELD (TREE_OPERAND
                                       (TREE_OPERAND (varop, 0), 1)))
                  {
                    int size
                      = TREE_INT_CST_LOW (DECL_SIZE
                                          (TREE_OPERAND
                                           (TREE_OPERAND (varop, 0), 1)));
                    tree mask, unsigned_type;
                    unsigned int precision;
                    tree folded_compare;

                    if (constopnum == 0)
                      folded_compare = fold (build (code, type, constop,
                                                    TREE_OPERAND (varop, 0)));
                    else
                      folded_compare = fold (build (code, type,
                                                    TREE_OPERAND (varop, 0),
                                                    constop));
                    if (integer_zerop (folded_compare)
                        || integer_onep (folded_compare))
                      return omit_one_operand (type, folded_compare, varop);

                    unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
                    precision = TYPE_PRECISION (unsigned_type);
                    mask = build_int_2 (~0, ~0);
                    TREE_TYPE (mask) = TREE_TYPE (varop);
                    force_fit_type (mask, 0);
                    mask = const_binop (RSHIFT_EXPR, mask,
                                        size_int (precision - size), 0);
                    newconst = fold (build (BIT_AND_EXPR,
                                            TREE_TYPE (varop), newconst,
                                            fold_convert (TREE_TYPE (varop),
                                                          mask)));
                  }

                t = build (code, type,
                           (constopnum == 0) ? newconst : varop,
                           (constopnum == 1) ? newconst : varop);
                return t;
              }
          }
      }
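      /* For example, i++ == 5 is rewritten as ++i == 6, which no longer
         needs the value of i from before the increment.  */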
      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
         This transformation affects the cases which are handled in later
         optimizations involving comparisons with non-negative constants.  */
      if (TREE_CODE (arg1) == INTEGER_CST
          && TREE_CODE (arg0) != INTEGER_CST
          && tree_int_cst_sgn (arg1) > 0)
        {
          switch (code)
            {
            case GE_EXPR:
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              return fold (build (GT_EXPR, type, arg0, arg1));

            case LT_EXPR:
              arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
              return fold (build (LE_EXPR, type, arg0, arg1));

            default:
              break;
            }
        }
      /* Comparisons with the highest or lowest possible integer of
         the specified size will have known values.  */
      {
        int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));

        if (TREE_CODE (arg1) == INTEGER_CST
            && ! TREE_CONSTANT_OVERFLOW (arg1)
            && width <= HOST_BITS_PER_WIDE_INT
            && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
                || POINTER_TYPE_P (TREE_TYPE (arg1))))
          {
            unsigned HOST_WIDE_INT signed_max;
            unsigned HOST_WIDE_INT max, min;

            signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;

            if (TREE_UNSIGNED (TREE_TYPE (arg1)))
              {
                max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
                min = 0;
              }
            else
              {
                max = signed_max;
                min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
              }

            if (TREE_INT_CST_HIGH (arg1) == 0
                && TREE_INT_CST_LOW (arg1) == max)
              switch (code)
                {
                case GT_EXPR:
                  return omit_one_operand (type,
                                           fold_convert (type,
                                                         integer_zero_node),
                                           arg0);
                case GE_EXPR:
                  return fold (build (EQ_EXPR, type, arg0, arg1));

                case LE_EXPR:
                  return omit_one_operand (type,
                                           fold_convert (type,
                                                         integer_one_node),
                                           arg0);
                case LT_EXPR:
                  return fold (build (NE_EXPR, type, arg0, arg1));

                /* The GE_EXPR and LT_EXPR cases above are not normally
                   reached because of previous transformations.  */

                default:
                  break;
                }
            else if (TREE_INT_CST_HIGH (arg1) == 0
                     && TREE_INT_CST_LOW (arg1) == max - 1)
              switch (code)
                {
                case GT_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build (EQ_EXPR, type, arg0, arg1));
                case LE_EXPR:
                  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build (NE_EXPR, type, arg0, arg1));
                default:
                  break;
                }
            else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
                     && TREE_INT_CST_LOW (arg1) == min)
              switch (code)
                {
                case LT_EXPR:
                  return omit_one_operand (type,
                                           fold_convert (type,
                                                         integer_zero_node),
                                           arg0);
                case GE_EXPR:
                  return fold (build (EQ_EXPR, type, arg0, arg1));

                case GT_EXPR:
                  return omit_one_operand (type,
                                           fold_convert (type,
                                                         integer_one_node),
                                           arg0);
                case LE_EXPR:
                  return fold (build (NE_EXPR, type, arg0, arg1));

                default:
                  break;
                }
            else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
                     && TREE_INT_CST_LOW (arg1) == min + 1)
              switch (code)
                {
                case GE_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build (NE_EXPR, type, arg0, arg1));
                case LT_EXPR:
                  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
                  return fold (build (EQ_EXPR, type, arg0, arg1));
                default:
                  break;
                }

            else if (TREE_INT_CST_HIGH (arg1) == 0
                     && TREE_INT_CST_LOW (arg1) == signed_max
                     && TREE_UNSIGNED (TREE_TYPE (arg1))
                     /* signed_type does not work on pointer types.  */
                     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
              {
                /* The following case also applies to X < signed_max+1
                   and X >= signed_max+1 because of previous
                   transformations.  */
                if (code == LE_EXPR || code == GT_EXPR)
                  {
                    tree st0, st1;
                    st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
                    st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
                    return fold
                      (build (code == LE_EXPR ? GE_EXPR : LT_EXPR,
                              type, fold_convert (st0, arg0),
                              fold_convert (st1, integer_zero_node)));
                  }
              }
          }
      }
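      /* For example, if x is a 32-bit unsigned variable, x > 0xffffffff
         folds to 0, x <= 0x7fffffff becomes (int) x >= 0, and x >= 1
         becomes x != 0.  */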
      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
         a MINUS_EXPR of a constant, we can convert it into a comparison with
         a revised constant as long as no overflow occurs.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg1) == INTEGER_CST
          && (TREE_CODE (arg0) == PLUS_EXPR
              || TREE_CODE (arg0) == MINUS_EXPR)
          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
          && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
                                      ? MINUS_EXPR : PLUS_EXPR,
                                      arg1, TREE_OPERAND (arg0, 1), 0))
          && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      /* Similarly for a NEGATE_EXPR.  */
      else if ((code == EQ_EXPR || code == NE_EXPR)
               && TREE_CODE (arg0) == NEGATE_EXPR
               && TREE_CODE (arg1) == INTEGER_CST
               && 0 != (tem = negate_expr (arg1))
               && TREE_CODE (tem) == INTEGER_CST
               && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));

      /* If we have X - Y == 0, we can convert that to X == Y and similarly
         for !=.  Don't do this for ordered comparisons due to overflow.  */
      else if ((code == NE_EXPR || code == EQ_EXPR)
               && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
        return fold (build (code, type,
                            TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));

      /* If we are widening one operand of an integer comparison,
         see if the other operand is similarly being widened.  Perhaps we
         can do the comparison in the narrower type.  */
      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
               && TREE_CODE (arg0) == NOP_EXPR
               && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
               && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
               && (TREE_TYPE (t1) == TREE_TYPE (tem)
                   || (TREE_CODE (t1) == INTEGER_CST
                       && int_fits_type_p (t1, TREE_TYPE (tem)))))
        return fold (build (code, type, tem,
                            fold_convert (TREE_TYPE (tem), t1)));

      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
         constant, we can simplify it.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
               && (TREE_CODE (arg0) == MIN_EXPR
                   || TREE_CODE (arg0) == MAX_EXPR)
               && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        return optimize_minmax_comparison (t);
      /* If we are comparing an ABS_EXPR with a constant, we can
         convert all the cases into explicit comparisons, but they may
         well not be faster than doing the ABS and one comparison.
         But ABS (X) <= C is a range comparison, which becomes a subtraction
         and a comparison, and is probably faster.  */
      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
               && TREE_CODE (arg0) == ABS_EXPR
               && ! TREE_SIDE_EFFECTS (arg0)
               && (0 != (tem = negate_expr (arg1)))
               && TREE_CODE (tem) == INTEGER_CST
               && ! TREE_CONSTANT_OVERFLOW (tem))
        return fold (build (TRUTH_ANDIF_EXPR, type,
                            build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
                            build (LE_EXPR, type,
                                   TREE_OPERAND (arg0, 0), arg1)));

      /* If this is an EQ or NE comparison with zero and ARG0 is
         (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
         two operations, but the latter can be done in one less insn
         on machines that have only two-operand insns or on which a
         constant cannot be the first operand.  */
      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR)
        {
          if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
              && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
            return
              fold (build (code, type,
                           build (BIT_AND_EXPR, TREE_TYPE (arg0),
                                  build (RSHIFT_EXPR,
                                         TREE_TYPE (TREE_OPERAND (arg0, 0)),
                                         TREE_OPERAND (arg0, 1),
                                         TREE_OPERAND (TREE_OPERAND (arg0, 0),
                                                       1)),
                                  fold_convert (TREE_TYPE (arg0),
                                                integer_one_node)),
                           arg1));
          else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
                   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
            return
              fold (build (code, type,
                           build (BIT_AND_EXPR, TREE_TYPE (arg0),
                                  build (RSHIFT_EXPR,
                                         TREE_TYPE (TREE_OPERAND (arg0, 1)),
                                         TREE_OPERAND (arg0, 0),
                                         TREE_OPERAND (TREE_OPERAND (arg0, 1),
                                                       1)),
                                  fold_convert (TREE_TYPE (arg0),
                                                integer_one_node)),
                           arg1));
        }

      /* If this is an NE or EQ comparison of zero against the result of a
         signed MOD operation whose second operand is a power of 2, make
         the MOD operation unsigned since it is simpler and equivalent.  */
      if ((code == NE_EXPR || code == EQ_EXPR)
          && integer_zerop (arg1)
          && ! TREE_UNSIGNED (TREE_TYPE (arg0))
          && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
              || TREE_CODE (arg0) == CEIL_MOD_EXPR
              || TREE_CODE (arg0) == FLOOR_MOD_EXPR
              || TREE_CODE (arg0) == ROUND_MOD_EXPR)
          && integer_pow2p (TREE_OPERAND (arg0, 1)))
        {
          tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
          tree newmod = build (TREE_CODE (arg0), newtype,
                               fold_convert (newtype,
                                             TREE_OPERAND (arg0, 0)),
                               fold_convert (newtype,
                                             TREE_OPERAND (arg0, 1)));

          return build (code, type, newmod, fold_convert (newtype, arg1));
        }

      /* If this is an NE comparison of zero with an AND of one, remove the
         comparison since the AND will give the correct value.  */
      if (code == NE_EXPR && integer_zerop (arg1)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_onep (TREE_OPERAND (arg0, 1)))
        return fold_convert (type, arg0);

      /* If we have (A & C) == C where C is a power of 2, convert this into
         (A & C) != 0.  Similarly for NE_EXPR.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
          && TREE_CODE (arg0) == BIT_AND_EXPR
          && integer_pow2p (TREE_OPERAND (arg0, 1))
          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
        return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
                            arg0, integer_zero_node));
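      /* For example, (x & 8) == 8 becomes (x & 8) != 0, the canonical
         form for a single-bit test.  */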
7705 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7706 2, then fold the expression into shifts and logical operations. */
7707 tem = fold_single_bit_test (code, arg0, arg1, type);
7711 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7712 Similarly for NE_EXPR. */
7713 if ((code == EQ_EXPR || code == NE_EXPR)
7714 && TREE_CODE (arg0) == BIT_AND_EXPR
7715 && TREE_CODE (arg1) == INTEGER_CST
7716 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7719 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7720 arg1, build1 (BIT_NOT_EXPR,
7721 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7722 TREE_OPERAND (arg0, 1))));
7723 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7724 if (integer_nonzerop (dandnotc))
7725 return omit_one_operand (type, rslt, arg0);
7728 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7729 Similarly for NE_EXPR. */
7730 if ((code == EQ_EXPR || code == NE_EXPR)
7731 && TREE_CODE (arg0) == BIT_IOR_EXPR
7732 && TREE_CODE (arg1) == INTEGER_CST
7733 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7736 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7737 TREE_OPERAND (arg0, 1),
7738 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7739 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7740 if (integer_nonzerop (candnotd))
7741 return omit_one_operand (type, rslt, arg0);
7744 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7745 and similarly for >= into !=. */
7746 if ((code == LT_EXPR || code == GE_EXPR)
7747 && TREE_UNSIGNED (TREE_TYPE (arg0))
7748 && TREE_CODE (arg1) == LSHIFT_EXPR
7749 && integer_onep (TREE_OPERAND (arg1, 0)))
7750 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7751 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7752 TREE_OPERAND (arg1, 1)),
7753 fold_convert (TREE_TYPE (arg0), integer_zero_node));
      else if ((code == LT_EXPR || code == GE_EXPR)
	       && TREE_UNSIGNED (TREE_TYPE (arg0))
	       && (TREE_CODE (arg1) == NOP_EXPR
		   || TREE_CODE (arg1) == CONVERT_EXPR)
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
	       && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
	return
	  build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
		 fold_convert (TREE_TYPE (arg0),
			       build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
				      TREE_OPERAND (TREE_OPERAND (arg1, 0),
						    1))),
		 fold_convert (TREE_TYPE (arg0), integer_zero_node));
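      /* Both branches express the same fact: for unsigned X,
	 "X < (1 << Y)" holds exactly when no bit at position Y or above
	 is set in X, i.e. when "X >> Y == 0", and "X >= (1 << Y)"
	 likewise becomes "X >> Y != 0".  */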
      /* Simplify comparison of something with itself.  (For IEEE
	 floating-point, we can only do some of these simplifications.)  */
      if (operand_equal_p (arg0, arg1, 0))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      break;

	    case GE_EXPR:
	    case LE_EXPR:
	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		return constant_boolean_node (1, type);
	      return fold (build (EQ_EXPR, type, arg0, arg1));

	    case NE_EXPR:
	      /* For NE, we can only do this simplification if integer
		 or we don't honor IEEE floating point NaNs.  */
	      if (FLOAT_TYPE_P (TREE_TYPE (arg0))
		  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
		break;
	      /* ... fall through ...  */
	    case GT_EXPR:
	    case LT_EXPR:
	      return constant_boolean_node (0, type);
	    default:
	      abort ();
	    }
	}
      /* If we are comparing an expression that just has comparisons
	 of two integer values, arithmetic expressions of those comparisons,
	 and constants, we can simplify it.  There are only three cases
	 to check: the two values can either be equal, the first can be
	 greater, or the second can be greater.  Fold the expression for
	 those three values.  Since each value must be 0 or 1, we have
	 eight possibilities, each of which corresponds to the constant 0
	 or 1 or one of the six possible comparisons.

	 This handles common cases like (a > b) == 0 but also handles
	 expressions like ((x > y) - (y > x)) > 0, which supposedly
	 occur in macroized code.  */

      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
	{
	  tree cval1 = 0, cval2 = 0;
	  int save_p = 0;

	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	      /* Don't handle degenerate cases here; they should already
		 have been handled anyway.  */
	      && cval1 != 0 && cval2 != 0
	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	    {
	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	      /* We can't just pass T to eval_subst in case cval1 or cval2
		 was the same as ARG1.  */

	      tree high_result
		= fold (build (code, type,
			       eval_subst (arg0, cval1, maxval, cval2, minval),
			       arg1));
	      tree equal_result
		= fold (build (code, type,
			       eval_subst (arg0, cval1, maxval, cval2, maxval),
			       arg1));
	      tree low_result
		= fold (build (code, type,
			       eval_subst (arg0, cval1, minval, cval2, maxval),
			       arg1));

	      /* All three of these results should be 0 or 1.  Confirm they
		 are.  Then use those values to select the proper code
		 to use.  */

	      if ((integer_zerop (high_result)
		   || integer_onep (high_result))
		  && (integer_zerop (equal_result)
		      || integer_onep (equal_result))
		  && (integer_zerop (low_result)
		      || integer_onep (low_result)))
		{
		  /* Make a 3-bit mask with the high-order bit being the
		     value for `>', the next for '=', and the low for '<'.  */
		  switch ((integer_onep (high_result) * 4)
			  + (integer_onep (equal_result) * 2)
			  + integer_onep (low_result))
		    {
		    case 0:
		      /* Always false.  */
		      return omit_one_operand (type, integer_zero_node, arg0);
		    case 1: code = LT_EXPR; break;
		    case 2: code = EQ_EXPR; break;
		    case 3: code = LE_EXPR; break;
		    case 4: code = GT_EXPR; break;
		    case 5: code = NE_EXPR; break;
		    case 6: code = GE_EXPR; break;
		    case 7:
		      /* Always true.  */
		      return omit_one_operand (type, integer_one_node, arg0);
		    }

		  t = build (code, type, cval1, cval2);
		  if (save_p)
		    return save_expr (t);
		  else
		    return fold (t);
		}
	    }
	}
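      /* Worked example: for "(a > b) == 0", substituting (max, min) for
	 (a, b) gives high_result = (1 == 0) = 0, (max, max) gives
	 equal_result = 1, and (min, max) gives low_result = 1; the mask
	 0*4 + 1*2 + 1 == 3 selects LE_EXPR, so the whole expression
	 folds to "a <= b".  */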
      /* If this is a comparison of a field, we may be able to simplify it.  */
      if (((TREE_CODE (arg0) == COMPONENT_REF
	    && (*lang_hooks.can_use_bit_fields_p) ())
	   || TREE_CODE (arg0) == BIT_FIELD_REF)
	  && (code == EQ_EXPR || code == NE_EXPR)
	  /* Handle the constant case even without -O
	     to make sure the warnings are given.  */
	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
	{
	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
	  if (t1)
	    return t1;
	}
      /* If this is a comparison of complex values and either or both sides
	 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
	 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
	 This may prevent needless evaluations.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
	  && (TREE_CODE (arg0) == COMPLEX_EXPR
	      || TREE_CODE (arg1) == COMPLEX_EXPR
	      || TREE_CODE (arg0) == COMPLEX_CST
	      || TREE_CODE (arg1) == COMPLEX_CST))
	{
	  tree subtype = TREE_TYPE (TREE_TYPE (arg0));
	  tree real0, imag0, real1, imag1;

	  arg0 = save_expr (arg0);
	  arg1 = save_expr (arg1);
	  real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
	  imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
	  real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
	  imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));

	  return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
			       : TRUTH_ORIF_EXPR),
			      type,
			      fold (build (code, type, real0, real1)),
			      fold (build (code, type, imag0, imag1))));
	}
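      /* E.g. an equality test against a COMPLEX_EXPR, "x + yi == z",
	 splits into "x == REALPART (z) && y == IMAGPART (z)", and the
	 NE form uses "||"; a constant real or imaginary part can then
	 fold half of the test away.  */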
      /* Optimize comparisons of strlen vs zero to a compare of the
	 first character of the string vs zero.  To wit,
		strlen(ptr) == 0   =>  *ptr == 0
		strlen(ptr) != 0   =>  *ptr != 0
	 Other cases should reduce to one of these two (or a constant)
	 due to the return value of strlen being unsigned.  */
      if ((code == EQ_EXPR || code == NE_EXPR)
	  && integer_zerop (arg1)
	  && TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fndecl = get_callee_fndecl (arg0);
	  tree arglist;

	  if (fndecl
	      && DECL_BUILT_IN (fndecl)
	      && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
	      && (arglist = TREE_OPERAND (arg0, 1))
	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
	      && ! TREE_CHAIN (arglist))
	    return fold (build (code, type,
				build1 (INDIRECT_REF, char_type_node,
					TREE_VALUE (arglist)),
				integer_zero_node));
	}
      /* From here on, the only cases we handle are when the result is
	 known to be a constant.

	 To compute GT, swap the arguments and do LT.
	 To compute GE, do LT and invert the result.
	 To compute LE, swap the arguments, do LT and invert the result.
	 To compute NE, do EQ and invert the result.

	 Therefore, the code below must handle only EQ and LT.  */

      if (code == LE_EXPR || code == GT_EXPR)
	{
	  tem = arg0, arg0 = arg1, arg1 = tem;
	  code = swap_tree_comparison (code);
	}

      /* Note that it is safe to invert for real values here because we
	 will check below in the one case that it matters.  */

      t1 = NULL_TREE;
      invert = 0;
      if (code == NE_EXPR || code == GE_EXPR)
	{
	  invert = 1;
	  code = invert_tree_comparison (code);
	}

      /* Compute a result for LT or EQ if args permit;
	 otherwise return T.  */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
	{
	  if (code == EQ_EXPR)
	    t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
	  else
	    t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
			       ? INT_CST_LT_UNSIGNED (arg0, arg1)
			       : INT_CST_LT (arg0, arg1)),
			      0);
	}
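      /* E.g. folding "3 >= 5": GE becomes LT with INVERT set,
	 INT_CST_LT (3, 5) yields 1, and the inversion further down
	 turns that into the constant 0.  */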
#if 0 /* This is no longer useful, but breaks some real code.  */
      /* Assume a nonexplicit constant cannot equal an explicit one,
	 since such code would be undefined anyway.
	 Exception: on sysvr4, using #pragma weak,
	 a label can come out as 0.  */
      else if (TREE_CODE (arg1) == INTEGER_CST
	       && !integer_zerop (arg1)
	       && TREE_CONSTANT (arg0)
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && code == EQ_EXPR)
	t1 = build_int_2 (0, 0);
#endif
      /* Two real constants can be compared explicitly.  */
      else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
	{
	  /* If either operand is a NaN, the result is false with two
	     exceptions: First, an NE_EXPR is true on NaNs, but that case
	     is already handled correctly since we will be inverting the
	     result for NE_EXPR.  Second, if we had inverted a LE_EXPR
	     or a GE_EXPR into a LT_EXPR, we must return true so that it
	     will be inverted into false.  */

	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
	      || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    t1 = build_int_2 (invert && code == LT_EXPR, 0);

	  else if (code == EQ_EXPR)
	    t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
						 TREE_REAL_CST (arg1)),
			      0);
	  else
	    t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
						TREE_REAL_CST (arg1)),
			      0);
	}

      if (t1 == NULL_TREE)
	return t;

      if (invert)
	TREE_INT_CST_LOW (t1) ^= 1;

      TREE_TYPE (t1) = type;
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return (*lang_hooks.truthvalue_conversion) (t1);
      return t1;
    case COND_EXPR:
      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
	 so all simple results must be passed through pedantic_non_lvalue.  */
      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
	  /* Only optimize constant conditions when the selected branch
	     has the same type as the COND_EXPR.  This avoids optimizing
	     away "c ? x : throw", where the throw has a void type.  */
	  if (! VOID_TYPE_P (TREE_TYPE (tem))
	      || VOID_TYPE_P (TREE_TYPE (t)))
	    return pedantic_non_lvalue (tem);
	  return t;
	}

      if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
	return pedantic_omit_one_operand (type, arg1, arg0);
      /* If we have A op B ? A : C, we may be able to convert this to a
	 simpler expression, depending on the operation and the values
	 of B and C.  Signed zeros prevent all of these transformations,
	 for reasons given above each one.  */

      if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
					     arg1, TREE_OPERAND (arg0, 1))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
	{
	  tree arg2 = TREE_OPERAND (t, 2);
	  enum tree_code comp_code = TREE_CODE (arg0);

	  STRIP_NOPS (arg2);
	  /* If we have A op 0 ? A : -A, consider applying the following
	     transformations:

	     A == 0? A : -A    same as -A
	     A != 0? A : -A    same as A
	     A >= 0? A : -A    same as abs (A)
	     A > 0?  A : -A    same as abs (A)
	     A <= 0? A : -A    same as -abs (A)
	     A < 0?  A : -A    same as -abs (A)

	     None of these transformations work for modes with signed
	     zeros.  If A is +/-0, the first two transformations will
	     change the sign of the result (from +0 to -0, or vice
	     versa).  The last four will fix the sign of the result,
	     even though the original expressions could be positive or
	     negative, depending on the sign of A.

	     Note that all these transformations are correct if A is
	     NaN, since the two alternatives (A and -A) are also NaNs.  */
	  if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
	       ? real_zerop (TREE_OPERAND (arg0, 1))
	       : integer_zerop (TREE_OPERAND (arg0, 1)))
	      && TREE_CODE (arg2) == NEGATE_EXPR
	      && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	    switch (comp_code)
	      {
	      case EQ_EXPR:
		tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
		tem = fold_convert (type, negate_expr (tem));
		return pedantic_non_lvalue (tem);
	      case NE_EXPR:
		return pedantic_non_lvalue (fold_convert (type, arg1));
	      case GE_EXPR:
	      case GT_EXPR:
		if (TREE_UNSIGNED (TREE_TYPE (arg1)))
		  arg1 = fold_convert ((*lang_hooks.types.signed_type)
				       (TREE_TYPE (arg1)), arg1);
		arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
		return pedantic_non_lvalue (fold_convert (type, arg1));
	      case LE_EXPR:
	      case LT_EXPR:
		if (TREE_UNSIGNED (TREE_TYPE (arg1)))
		  arg1 = fold_convert ((*lang_hooks.types.signed_type)
				       (TREE_TYPE (arg1)), arg1);
		arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
		arg1 = negate_expr (fold_convert (type, arg1));
		return pedantic_non_lvalue (arg1);
	      default:
		abort ();
	      }
	  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
	     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
	     both transformations are correct when A is NaN: A != 0
	     is then true, and A == 0 is false.  */

	  if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
	    {
	      if (comp_code == NE_EXPR)
		return pedantic_non_lvalue (fold_convert (type, arg1));
	      else if (comp_code == EQ_EXPR)
		return pedantic_non_lvalue (fold_convert (type,
							  integer_zero_node));
	    }
	  /* Try some transformations of A op B ? A : B.

	     A == B? A : B    same as B
	     A != B? A : B    same as A
	     A >= B? A : B    same as max (A, B)
	     A > B?  A : B    same as max (B, A)
	     A <= B? A : B    same as min (A, B)
	     A < B?  A : B    same as min (B, A)

	     As above, these transformations don't work in the presence
	     of signed zeros.  For example, if A and B are zeros of
	     opposite sign, the first two transformations will change
	     the sign of the result.  In the last four, the original
	     expressions give different results for (A=+0, B=-0) and
	     (A=-0, B=+0), but the transformed expressions do not.

	     The first two transformations are correct if either A or B
	     is a NaN.  In the first transformation, the condition will
	     be false, and B will indeed be chosen.  In the case of the
	     second transformation, the condition A != B will be true,
	     and A will be chosen.

	     The conversions to max() and min() are not correct if B is
	     a number and A is not.  The conditions in the original
	     expressions will be false, so all four give B.  The min()
	     and max() versions would give a NaN instead.  */
	  if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
					      arg2, TREE_OPERAND (arg0, 0)))
	    {
	      tree comp_op0 = TREE_OPERAND (arg0, 0);
	      tree comp_op1 = TREE_OPERAND (arg0, 1);
	      tree comp_type = TREE_TYPE (comp_op0);

	      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
	      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
		{
		  comp_type = type;
		  comp_op0 = arg1;
		  comp_op1 = arg2;
		}

	      switch (comp_code)
		{
		case EQ_EXPR:
		  return pedantic_non_lvalue (fold_convert (type, arg2));
		case NE_EXPR:
		  return pedantic_non_lvalue (fold_convert (type, arg1));
		case LE_EXPR:
		case LT_EXPR:
		  /* In C++ a ?: expression can be an lvalue, so put the
		     operand which will be used if they are equal first
		     so that we can convert this back to the
		     corresponding COND_EXPR.  */
		  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		    return pedantic_non_lvalue (fold_convert
		      (type, fold (build (MIN_EXPR, comp_type,
					  (comp_code == LE_EXPR
					   ? comp_op0 : comp_op1),
					  (comp_code == LE_EXPR
					   ? comp_op1 : comp_op0)))));
		  break;
		case GE_EXPR:
		case GT_EXPR:
		  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
		    return pedantic_non_lvalue (fold_convert
		      (type, fold (build (MAX_EXPR, comp_type,
					  (comp_code == GE_EXPR
					   ? comp_op0 : comp_op1),
					  (comp_code == GE_EXPR
					   ? comp_op1 : comp_op0)))));
		  break;
		default:
		  abort ();
		}
	    }
	  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
	     we might still be able to simplify this.  For example,
	     if C1 is one less or one more than C2, this might have started
	     out as a MIN or MAX and been transformed by this function.
	     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

	  if (INTEGRAL_TYPE_P (type)
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST)
	    switch (comp_code)
	      {
	      case EQ_EXPR:
		/* We can replace A with C1 in this case.  */
		arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
		return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
				    TREE_OPERAND (t, 2)));

	      case LT_EXPR:
		/* If C1 is C2 + 1, this is min(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (PLUS_EXPR, arg2,
						     integer_one_node, 0), 1))
		  return pedantic_non_lvalue
		    (fold (build (MIN_EXPR, type, arg1, arg2)));
		break;

	      case LE_EXPR:
		/* If C1 is C2 - 1, this is min(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (MINUS_EXPR, arg2,
						     integer_one_node, 0), 1))
		  return pedantic_non_lvalue
		    (fold (build (MIN_EXPR, type, arg1, arg2)));
		break;

	      case GT_EXPR:
		/* If C1 is C2 - 1, this is max(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (MINUS_EXPR, arg2,
						     integer_one_node, 0), 1))
		  return pedantic_non_lvalue
		    (fold (build (MAX_EXPR, type, arg1, arg2)));
		break;

	      case GE_EXPR:
		/* If C1 is C2 + 1, this is max(A, C2).  */
		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
		    && operand_equal_p (TREE_OPERAND (arg0, 1),
					const_binop (PLUS_EXPR, arg2,
						     integer_one_node, 0), 1))
		  return pedantic_non_lvalue
		    (fold (build (MAX_EXPR, type, arg1, arg2)));
		break;
	      case NE_EXPR:
		break;
	      default:
		abort ();
	      }
	}
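      /* E.g. "a < 5 ? a : 4" has C1 == C2 + 1 and is recognized above as
	 min (a, 4); it may well have started life as "a <= 4 ? a : 4"
	 before earlier canonicalization.  */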
      /* If the second operand is simpler than the third, swap them
	 since that produces better jump optimization results.  */
      if (tree_swap_operands_p (TREE_OPERAND (t, 1),
				TREE_OPERAND (t, 2), false))
	{
	  /* See if this can be inverted.  If it can't, possibly because
	     it was a floating-point inequality comparison, don't do
	     anything.  */
	  tem = invert_truthvalue (arg0);

	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return fold (build (code, type, tem,
				TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
	}
      /* Convert A ? 1 : 0 to simply A.  */
      if (integer_onep (TREE_OPERAND (t, 1))
	  && integer_zerop (TREE_OPERAND (t, 2))
	  /* If we try to convert TREE_OPERAND (t, 0) to our type, the
	     call to fold will try to move the conversion inside
	     a COND, which will recurse.  In that case, the COND_EXPR
	     is probably the best choice, so leave it alone.  */
	  && type == TREE_TYPE (arg0))
	return pedantic_non_lvalue (arg0);

      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
	 over COND_EXPR in cases such as floating point comparisons.  */
      if (integer_zerop (TREE_OPERAND (t, 1))
	  && integer_onep (TREE_OPERAND (t, 2))
	  && truth_value_p (TREE_CODE (arg0)))
	return pedantic_non_lvalue (fold_convert (type,
						  invert_truthvalue (arg0)));
      /* Look for expressions of the form A & 2 ? 2 : 0.  The result of this
	 operation is simply A & 2.  */

      if (integer_zerop (TREE_OPERAND (t, 2))
	  && TREE_CODE (arg0) == NE_EXPR
	  && integer_zerop (TREE_OPERAND (arg0, 1))
	  && integer_pow2p (arg1)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
			      arg1, 1))
	return pedantic_non_lvalue (fold_convert (type,
						  TREE_OPERAND (arg0, 0)));
      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
      if (integer_zerop (TREE_OPERAND (t, 2))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
						 arg0, arg1)));

      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
      if (integer_onep (TREE_OPERAND (t, 2))
	  && truth_value_p (TREE_CODE (arg0))
	  && truth_value_p (TREE_CODE (arg1)))
	{
	  /* Only perform transformation if ARG0 is easily inverted.  */
	  tem = invert_truthvalue (arg0);
	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
	    return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
						     tem, arg1)));
	}

      return t;

    case COMPOUND_EXPR:
      /* When pedantic, a compound expression can be neither an lvalue
	 nor an integer constant expression.  */
      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
	return t;
      /* Don't let (0, 0) be null pointer constant.  */
      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
				 : fold_convert (type, arg1);
      return pedantic_non_lvalue (tem);
    case COMPLEX_EXPR:
      if (wins)
	return build_complex (type, arg0, arg1);
      return t;

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return t;
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg0, 1));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return TREE_REALPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build1 (REALPART_EXPR, type,
					  TREE_OPERAND (arg0, 0))),
			    fold (build1 (REALPART_EXPR,
					  type, TREE_OPERAND (arg0, 1)))));
      return t;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, integer_zero_node);
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
				 TREE_OPERAND (arg0, 0));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return TREE_IMAGPART (arg0);
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build1 (IMAGPART_EXPR, type,
					  TREE_OPERAND (arg0, 0))),
			    fold (build1 (IMAGPART_EXPR, type,
					  TREE_OPERAND (arg0, 1)))));
      return t;
      /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
	 appropriate.  */
    case CLEANUP_POINT_EXPR:
      if (! has_cleanups (arg0))
	return TREE_OPERAND (t, 0);

      {
	enum tree_code code0 = TREE_CODE (arg0);
	int kind0 = TREE_CODE_CLASS (code0);
	tree arg00 = TREE_OPERAND (arg0, 0);
	tree arg01;

	if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
	  return fold (build1 (code0, type,
			       fold (build1 (CLEANUP_POINT_EXPR,
					     TREE_TYPE (arg00), arg00))));

	if (kind0 == '<' || kind0 == '2'
	    || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
	    || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
	    || code0 == TRUTH_XOR_EXPR)
	  {
	    arg01 = TREE_OPERAND (arg0, 1);

	    if (TREE_CONSTANT (arg00)
		|| ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
		    && ! has_cleanups (arg00)))
	      return fold (build (code0, type, arg00,
				  fold (build1 (CLEANUP_POINT_EXPR,
						TREE_TYPE (arg01), arg01))));

	    if (TREE_CONSTANT (arg01))
	      return fold (build (code0, type,
				  fold (build1 (CLEANUP_POINT_EXPR,
						TREE_TYPE (arg00), arg00)),
				  arg01));
	  }

	return t;
      }

    case CALL_EXPR:
8450 /* Check for a built-in function. */
8451 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8452 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8454 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8456 tree tmp = fold_builtin (expr);
8464 } /* switch (code) */
#ifdef ENABLE_FOLD_CHECKING
#undef fold

static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
static void fold_check_failed (tree, tree);
void print_fold_checksum (tree);

/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
void
print_fold_checksum (tree expr)
{
  struct md5_ctx ctx;
  unsigned char checksum[16], cnt;
  htab_t ht;

  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum);
  htab_delete (ht);
  for (cnt = 0; cnt < 16; ++cnt)
    fprintf (stderr, "%02x", checksum[cnt]);
  putc ('\n', stderr);
}
static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  char buf[sizeof (struct tree_decl)];
  int i, len;

  if (sizeof (struct tree_exp) + 5 * sizeof (tree)
      > sizeof (struct tree_decl)
      || sizeof (struct tree_type) > sizeof (struct tree_decl))
    abort ();
  if (expr == NULL)
    return;
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
    {
      /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
    }
  else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == 't'
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
    {
      /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified.  */
      memcpy (buf, expr, tree_size (expr));
      expr = (tree) buf;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  len = TREE_CODE_LENGTH (code);
  switch (TREE_CODE_CLASS (code))
    {
    case 'c':
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case 'x':
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case 'e':
      switch (code)
	{
	case SAVE_EXPR: len = 2; break;
	case GOTO_SUBROUTINE_EXPR: len = 0; break;
	case RTL_EXPR: len = 0; break;
	case WITH_CLEANUP_EXPR: len = 2; break;
	default: break;
	}
      /* Fall through.  */
    case 'r':
    case '<':
    case '1':
    case '2':
    case 's':
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case 'd':
      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
      break;
    case 't':
      fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

#endif
/* Perform constant folding and related simplification of initializer
   expression EXPR.  This behaves identically to "fold" but ignores
   potential run-time traps and exceptions that fold must preserve.  */

tree
fold_initializer (tree expr)
{
  int saved_signaling_nans = flag_signaling_nans;
  int saved_trapping_math = flag_trapping_math;
  int saved_trapv = flag_trapv;
  tree result;

  flag_signaling_nans = 0;
  flag_trapping_math = 0;
  flag_trapv = 0;

  result = fold (expr);

  flag_signaling_nans = saved_signaling_nans;
  flag_trapping_math = saved_trapping_math;
  flag_trapv = saved_trapv;

  return result;
}
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */
static int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TREE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
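/* For instance, multiple_of_p (type, top, bottom) with TOP = (i + 1) * 16
   and BOTTOM = 8 recurses through the MULT_EXPR and succeeds on the
   INTEGER_CST operand, since 16 % 8 == 0.  */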
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case ABS_EXPR:
      return 1;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;
    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return 1;
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      break;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case RDIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TREE_UNSIGNED (inner_type))
		  return 1;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TREE_UNSIGNED (inner_type);
	  }
      }
      break;
    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case COMPOUND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MIN_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case BIND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
    case SAVE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case NON_LVALUE_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
    case RTL_EXPR:
      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl
	    && DECL_BUILT_IN (fndecl)
	    && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_CABS:
	    case BUILT_IN_CABSL:
	    case BUILT_IN_CABSF:
	    case BUILT_IN_EXP:
	    case BUILT_IN_EXPF:
	    case BUILT_IN_EXPL:
	    case BUILT_IN_EXP2:
	    case BUILT_IN_EXP2F:
	    case BUILT_IN_EXP2L:
	    case BUILT_IN_EXP10:
	    case BUILT_IN_EXP10F:
	    case BUILT_IN_EXP10L:
	    case BUILT_IN_FABS:
	    case BUILT_IN_FABSF:
	    case BUILT_IN_FABSL:
	    case BUILT_IN_FFS:
	    case BUILT_IN_FFSL:
	    case BUILT_IN_FFSLL:
	    case BUILT_IN_PARITY:
	    case BUILT_IN_PARITYL:
	    case BUILT_IN_PARITYLL:
	    case BUILT_IN_POPCOUNT:
	    case BUILT_IN_POPCOUNTL:
	    case BUILT_IN_POPCOUNTLL:
	    case BUILT_IN_POW10:
	    case BUILT_IN_POW10F:
	    case BUILT_IN_POW10L:
	    case BUILT_IN_SQRT:
	    case BUILT_IN_SQRTF:
	    case BUILT_IN_SQRTL:
	      /* These are always non-negative.  */
	      return 1;

	    case BUILT_IN_ATAN:
	    case BUILT_IN_ATANF:
	    case BUILT_IN_ATANL:
	    case BUILT_IN_CEIL:
	    case BUILT_IN_CEILF:
	    case BUILT_IN_CEILL:
	    case BUILT_IN_FLOOR:
	    case BUILT_IN_FLOORF:
	    case BUILT_IN_FLOORL:
	    case BUILT_IN_NEARBYINT:
	    case BUILT_IN_NEARBYINTF:
	    case BUILT_IN_NEARBYINTL:
	    case BUILT_IN_ROUND:
	    case BUILT_IN_ROUNDF:
	    case BUILT_IN_ROUNDL:
	    case BUILT_IN_TRUNC:
	    case BUILT_IN_TRUNCF:
	    case BUILT_IN_TRUNCL:
	      /* These preserve the sign of their argument.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    case BUILT_IN_POW:
	    case BUILT_IN_POWF:
	    case BUILT_IN_POWL:
	      /* pow is non-negative if its base is.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    default:
	      break;
	    }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return 1;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return 0;
}
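/* For example, "(int) (unsigned char) a + (int) (unsigned char) b" is
   known non-negative by the PLUS_EXPR logic above: both operands are
   zero-extended from 8 bits, and the 9 bits needed for the sum fit
   comfortably in int.  */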
/* Return true if `r' is known to be non-negative.
   Only handles constants at the moment.  */

int
rtl_expr_nonnegative_p (rtx r)
{
  switch (GET_CODE (r))
    {
    case CONST_INT:
      return INTVAL (r) >= 0;

    case CONST_DOUBLE:
      if (GET_MODE (r) == VOIDmode)
	return CONST_DOUBLE_HIGH (r) >= 0;
      return 0;

    case CONST_VECTOR:
      {
	int units, i;
	rtx elt;

	units = CONST_VECTOR_NUNITS (r);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (r, i);
	    if (!rtl_expr_nonnegative_p (elt))
	      return 0;
	  }

	return 1;
      }

    case SYMBOL_REF:
    case LABEL_REF:
      /* These are always nonnegative.  */
      return 1;

    default:
      return 0;
    }
}

#include "gt-fold-const.h"