1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
47 #include "coretypes.h"
58 #include "langhooks.h"
61 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
62 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
63 static bool negate_mathfn_p (enum built_in_function);
64 static bool negate_expr_p (tree);
65 static tree negate_expr (tree);
66 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
67 static tree associate_trees (tree, tree, enum tree_code, tree);
68 static tree int_const_binop (enum tree_code, tree, tree, int);
69 static tree const_binop (enum tree_code, tree, tree, int);
70 static hashval_t size_htab_hash (const void *);
71 static int size_htab_eq (const void *, const void *);
72 static tree fold_convert_const (enum tree_code, tree, tree);
73 static tree fold_convert (tree, tree);
74 static enum tree_code invert_tree_comparison (enum tree_code);
75 static enum tree_code swap_tree_comparison (enum tree_code);
76 static int comparison_to_compcode (enum tree_code);
77 static enum tree_code compcode_to_comparison (int);
78 static int truth_value_p (enum tree_code);
79 static int operand_equal_for_comparison_p (tree, tree, tree);
80 static int twoval_comparison_p (tree, tree *, tree *, int *);
81 static tree eval_subst (tree, tree, tree, tree, tree);
82 static tree pedantic_omit_one_operand (tree, tree, tree);
83 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
84 static tree make_bit_field_ref (tree, tree, int, int, int);
85 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
86 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
87 enum machine_mode *, int *, int *,
89 static int all_ones_mask_p (tree, int);
90 static tree sign_bit_p (tree, tree);
91 static int simple_operand_p (tree);
92 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
93 static tree make_range (tree, int *, tree *, tree *);
94 static tree build_range_check (tree, tree, int, tree, tree);
95 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
97 static tree fold_range_test (tree);
98 static tree unextend (tree, int, int, tree);
99 static tree fold_truthop (enum tree_code, tree, tree, tree);
100 static tree optimize_minmax_comparison (tree);
101 static tree extract_muldiv (tree, tree, enum tree_code, tree);
102 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
103 static tree strip_compound_expr (tree, tree);
104 static int multiple_of_p (tree, tree, tree);
105 static tree constant_boolean_node (int, tree);
106 static int count_cond (tree, int);
107 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
109 static bool fold_real_zero_addition_p (tree, tree, int);
110 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
112 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
113 static bool reorder_operands_p (tree, tree);
114 static bool tree_swap_operands_p (tree, tree, bool);
116 /* The following constants represent a bit based encoding of GCC's
117 comparison operators. This encoding simplifies transformations
118 on relational comparison operators, such as AND and OR. */
119 #define COMPCODE_FALSE 0
120 #define COMPCODE_LT 1
121 #define COMPCODE_EQ 2
122 #define COMPCODE_LE 3
123 #define COMPCODE_GT 4
124 #define COMPCODE_NE 5
125 #define COMPCODE_GE 6
126 #define COMPCODE_TRUE 7
128 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
129 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
130 and SUM1. Then this yields nonzero if overflow occurred during the
133 Overflow occurs if A and B have the same sign, but A and SUM differ in
134 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
136 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
138 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
139 We do that by representing the two-word integer in 4 words, with only
140 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
141 number. The value of the word is LOWPART + HIGHPART * BASE. */
144 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
145 #define HIGHPART(x) \
146 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
147 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
149 /* Unpack a two-word integer into 4 words.
150 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
151 WORDS points to the array of HOST_WIDE_INTs. */
154 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
156 words[0] = LOWPART (low);
157 words[1] = HIGHPART (low);
158 words[2] = LOWPART (hi);
159 words[3] = HIGHPART (hi);
162 /* Pack an array of 4 words into a two-word integer.
163 WORDS points to the array of words.
164 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
167 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
170 *low = words[0] + words[1] * BASE;
171 *hi = words[2] + words[3] * BASE;
174 /* Make the integer constant T valid for its type by setting to 0 or 1 all
175 the bits in the constant that don't belong in the type.
177 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
178 nonzero, a signed overflow has already occurred in calculating T, so
182 force_fit_type (tree t, int overflow)
184 unsigned HOST_WIDE_INT low;
188 if (TREE_CODE (t) == REAL_CST)
190 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
191 Consider doing it via real_convert now. */
195 else if (TREE_CODE (t) != INTEGER_CST)
198 low = TREE_INT_CST_LOW (t);
199 high = TREE_INT_CST_HIGH (t);
201 if (POINTER_TYPE_P (TREE_TYPE (t))
202 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
205 prec = TYPE_PRECISION (TREE_TYPE (t));
207 /* First clear all bits that are beyond the type's precision. */
209 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
211 else if (prec > HOST_BITS_PER_WIDE_INT)
212 TREE_INT_CST_HIGH (t)
213 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
216 TREE_INT_CST_HIGH (t) = 0;
217 if (prec < HOST_BITS_PER_WIDE_INT)
218 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
221 /* Unsigned types do not suffer sign extension or overflow unless they
223 if (TREE_UNSIGNED (TREE_TYPE (t))
224 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
225 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
228 /* If the value's sign bit is set, extend the sign. */
229 if (prec != 2 * HOST_BITS_PER_WIDE_INT
230 && (prec > HOST_BITS_PER_WIDE_INT
231 ? 0 != (TREE_INT_CST_HIGH (t)
233 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
234 : 0 != (TREE_INT_CST_LOW (t)
235 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
237 /* Value is negative:
238 set to 1 all the bits that are outside this type's precision. */
239 if (prec > HOST_BITS_PER_WIDE_INT)
240 TREE_INT_CST_HIGH (t)
241 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
244 TREE_INT_CST_HIGH (t) = -1;
245 if (prec < HOST_BITS_PER_WIDE_INT)
246 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
250 /* Return nonzero if signed overflow occurred. */
252 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
256 /* Add two doubleword integers with doubleword result.
257 Each argument is given as two `HOST_WIDE_INT' pieces.
258 One argument is L1 and H1; the other, L2 and H2.
259 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
262 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
263 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
264 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
266 unsigned HOST_WIDE_INT l;
270 h = h1 + h2 + (l < l1);
274 return OVERFLOW_SUM_SIGN (h1, h2, h);
277 /* Negate a doubleword integer with doubleword result.
278 Return nonzero if the operation overflows, assuming it's signed.
279 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
280 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
283 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
284 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
290 return (*hv & h1) < 0;
300 /* Multiply two doubleword integers with doubleword result.
301 Return nonzero if the operation overflows, assuming it's signed.
302 Each argument is given as two `HOST_WIDE_INT' pieces.
303 One argument is L1 and H1; the other, L2 and H2.
304 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
307 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
308 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
309 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
311 HOST_WIDE_INT arg1[4];
312 HOST_WIDE_INT arg2[4];
313 HOST_WIDE_INT prod[4 * 2];
314 unsigned HOST_WIDE_INT carry;
316 unsigned HOST_WIDE_INT toplow, neglow;
317 HOST_WIDE_INT tophigh, neghigh;
319 encode (arg1, l1, h1);
320 encode (arg2, l2, h2);
322 memset (prod, 0, sizeof prod);
324 for (i = 0; i < 4; i++)
327 for (j = 0; j < 4; j++)
330 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
331 carry += arg1[i] * arg2[j];
332 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
334 prod[k] = LOWPART (carry);
335 carry = HIGHPART (carry);
340 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
342 /* Check for overflow by calculating the top half of the answer in full;
343 it should agree with the low half's sign bit. */
344 decode (prod + 4, &toplow, &tophigh);
347 neg_double (l2, h2, &neglow, &neghigh);
348 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
352 neg_double (l1, h1, &neglow, &neghigh);
353 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
355 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
358 /* Shift the doubleword integer in L1, H1 left by COUNT places
359 keeping only PREC bits of result.
360 Shift right if COUNT is negative.
361 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
362 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
365 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
366 HOST_WIDE_INT count, unsigned int prec,
367 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
369 unsigned HOST_WIDE_INT signmask;
373 rshift_double (l1, h1, -count, prec, lv, hv, arith);
377 #ifdef SHIFT_COUNT_TRUNCATED
378 if (SHIFT_COUNT_TRUNCATED)
382 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
384 /* Shifting by the host word size is undefined according to the
385 ANSI standard, so we must handle this as a special case. */
389 else if (count >= HOST_BITS_PER_WIDE_INT)
391 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
396 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
397 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
401 /* Sign extend all bits that are beyond the precision. */
403 signmask = -((prec > HOST_BITS_PER_WIDE_INT
404 ? ((unsigned HOST_WIDE_INT) *hv
405 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
406 : (*lv >> (prec - 1))) & 1);
408 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
410 else if (prec >= HOST_BITS_PER_WIDE_INT)
412 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
413 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
418 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
419 *lv |= signmask << prec;
423 /* Shift the doubleword integer in L1, H1 right by COUNT places
424 keeping only PREC bits of result. COUNT must be positive.
425 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
426 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
429 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
430 HOST_WIDE_INT count, unsigned int prec,
431 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
434 unsigned HOST_WIDE_INT signmask;
437 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
440 #ifdef SHIFT_COUNT_TRUNCATED
441 if (SHIFT_COUNT_TRUNCATED)
445 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
447 /* Shifting by the host word size is undefined according to the
448 ANSI standard, so we must handle this as a special case. */
452 else if (count >= HOST_BITS_PER_WIDE_INT)
455 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
459 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
461 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
464 /* Zero / sign extend all bits that are beyond the precision. */
466 if (count >= (HOST_WIDE_INT)prec)
471 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
473 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
475 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
476 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
481 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
482 *lv |= signmask << (prec - count);
486 /* Rotate the doubleword integer in L1, H1 left by COUNT places
487 keeping only PREC bits of result.
488 Rotate right if COUNT is negative.
489 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
492 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
493 HOST_WIDE_INT count, unsigned int prec,
494 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
496 unsigned HOST_WIDE_INT s1l, s2l;
497 HOST_WIDE_INT s1h, s2h;
503 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
504 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
509 /* Rotate the doubleword integer in L1, H1 left by COUNT places
510 keeping only PREC bits of result. COUNT must be positive.
511 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
514 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
515 HOST_WIDE_INT count, unsigned int prec,
516 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
518 unsigned HOST_WIDE_INT s1l, s2l;
519 HOST_WIDE_INT s1h, s2h;
525 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
526 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
531 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
532 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
533 CODE is a tree code for a kind of division, one of
534 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
536 It controls how the quotient is rounded to an integer.
537 Return nonzero if the operation overflows.
538 UNS nonzero says do unsigned division. */
541 div_and_round_double (enum tree_code code, int uns,
542 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
543 HOST_WIDE_INT hnum_orig,
544 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
545 HOST_WIDE_INT hden_orig,
546 unsigned HOST_WIDE_INT *lquo,
547 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
551 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
552 HOST_WIDE_INT den[4], quo[4];
554 unsigned HOST_WIDE_INT work;
555 unsigned HOST_WIDE_INT carry = 0;
556 unsigned HOST_WIDE_INT lnum = lnum_orig;
557 HOST_WIDE_INT hnum = hnum_orig;
558 unsigned HOST_WIDE_INT lden = lden_orig;
559 HOST_WIDE_INT hden = hden_orig;
562 if (hden == 0 && lden == 0)
563 overflow = 1, lden = 1;
565 /* Calculate quotient sign and convert operands to unsigned. */
571 /* (minimum integer) / (-1) is the only overflow case. */
572 if (neg_double (lnum, hnum, &lnum, &hnum)
573 && ((HOST_WIDE_INT) lden & hden) == -1)
579 neg_double (lden, hden, &lden, &hden);
583 if (hnum == 0 && hden == 0)
584 { /* single precision */
586 /* This unsigned division rounds toward zero. */
592 { /* trivial case: dividend < divisor */
593 /* hden != 0 already checked. */
600 memset (quo, 0, sizeof quo);
602 memset (num, 0, sizeof num); /* to zero 9th element */
603 memset (den, 0, sizeof den);
605 encode (num, lnum, hnum);
606 encode (den, lden, hden);
608 /* Special code for when the divisor < BASE. */
609 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
611 /* hnum != 0 already checked. */
612 for (i = 4 - 1; i >= 0; i--)
614 work = num[i] + carry * BASE;
615 quo[i] = work / lden;
621 /* Full double precision division,
622 with thanks to Don Knuth's "Seminumerical Algorithms". */
623 int num_hi_sig, den_hi_sig;
624 unsigned HOST_WIDE_INT quo_est, scale;
626 /* Find the highest nonzero divisor digit. */
627 for (i = 4 - 1;; i--)
634 /* Insure that the first digit of the divisor is at least BASE/2.
635 This is required by the quotient digit estimation algorithm. */
637 scale = BASE / (den[den_hi_sig] + 1);
639 { /* scale divisor and dividend */
641 for (i = 0; i <= 4 - 1; i++)
643 work = (num[i] * scale) + carry;
644 num[i] = LOWPART (work);
645 carry = HIGHPART (work);
650 for (i = 0; i <= 4 - 1; i++)
652 work = (den[i] * scale) + carry;
653 den[i] = LOWPART (work);
654 carry = HIGHPART (work);
655 if (den[i] != 0) den_hi_sig = i;
662 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
664 /* Guess the next quotient digit, quo_est, by dividing the first
665 two remaining dividend digits by the high order quotient digit.
666 quo_est is never low and is at most 2 high. */
667 unsigned HOST_WIDE_INT tmp;
669 num_hi_sig = i + den_hi_sig + 1;
670 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
671 if (num[num_hi_sig] != den[den_hi_sig])
672 quo_est = work / den[den_hi_sig];
676 /* Refine quo_est so it's usually correct, and at most one high. */
677 tmp = work - quo_est * den[den_hi_sig];
679 && (den[den_hi_sig - 1] * quo_est
680 > (tmp * BASE + num[num_hi_sig - 2])))
683 /* Try QUO_EST as the quotient digit, by multiplying the
684 divisor by QUO_EST and subtracting from the remaining dividend.
685 Keep in mind that QUO_EST is the I - 1st digit. */
688 for (j = 0; j <= den_hi_sig; j++)
690 work = quo_est * den[j] + carry;
691 carry = HIGHPART (work);
692 work = num[i + j] - LOWPART (work);
693 num[i + j] = LOWPART (work);
694 carry += HIGHPART (work) != 0;
697 /* If quo_est was high by one, then num[i] went negative and
698 we need to correct things. */
699 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
702 carry = 0; /* add divisor back in */
703 for (j = 0; j <= den_hi_sig; j++)
705 work = num[i + j] + den[j] + carry;
706 carry = HIGHPART (work);
707 num[i + j] = LOWPART (work);
710 num [num_hi_sig] += carry;
713 /* Store the quotient digit. */
718 decode (quo, lquo, hquo);
721 /* If result is negative, make it so. */
723 neg_double (*lquo, *hquo, lquo, hquo);
725 /* compute trial remainder: rem = num - (quo * den) */
726 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
727 neg_double (*lrem, *hrem, lrem, hrem);
728 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
733 case TRUNC_MOD_EXPR: /* round toward zero */
734 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
738 case FLOOR_MOD_EXPR: /* round toward negative infinity */
739 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
742 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
750 case CEIL_MOD_EXPR: /* round toward positive infinity */
751 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
753 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
761 case ROUND_MOD_EXPR: /* round to closest integer */
763 unsigned HOST_WIDE_INT labs_rem = *lrem;
764 HOST_WIDE_INT habs_rem = *hrem;
765 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
766 HOST_WIDE_INT habs_den = hden, htwice;
768 /* Get absolute values. */
770 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
772 neg_double (lden, hden, &labs_den, &habs_den);
774 /* If (2 * abs (lrem) >= abs (lden)) */
775 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
776 labs_rem, habs_rem, <wice, &htwice);
778 if (((unsigned HOST_WIDE_INT) habs_den
779 < (unsigned HOST_WIDE_INT) htwice)
780 || (((unsigned HOST_WIDE_INT) habs_den
781 == (unsigned HOST_WIDE_INT) htwice)
782 && (labs_den < ltwice)))
786 add_double (*lquo, *hquo,
787 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
790 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
802 /* Compute true remainder: rem = num - (quo * den) */
803 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
804 neg_double (*lrem, *hrem, lrem, hrem);
805 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
809 /* Return true if built-in mathematical function specified by CODE
810 preserves the sign of it argument, i.e. -f(x) == f(-x). */
813 negate_mathfn_p (enum built_in_function code)
837 /* Determine whether an expression T can be cheaply negated using
838 the function negate_expr. */
841 negate_expr_p (tree t)
843 unsigned HOST_WIDE_INT val;
850 type = TREE_TYPE (t);
853 switch (TREE_CODE (t))
856 if (TREE_UNSIGNED (type) || ! flag_trapv)
859 /* Check that -CST will not overflow type. */
860 prec = TYPE_PRECISION (type);
861 if (prec > HOST_BITS_PER_WIDE_INT)
863 if (TREE_INT_CST_LOW (t) != 0)
865 prec -= HOST_BITS_PER_WIDE_INT;
866 val = TREE_INT_CST_HIGH (t);
869 val = TREE_INT_CST_LOW (t);
870 if (prec < HOST_BITS_PER_WIDE_INT)
871 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
872 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
879 return negate_expr_p (TREE_REALPART (t))
880 && negate_expr_p (TREE_IMAGPART (t));
883 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
885 /* -(A + B) -> (-B) - A. */
886 if (negate_expr_p (TREE_OPERAND (t, 1))
887 && reorder_operands_p (TREE_OPERAND (t, 0),
888 TREE_OPERAND (t, 1)))
890 /* -(A + B) -> (-A) - B. */
891 return negate_expr_p (TREE_OPERAND (t, 0));
894 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
895 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
896 && reorder_operands_p (TREE_OPERAND (t, 0),
897 TREE_OPERAND (t, 1));
900 if (TREE_UNSIGNED (TREE_TYPE (t)))
906 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
907 return negate_expr_p (TREE_OPERAND (t, 1))
908 || negate_expr_p (TREE_OPERAND (t, 0));
912 /* Negate -((double)float) as (double)(-float). */
913 if (TREE_CODE (type) == REAL_TYPE)
915 tree tem = strip_float_extensions (t);
917 return negate_expr_p (tem);
922 /* Negate -f(x) as f(-x). */
923 if (negate_mathfn_p (builtin_mathfn_code (t)))
924 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
933 /* Given T, an expression, return the negation of T. Allow for T to be
934 null, in which case return null. */
945 type = TREE_TYPE (t);
948 switch (TREE_CODE (t))
952 unsigned HOST_WIDE_INT low;
954 int overflow = neg_double (TREE_INT_CST_LOW (t),
955 TREE_INT_CST_HIGH (t),
957 tem = build_int_2 (low, high);
958 TREE_TYPE (tem) = type;
961 | force_fit_type (tem, overflow && !TREE_UNSIGNED (type)));
962 TREE_CONSTANT_OVERFLOW (tem)
963 = TREE_OVERFLOW (tem) | TREE_CONSTANT_OVERFLOW (t);
965 if (! TREE_OVERFLOW (tem)
966 || TREE_UNSIGNED (type)
972 tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
973 /* Two's complement FP formats, such as c4x, may overflow. */
974 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
975 return fold_convert (type, tem);
980 tree rpart = negate_expr (TREE_REALPART (t));
981 tree ipart = negate_expr (TREE_IMAGPART (t));
983 if ((TREE_CODE (rpart) == REAL_CST
984 && TREE_CODE (ipart) == REAL_CST)
985 || (TREE_CODE (rpart) == INTEGER_CST
986 && TREE_CODE (ipart) == INTEGER_CST))
987 return build_complex (type, rpart, ipart);
992 return fold_convert (type, TREE_OPERAND (t, 0));
995 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
997 /* -(A + B) -> (-B) - A. */
998 if (negate_expr_p (TREE_OPERAND (t, 1))
999 && reorder_operands_p (TREE_OPERAND (t, 0),
1000 TREE_OPERAND (t, 1)))
1001 return fold_convert (type,
1002 fold (build (MINUS_EXPR, TREE_TYPE (t),
1003 negate_expr (TREE_OPERAND (t, 1)),
1004 TREE_OPERAND (t, 0))));
1005 /* -(A + B) -> (-A) - B. */
1006 if (negate_expr_p (TREE_OPERAND (t, 0)))
1007 return fold_convert (type,
1008 fold (build (MINUS_EXPR, TREE_TYPE (t),
1009 negate_expr (TREE_OPERAND (t, 0)),
1010 TREE_OPERAND (t, 1))));
1015 /* - (A - B) -> B - A */
1016 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1017 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1018 return fold_convert (type,
1019 fold (build (MINUS_EXPR, TREE_TYPE (t),
1020 TREE_OPERAND (t, 1),
1021 TREE_OPERAND (t, 0))));
1025 if (TREE_UNSIGNED (TREE_TYPE (t)))
1031 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1033 tem = TREE_OPERAND (t, 1);
1034 if (negate_expr_p (tem))
1035 return fold_convert (type,
1036 fold (build (TREE_CODE (t), TREE_TYPE (t),
1037 TREE_OPERAND (t, 0),
1038 negate_expr (tem))));
1039 tem = TREE_OPERAND (t, 0);
1040 if (negate_expr_p (tem))
1041 return fold_convert (type,
1042 fold (build (TREE_CODE (t), TREE_TYPE (t),
1044 TREE_OPERAND (t, 1))));
1049 /* Convert -((double)float) into (double)(-float). */
1050 if (TREE_CODE (type) == REAL_TYPE)
1052 tem = strip_float_extensions (t);
1053 if (tem != t && negate_expr_p (tem))
1054 return fold_convert (type, negate_expr (tem));
1059 /* Negate -f(x) as f(-x). */
1060 if (negate_mathfn_p (builtin_mathfn_code (t))
1061 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1063 tree fndecl, arg, arglist;
1065 fndecl = get_callee_fndecl (t);
1066 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1067 arglist = build_tree_list (NULL_TREE, arg);
1068 return build_function_call_expr (fndecl, arglist);
1076 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1077 return fold_convert (type, tem);
1080 /* Split a tree IN into a constant, literal and variable parts that could be
1081 combined with CODE to make IN. "constant" means an expression with
1082 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1083 commutative arithmetic operation. Store the constant part into *CONP,
1084 the literal in *LITP and return the variable part. If a part isn't
1085 present, set it to null. If the tree does not decompose in this way,
1086 return the entire tree as the variable part and the other parts as null.
1088 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1089 case, we negate an operand that was subtracted. Except if it is a
1090 literal for which we use *MINUS_LITP instead.
1092 If NEGATE_P is true, we are negating all of IN, again except a literal
1093 for which we use *MINUS_LITP instead.
1095 If IN is itself a literal or constant, return it as appropriate.
1097 Note that we do not guarantee that any of the three values will be the
1098 same type as IN, but they will have the same signedness and mode. */
1101 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1102 tree *minus_litp, int negate_p)
1110 /* Strip any conversions that don't change the machine mode or signedness. */
1111 STRIP_SIGN_NOPS (in);
1113 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1115 else if (TREE_CODE (in) == code
1116 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1117 /* We can associate addition and subtraction together (even
1118 though the C standard doesn't say so) for integers because
1119 the value is not affected. For reals, the value might be
1120 affected, so we can't. */
1121 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1122 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1124 tree op0 = TREE_OPERAND (in, 0);
1125 tree op1 = TREE_OPERAND (in, 1);
1126 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1127 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1129 /* First see if either of the operands is a literal, then a constant. */
1130 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1131 *litp = op0, op0 = 0;
1132 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1133 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1135 if (op0 != 0 && TREE_CONSTANT (op0))
1136 *conp = op0, op0 = 0;
1137 else if (op1 != 0 && TREE_CONSTANT (op1))
1138 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1140 /* If we haven't dealt with either operand, this is not a case we can
1141 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1142 if (op0 != 0 && op1 != 0)
1147 var = op1, neg_var_p = neg1_p;
1149 /* Now do any needed negations. */
1151 *minus_litp = *litp, *litp = 0;
1153 *conp = negate_expr (*conp);
1155 var = negate_expr (var);
1157 else if (TREE_CONSTANT (in))
1165 *minus_litp = *litp, *litp = 0;
1166 else if (*minus_litp)
1167 *litp = *minus_litp, *minus_litp = 0;
1168 *conp = negate_expr (*conp);
1169 var = negate_expr (var);
1175 /* Re-associate trees split by the above function. T1 and T2 are either
1176 expressions to associate or null. Return the new expression, if any. If
1177 we build an operation, do it in TYPE and with CODE. */
1180 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1187 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1188 try to fold this since we will have infinite recursion. But do
1189 deal with any NEGATE_EXPRs. */
1190 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1191 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1193 if (code == PLUS_EXPR)
1195 if (TREE_CODE (t1) == NEGATE_EXPR)
1196 return build (MINUS_EXPR, type, fold_convert (type, t2),
1197 fold_convert (type, TREE_OPERAND (t1, 0)));
1198 else if (TREE_CODE (t2) == NEGATE_EXPR)
1199 return build (MINUS_EXPR, type, fold_convert (type, t1),
1200 fold_convert (type, TREE_OPERAND (t2, 0)));
1202 return build (code, type, fold_convert (type, t1),
1203 fold_convert (type, t2));
1206 return fold (build (code, type, fold_convert (type, t1),
1207 fold_convert (type, t2)));
1210 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1211 to produce a new constant.
1213 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1216 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1218 unsigned HOST_WIDE_INT int1l, int2l;
1219 HOST_WIDE_INT int1h, int2h;
1220 unsigned HOST_WIDE_INT low;
1222 unsigned HOST_WIDE_INT garbagel;
1223 HOST_WIDE_INT garbageh;
1225 tree type = TREE_TYPE (arg1);
1226 int uns = TREE_UNSIGNED (type);
1228 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1230 int no_overflow = 0;
1232 int1l = TREE_INT_CST_LOW (arg1);
1233 int1h = TREE_INT_CST_HIGH (arg1);
1234 int2l = TREE_INT_CST_LOW (arg2);
1235 int2h = TREE_INT_CST_HIGH (arg2);
1240 low = int1l | int2l, hi = int1h | int2h;
1244 low = int1l ^ int2l, hi = int1h ^ int2h;
1248 low = int1l & int2l, hi = int1h & int2h;
1254 /* It's unclear from the C standard whether shifts can overflow.
1255 The following code ignores overflow; perhaps a C standard
1256 interpretation ruling is needed. */
1257 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1265 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1270 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1274 neg_double (int2l, int2h, &low, &hi);
1275 add_double (int1l, int1h, low, hi, &low, &hi);
1276 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1280 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1283 case TRUNC_DIV_EXPR:
1284 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1285 case EXACT_DIV_EXPR:
1286 /* This is a shortcut for a common special case. */
1287 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1288 && ! TREE_CONSTANT_OVERFLOW (arg1)
1289 && ! TREE_CONSTANT_OVERFLOW (arg2)
1290 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1292 if (code == CEIL_DIV_EXPR)
1295 low = int1l / int2l, hi = 0;
1299 /* ... fall through ... */
1301 case ROUND_DIV_EXPR:
1302 if (int2h == 0 && int2l == 1)
1304 low = int1l, hi = int1h;
1307 if (int1l == int2l && int1h == int2h
1308 && ! (int1l == 0 && int1h == 0))
1313 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1314 &low, &hi, &garbagel, &garbageh);
1317 case TRUNC_MOD_EXPR:
1318 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1319 /* This is a shortcut for a common special case. */
1320 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1321 && ! TREE_CONSTANT_OVERFLOW (arg1)
1322 && ! TREE_CONSTANT_OVERFLOW (arg2)
1323 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1325 if (code == CEIL_MOD_EXPR)
1327 low = int1l % int2l, hi = 0;
1331 /* ... fall through ... */
1333 case ROUND_MOD_EXPR:
1334 overflow = div_and_round_double (code, uns,
1335 int1l, int1h, int2l, int2h,
1336 &garbagel, &garbageh, &low, &hi);
1342 low = (((unsigned HOST_WIDE_INT) int1h
1343 < (unsigned HOST_WIDE_INT) int2h)
1344 || (((unsigned HOST_WIDE_INT) int1h
1345 == (unsigned HOST_WIDE_INT) int2h)
1348 low = (int1h < int2h
1349 || (int1h == int2h && int1l < int2l));
1351 if (low == (code == MIN_EXPR))
1352 low = int1l, hi = int1h;
1354 low = int2l, hi = int2h;
1361 /* If this is for a sizetype, can be represented as one (signed)
1362 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1365 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1366 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1367 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1368 return size_int_type_wide (low, type);
1371 t = build_int_2 (low, hi);
1372 TREE_TYPE (t) = TREE_TYPE (arg1);
1377 ? (!uns || is_sizetype) && overflow
1378 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1380 | TREE_OVERFLOW (arg1)
1381 | TREE_OVERFLOW (arg2));
1383 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1384 So check if force_fit_type truncated the value. */
1386 && ! TREE_OVERFLOW (t)
1387 && (TREE_INT_CST_HIGH (t) != hi
1388 || TREE_INT_CST_LOW (t) != low))
1389 TREE_OVERFLOW (t) = 1;
1391 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1392 | TREE_CONSTANT_OVERFLOW (arg1)
1393 | TREE_CONSTANT_OVERFLOW (arg2));
1397 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1398 constant. We assume ARG1 and ARG2 have the same data type, or at least
1399 are the same kind of constant and the same machine mode.
1401 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1404 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1409 if (TREE_CODE (arg1) == INTEGER_CST)
1410 return int_const_binop (code, arg1, arg2, notrunc);
1412 if (TREE_CODE (arg1) == REAL_CST)
1414 enum machine_mode mode;
1417 REAL_VALUE_TYPE value;
1420 d1 = TREE_REAL_CST (arg1);
1421 d2 = TREE_REAL_CST (arg2);
1423 type = TREE_TYPE (arg1);
1424 mode = TYPE_MODE (type);
1426 /* Don't perform operation if we honor signaling NaNs and
1427 either operand is a NaN. */
1428 if (HONOR_SNANS (mode)
1429 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1432 /* Don't perform operation if it would raise a division
1433 by zero exception. */
1434 if (code == RDIV_EXPR
1435 && REAL_VALUES_EQUAL (d2, dconst0)
1436 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1439 /* If either operand is a NaN, just return it. Otherwise, set up
1440 for floating-point trap; we return an overflow. */
1441 if (REAL_VALUE_ISNAN (d1))
1443 else if (REAL_VALUE_ISNAN (d2))
1446 REAL_ARITHMETIC (value, code, d1, d2);
1448 t = build_real (type, real_value_truncate (mode, value));
1451 = (force_fit_type (t, 0)
1452 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1453 TREE_CONSTANT_OVERFLOW (t)
1455 | TREE_CONSTANT_OVERFLOW (arg1)
1456 | TREE_CONSTANT_OVERFLOW (arg2);
1459 if (TREE_CODE (arg1) == COMPLEX_CST)
1461 tree type = TREE_TYPE (arg1);
1462 tree r1 = TREE_REALPART (arg1);
1463 tree i1 = TREE_IMAGPART (arg1);
1464 tree r2 = TREE_REALPART (arg2);
1465 tree i2 = TREE_IMAGPART (arg2);
1471 t = build_complex (type,
1472 const_binop (PLUS_EXPR, r1, r2, notrunc),
1473 const_binop (PLUS_EXPR, i1, i2, notrunc));
1477 t = build_complex (type,
1478 const_binop (MINUS_EXPR, r1, r2, notrunc),
1479 const_binop (MINUS_EXPR, i1, i2, notrunc));
1483 t = build_complex (type,
1484 const_binop (MINUS_EXPR,
1485 const_binop (MULT_EXPR,
1487 const_binop (MULT_EXPR,
1490 const_binop (PLUS_EXPR,
1491 const_binop (MULT_EXPR,
1493 const_binop (MULT_EXPR,
1501 = const_binop (PLUS_EXPR,
1502 const_binop (MULT_EXPR, r2, r2, notrunc),
1503 const_binop (MULT_EXPR, i2, i2, notrunc),
1506 t = build_complex (type,
1508 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1509 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1510 const_binop (PLUS_EXPR,
1511 const_binop (MULT_EXPR, r1, r2,
1513 const_binop (MULT_EXPR, i1, i2,
1516 magsquared, notrunc),
1518 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1519 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1520 const_binop (MINUS_EXPR,
1521 const_binop (MULT_EXPR, i1, r2,
1523 const_binop (MULT_EXPR, r1, i2,
1526 magsquared, notrunc));
1538 /* These are the hash table functions for the hash table of INTEGER_CST
1539 nodes of a sizetype. */
1541 /* Return the hash code code X, an INTEGER_CST. */
1544 size_htab_hash (const void *x)
1548 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1549 ^ htab_hash_pointer (TREE_TYPE (t))
1550 ^ (TREE_OVERFLOW (t) << 20));
1553 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1554 is the same as that given by *Y, which is the same. */
1557 size_htab_eq (const void *x, const void *y)
1562 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1563 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1564 && TREE_TYPE (xt) == TREE_TYPE (yt)
1565 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1568 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1569 bits are given by NUMBER and of the sizetype represented by KIND. */
1572 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1574 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1577 /* Likewise, but the desired type is specified explicitly. */
1579 static GTY (()) tree new_const;
1580 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1584 size_int_type_wide (HOST_WIDE_INT number, tree type)
1590 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1591 new_const = make_node (INTEGER_CST);
1594 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1595 hash table, we return the value from the hash table. Otherwise, we
1596 place that in the hash table and make a new node for the next time. */
1597 TREE_INT_CST_LOW (new_const) = number;
1598 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1599 TREE_TYPE (new_const) = type;
1600 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1601 = force_fit_type (new_const, 0);
1603 slot = htab_find_slot (size_htab, new_const, INSERT);
1609 new_const = make_node (INTEGER_CST);
1613 return (tree) *slot;
1616 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1617 is a tree code. The type of the result is taken from the operands.
1618 Both must be the same type integer type and it must be a size type.
1619 If the operands are constant, so is the result. */
1622 size_binop (enum tree_code code, tree arg0, tree arg1)
1624 tree type = TREE_TYPE (arg0);
1626 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1627 || type != TREE_TYPE (arg1))
1630 /* Handle the special case of two integer constants faster. */
1631 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1633 /* And some specific cases even faster than that. */
1634 if (code == PLUS_EXPR && integer_zerop (arg0))
1636 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1637 && integer_zerop (arg1))
1639 else if (code == MULT_EXPR && integer_onep (arg0))
1642 /* Handle general case of two integer constants. */
1643 return int_const_binop (code, arg0, arg1, 0);
1646 if (arg0 == error_mark_node || arg1 == error_mark_node)
1647 return error_mark_node;
1649 return fold (build (code, type, arg0, arg1));
1652 /* Given two values, either both of sizetype or both of bitsizetype,
1653 compute the difference between the two values. Return the value
1654 in signed type corresponding to the type of the operands. */
1657 size_diffop (tree arg0, tree arg1)
1659 tree type = TREE_TYPE (arg0);
1662 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1663 || type != TREE_TYPE (arg1))
1666 /* If the type is already signed, just do the simple thing. */
1667 if (! TREE_UNSIGNED (type))
1668 return size_binop (MINUS_EXPR, arg0, arg1);
1670 ctype = (type == bitsizetype || type == ubitsizetype
1671 ? sbitsizetype : ssizetype);
1673 /* If either operand is not a constant, do the conversions to the signed
1674 type and subtract. The hardware will do the right thing with any
1675 overflow in the subtraction. */
1676 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1677 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1678 fold_convert (ctype, arg1));
1680 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1681 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1682 overflow) and negate (which can't either). Special-case a result
1683 of zero while we're here. */
1684 if (tree_int_cst_equal (arg0, arg1))
1685 return fold_convert (ctype, integer_zero_node);
1686 else if (tree_int_cst_lt (arg1, arg0))
1687 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1689 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1690 fold_convert (ctype, size_binop (MINUS_EXPR,
1695 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1696 type TYPE. If no simplification can be done return NULL_TREE. */
1699 fold_convert_const (enum tree_code code, tree type, tree arg1)
1704 if (TREE_TYPE (arg1) == type)
1707 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1709 if (TREE_CODE (arg1) == INTEGER_CST)
1711 /* If we would build a constant wider than GCC supports,
1712 leave the conversion unfolded. */
1713 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1716 /* If we are trying to make a sizetype for a small integer, use
1717 size_int to pick up cached types to reduce duplicate nodes. */
1718 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1719 && !TREE_CONSTANT_OVERFLOW (arg1)
1720 && compare_tree_int (arg1, 10000) < 0)
1721 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1723 /* Given an integer constant, make new constant with new type,
1724 appropriately sign-extended or truncated. */
1725 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1726 TREE_INT_CST_HIGH (arg1));
1727 TREE_TYPE (t) = type;
1728 /* Indicate an overflow if (1) ARG1 already overflowed,
1729 or (2) force_fit_type indicates an overflow.
1730 Tell force_fit_type that an overflow has already occurred
1731 if ARG1 is a too-large unsigned value and T is signed.
1732 But don't indicate an overflow if converting a pointer. */
1734 = ((force_fit_type (t,
1735 (TREE_INT_CST_HIGH (arg1) < 0
1736 && (TREE_UNSIGNED (type)
1737 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1738 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1739 || TREE_OVERFLOW (arg1));
1740 TREE_CONSTANT_OVERFLOW (t)
1741 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1744 else if (TREE_CODE (arg1) == REAL_CST)
1746 /* The following code implements the floating point to integer
1747 conversion rules required by the Java Language Specification,
1748 that IEEE NaNs are mapped to zero and values that overflow
1749 the target precision saturate, i.e. values greater than
1750 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1751 are mapped to INT_MIN. These semantics are allowed by the
1752 C and C++ standards that simply state that the behavior of
1753 FP-to-integer conversion is unspecified upon overflow. */
1755 HOST_WIDE_INT high, low;
1758 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1762 case FIX_TRUNC_EXPR:
1763 real_trunc (&r, VOIDmode, &x);
1767 real_ceil (&r, VOIDmode, &x);
1770 case FIX_FLOOR_EXPR:
1771 real_floor (&r, VOIDmode, &x);
1778 /* If R is NaN, return zero and show we have an overflow. */
1779 if (REAL_VALUE_ISNAN (r))
1786 /* See if R is less than the lower bound or greater than the
1791 tree lt = TYPE_MIN_VALUE (type);
1792 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1793 if (REAL_VALUES_LESS (r, l))
1796 high = TREE_INT_CST_HIGH (lt);
1797 low = TREE_INT_CST_LOW (lt);
1803 tree ut = TYPE_MAX_VALUE (type);
1806 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1807 if (REAL_VALUES_LESS (u, r))
1810 high = TREE_INT_CST_HIGH (ut);
1811 low = TREE_INT_CST_LOW (ut);
1817 REAL_VALUE_TO_INT (&low, &high, r);
1819 t = build_int_2 (low, high);
1820 TREE_TYPE (t) = type;
1822 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1823 TREE_CONSTANT_OVERFLOW (t)
1824 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1828 else if (TREE_CODE (type) == REAL_TYPE)
1830 if (TREE_CODE (arg1) == INTEGER_CST)
1831 return build_real_from_int_cst (type, arg1);
1832 if (TREE_CODE (arg1) == REAL_CST)
1834 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1836 /* We make a copy of ARG1 so that we don't modify an
1837 existing constant tree. */
1838 t = copy_node (arg1);
1839 TREE_TYPE (t) = type;
1843 t = build_real (type,
1844 real_value_truncate (TYPE_MODE (type),
1845 TREE_REAL_CST (arg1)));
1848 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1849 TREE_CONSTANT_OVERFLOW (t)
1850 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1857 /* Convert expression ARG to type TYPE. Used by the middle-end for
1858 simple conversions in preference to calling the front-end's convert. */
1861 fold_convert (tree type, tree arg)
1863 tree orig = TREE_TYPE (arg);
1869 if (TREE_CODE (arg) == ERROR_MARK
1870 || TREE_CODE (type) == ERROR_MARK
1871 || TREE_CODE (orig) == ERROR_MARK)
1872 return error_mark_node;
1874 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1875 return fold (build1 (NOP_EXPR, type, arg));
1877 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1879 if (TREE_CODE (arg) == INTEGER_CST)
1881 tem = fold_convert_const (NOP_EXPR, type, arg);
1882 if (tem != NULL_TREE)
1885 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1886 return fold (build1 (NOP_EXPR, type, arg));
1887 if (TREE_CODE (orig) == COMPLEX_TYPE)
1889 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1890 return fold_convert (type, tem);
1892 if (TREE_CODE (orig) == VECTOR_TYPE
1893 && GET_MODE_SIZE (TYPE_MODE (type))
1894 == GET_MODE_SIZE (TYPE_MODE (orig)))
1895 return fold (build1 (NOP_EXPR, type, arg));
1897 else if (TREE_CODE (type) == REAL_TYPE)
1899 if (TREE_CODE (arg) == INTEGER_CST)
1901 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1902 if (tem != NULL_TREE)
1905 else if (TREE_CODE (arg) == REAL_CST)
1907 tem = fold_convert_const (NOP_EXPR, type, arg);
1908 if (tem != NULL_TREE)
1912 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1913 return fold (build1 (FLOAT_EXPR, type, arg));
1914 if (TREE_CODE (orig) == REAL_TYPE)
1915 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1917 if (TREE_CODE (orig) == COMPLEX_TYPE)
1919 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1920 return fold_convert (type, tem);
1923 else if (TREE_CODE (type) == COMPLEX_TYPE)
1925 if (INTEGRAL_TYPE_P (orig)
1926 || POINTER_TYPE_P (orig)
1927 || TREE_CODE (orig) == REAL_TYPE)
1928 return build (COMPLEX_EXPR, type,
1929 fold_convert (TREE_TYPE (type), arg),
1930 fold_convert (TREE_TYPE (type), integer_zero_node));
1931 if (TREE_CODE (orig) == COMPLEX_TYPE)
1935 if (TREE_CODE (arg) == COMPLEX_EXPR)
1937 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1938 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1939 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1942 arg = save_expr (arg);
1943 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1944 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1945 rpart = fold_convert (TREE_TYPE (type), rpart);
1946 ipart = fold_convert (TREE_TYPE (type), ipart);
1947 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1950 else if (TREE_CODE (type) == VECTOR_TYPE)
1952 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1953 && GET_MODE_SIZE (TYPE_MODE (type))
1954 == GET_MODE_SIZE (TYPE_MODE (orig)))
1955 return fold (build1 (NOP_EXPR, type, arg));
1956 if (TREE_CODE (orig) == VECTOR_TYPE
1957 && GET_MODE_SIZE (TYPE_MODE (type))
1958 == GET_MODE_SIZE (TYPE_MODE (orig)))
1959 return fold (build1 (NOP_EXPR, type, arg));
1961 else if (VOID_TYPE_P (type))
1962 return fold (build1 (CONVERT_EXPR, type, arg));
1966 /* Return an expr equal to X but certainly not valid as an lvalue. */
1973 /* These things are certainly not lvalues. */
1974 if (TREE_CODE (x) == NON_LVALUE_EXPR
1975 || TREE_CODE (x) == INTEGER_CST
1976 || TREE_CODE (x) == REAL_CST
1977 || TREE_CODE (x) == STRING_CST
1978 || TREE_CODE (x) == ADDR_EXPR)
1981 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1982 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1986 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1987 Zero means allow extended lvalues. */
1989 int pedantic_lvalues;
1991 /* When pedantic, return an expr equal to X but certainly not valid as a
1992 pedantic lvalue. Otherwise, return X. */
1995 pedantic_non_lvalue (tree x)
1997 if (pedantic_lvalues)
1998 return non_lvalue (x);
2003 /* Given a tree comparison code, return the code that is the logical inverse
2004 of the given code. It is not safe to do this for floating-point
2005 comparisons, except for NE_EXPR and EQ_EXPR. */
2007 static enum tree_code
2008 invert_tree_comparison (enum tree_code code)
2029 /* Similar, but return the comparison that results if the operands are
2030 swapped. This is safe for floating-point. */
2032 static enum tree_code
2033 swap_tree_comparison (enum tree_code code)
2054 /* Convert a comparison tree code from an enum tree_code representation
2055 into a compcode bit-based encoding. This function is the inverse of
2056 compcode_to_comparison. */
2059 comparison_to_compcode (enum tree_code code)
2080 /* Convert a compcode bit-based encoding of a comparison operator back
2081 to GCC's enum tree_code representation. This function is the
2082 inverse of comparison_to_compcode. */
2084 static enum tree_code
2085 compcode_to_comparison (int code)
2106 /* Return nonzero if CODE is a tree code that represents a truth value. */
2109 truth_value_p (enum tree_code code)
2111 return (TREE_CODE_CLASS (code) == '<'
2112 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2113 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2114 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2117 /* Return nonzero if two operands (typically of the same tree node)
2118 are necessarily equal. If either argument has side-effects this
2119 function returns zero.
2121 If ONLY_CONST is nonzero, only return nonzero for constants.
2122 This function tests whether the operands are indistinguishable;
2123 it does not test whether they are equal using C's == operation.
2124 The distinction is important for IEEE floating point, because
2125 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2126 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2128 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2129 even though it may hold multiple values during a function.
2130 This is because a GCC tree node guarantees that nothing else is
2131 executed between the evaluation of its "operands" (which may often
2132 be evaluated in arbitrary order). Hence if the operands themselves
2133 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2134 same value in each operand/subexpression. Hence a zero value for
2135 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2136 If comparing arbitrary expression trees, such as from different
2137 statements, ONLY_CONST must usually be nonzero. */
2140 operand_equal_p (tree arg0, tree arg1, int only_const)
2144 /* If both types don't have the same signedness, then we can't consider
2145 them equal. We must check this before the STRIP_NOPS calls
2146 because they may change the signedness of the arguments. */
2147 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2153 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2154 /* This is needed for conversions and for COMPONENT_REF.
2155 Might as well play it safe and always test this. */
2156 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2157 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2158 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2161 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2162 We don't care about side effects in that case because the SAVE_EXPR
2163 takes care of that for us. In all other cases, two expressions are
2164 equal if they have no side effects. If we have two identical
2165 expressions with side effects that should be treated the same due
2166 to the only side effects being identical SAVE_EXPR's, that will
2167 be detected in the recursive calls below. */
2168 if (arg0 == arg1 && ! only_const
2169 && (TREE_CODE (arg0) == SAVE_EXPR
2170 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2173 /* Next handle constant cases, those for which we can return 1 even
2174 if ONLY_CONST is set. */
2175 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2176 switch (TREE_CODE (arg0))
2179 return (! TREE_CONSTANT_OVERFLOW (arg0)
2180 && ! TREE_CONSTANT_OVERFLOW (arg1)
2181 && tree_int_cst_equal (arg0, arg1));
2184 return (! TREE_CONSTANT_OVERFLOW (arg0)
2185 && ! TREE_CONSTANT_OVERFLOW (arg1)
2186 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2187 TREE_REAL_CST (arg1)));
2193 if (TREE_CONSTANT_OVERFLOW (arg0)
2194 || TREE_CONSTANT_OVERFLOW (arg1))
2197 v1 = TREE_VECTOR_CST_ELTS (arg0);
2198 v2 = TREE_VECTOR_CST_ELTS (arg1);
2201 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2204 v1 = TREE_CHAIN (v1);
2205 v2 = TREE_CHAIN (v2);
2212 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2214 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2218 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2219 && ! memcmp (TREE_STRING_POINTER (arg0),
2220 TREE_STRING_POINTER (arg1),
2221 TREE_STRING_LENGTH (arg0)));
2224 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2233 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2236 /* Two conversions are equal only if signedness and modes match. */
2237 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2238 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2239 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2242 return operand_equal_p (TREE_OPERAND (arg0, 0),
2243 TREE_OPERAND (arg1, 0), 0);
2247 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2248 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2252 /* For commutative ops, allow the other order. */
2253 return (commutative_tree_code (TREE_CODE (arg0))
2254 && operand_equal_p (TREE_OPERAND (arg0, 0),
2255 TREE_OPERAND (arg1, 1), 0)
2256 && operand_equal_p (TREE_OPERAND (arg0, 1),
2257 TREE_OPERAND (arg1, 0), 0));
2260 /* If either of the pointer (or reference) expressions we are
2261 dereferencing contain a side effect, these cannot be equal. */
2262 if (TREE_SIDE_EFFECTS (arg0)
2263 || TREE_SIDE_EFFECTS (arg1))
2266 switch (TREE_CODE (arg0))
2269 return operand_equal_p (TREE_OPERAND (arg0, 0),
2270 TREE_OPERAND (arg1, 0), 0);
2274 case ARRAY_RANGE_REF:
2275 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2276 TREE_OPERAND (arg1, 0), 0)
2277 && operand_equal_p (TREE_OPERAND (arg0, 1),
2278 TREE_OPERAND (arg1, 1), 0));
2281 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2282 TREE_OPERAND (arg1, 0), 0)
2283 && operand_equal_p (TREE_OPERAND (arg0, 1),
2284 TREE_OPERAND (arg1, 1), 0)
2285 && operand_equal_p (TREE_OPERAND (arg0, 2),
2286 TREE_OPERAND (arg1, 2), 0));
2292 switch (TREE_CODE (arg0))
2295 case TRUTH_NOT_EXPR:
2296 return operand_equal_p (TREE_OPERAND (arg0, 0),
2297 TREE_OPERAND (arg1, 0), 0);
2300 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2303 /* If the CALL_EXPRs call different functions, then they
2304 clearly can not be equal. */
2305 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2306 TREE_OPERAND (arg1, 0), 0))
2309 /* Only consider const functions equivalent. */
2310 fndecl = get_callee_fndecl (arg0);
2311 if (fndecl == NULL_TREE
2312 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2315 /* Now see if all the arguments are the same. operand_equal_p
2316 does not handle TREE_LIST, so we walk the operands here
2317 feeding them to operand_equal_p. */
2318 arg0 = TREE_OPERAND (arg0, 1);
2319 arg1 = TREE_OPERAND (arg1, 1);
2320 while (arg0 && arg1)
2322 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2325 arg0 = TREE_CHAIN (arg0);
2326 arg1 = TREE_CHAIN (arg1);
2329 /* If we get here and both argument lists are exhausted
2330 then the CALL_EXPRs are equal. */
2331 return ! (arg0 || arg1);
2338 /* Consider __builtin_sqrt equal to sqrt. */
2339 return TREE_CODE (arg0) == FUNCTION_DECL
2340 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2341 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2342 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2349 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2350 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2352 When in doubt, return 0. */
2355 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2357 int unsignedp1, unsignedpo;
2358 tree primarg0, primarg1, primother;
2359 unsigned int correct_width;
2361 if (operand_equal_p (arg0, arg1, 0))
2364 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2365 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2368 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2369 and see if the inner values are the same. This removes any
2370 signedness comparison, which doesn't matter here. */
2371 primarg0 = arg0, primarg1 = arg1;
2372 STRIP_NOPS (primarg0);
2373 STRIP_NOPS (primarg1);
2374 if (operand_equal_p (primarg0, primarg1, 0))
2377 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2378 actual comparison operand, ARG0.
2380 First throw away any conversions to wider types
2381 already present in the operands. */
2383 primarg1 = get_narrower (arg1, &unsignedp1);
2384 primother = get_narrower (other, &unsignedpo);
2386 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2387 if (unsignedp1 == unsignedpo
2388 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2389 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2391 tree type = TREE_TYPE (arg0);
2393 /* Make sure shorter operand is extended the right way
2394 to match the longer operand. */
2395 primarg1 = fold_convert ((*lang_hooks.types.signed_or_unsigned_type)
2396 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2398 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2405 /* See if ARG is an expression that is either a comparison or is performing
2406 arithmetic on comparisons. The comparisons must only be comparing
2407 two different values, which will be stored in *CVAL1 and *CVAL2; if
2408 they are nonzero it means that some operands have already been found.
2409 No variables may be used anywhere else in the expression except in the
2410 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2411 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2413 If this is true, return 1. Otherwise, return zero. */
2416 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2418 enum tree_code code = TREE_CODE (arg);
2419 char class = TREE_CODE_CLASS (code);
2421 /* We can handle some of the 'e' cases here. */
2422 if (class == 'e' && code == TRUTH_NOT_EXPR)
2424 else if (class == 'e'
2425 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2426 || code == COMPOUND_EXPR))
2429 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2430 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2432 /* If we've already found a CVAL1 or CVAL2, this expression is
2433 two complex to handle. */
2434 if (*cval1 || *cval2)
2444 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2447 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2448 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2449 cval1, cval2, save_p));
2455 if (code == COND_EXPR)
2456 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2457 cval1, cval2, save_p)
2458 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2459 cval1, cval2, save_p)
2460 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2461 cval1, cval2, save_p));
2465 /* First see if we can handle the first operand, then the second. For
2466 the second operand, we know *CVAL1 can't be zero. It must be that
2467 one side of the comparison is each of the values; test for the
2468 case where this isn't true by failing if the two operands
2471 if (operand_equal_p (TREE_OPERAND (arg, 0),
2472 TREE_OPERAND (arg, 1), 0))
2476 *cval1 = TREE_OPERAND (arg, 0);
2477 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2479 else if (*cval2 == 0)
2480 *cval2 = TREE_OPERAND (arg, 0);
2481 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2486 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2488 else if (*cval2 == 0)
2489 *cval2 = TREE_OPERAND (arg, 1);
2490 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2502 /* ARG is a tree that is known to contain just arithmetic operations and
2503 comparisons. Evaluate the operations in the tree substituting NEW0 for
2504 any occurrence of OLD0 as an operand of a comparison and likewise for
2508 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2510 tree type = TREE_TYPE (arg);
2511 enum tree_code code = TREE_CODE (arg);
2512 char class = TREE_CODE_CLASS (code);
2514 /* We can handle some of the 'e' cases here. */
2515 if (class == 'e' && code == TRUTH_NOT_EXPR)
2517 else if (class == 'e'
2518 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2524 return fold (build1 (code, type,
2525 eval_subst (TREE_OPERAND (arg, 0),
2526 old0, new0, old1, new1)));
2529 return fold (build (code, type,
2530 eval_subst (TREE_OPERAND (arg, 0),
2531 old0, new0, old1, new1),
2532 eval_subst (TREE_OPERAND (arg, 1),
2533 old0, new0, old1, new1)));
2539 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2542 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2545 return fold (build (code, type,
2546 eval_subst (TREE_OPERAND (arg, 0),
2547 old0, new0, old1, new1),
2548 eval_subst (TREE_OPERAND (arg, 1),
2549 old0, new0, old1, new1),
2550 eval_subst (TREE_OPERAND (arg, 2),
2551 old0, new0, old1, new1)));
2555 /* Fall through - ??? */
2559 tree arg0 = TREE_OPERAND (arg, 0);
2560 tree arg1 = TREE_OPERAND (arg, 1);
2562 /* We need to check both for exact equality and tree equality. The
2563 former will be true if the operand has a side-effect. In that
2564 case, we know the operand occurred exactly once. */
2566 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2568 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2571 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2573 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2576 return fold (build (code, type, arg0, arg1));
2584 /* Return a tree for the case when the result of an expression is RESULT
2585 converted to TYPE and OMITTED was previously an operand of the expression
2586 but is now not needed (e.g., we folded OMITTED * 0).
2588 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2589 the conversion of RESULT to TYPE. */
2592 omit_one_operand (tree type, tree result, tree omitted)
2594 tree t = fold_convert (type, result);
2596 if (TREE_SIDE_EFFECTS (omitted))
2597 return build (COMPOUND_EXPR, type, omitted, t);
2599 return non_lvalue (t);
2602 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2605 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2607 tree t = fold_convert (type, result);
2609 if (TREE_SIDE_EFFECTS (omitted))
2610 return build (COMPOUND_EXPR, type, omitted, t);
2612 return pedantic_non_lvalue (t);
2615 /* Return a simplified tree node for the truth-negation of ARG. This
2616 never alters ARG itself. We assume that ARG is an operation that
2617 returns a truth value (0 or 1). */
2620 invert_truthvalue (tree arg)
2622 tree type = TREE_TYPE (arg);
2623 enum tree_code code = TREE_CODE (arg);
2625 if (code == ERROR_MARK)
2628 /* If this is a comparison, we can simply invert it, except for
2629 floating-point non-equality comparisons, in which case we just
2630 enclose a TRUTH_NOT_EXPR around what we have. */
2632 if (TREE_CODE_CLASS (code) == '<')
2634 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2635 && !flag_unsafe_math_optimizations
2638 return build1 (TRUTH_NOT_EXPR, type, arg);
2640 return build (invert_tree_comparison (code), type,
2641 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2647 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2649 case TRUTH_AND_EXPR:
2650 return build (TRUTH_OR_EXPR, type,
2651 invert_truthvalue (TREE_OPERAND (arg, 0)),
2652 invert_truthvalue (TREE_OPERAND (arg, 1)));
2655 return build (TRUTH_AND_EXPR, type,
2656 invert_truthvalue (TREE_OPERAND (arg, 0)),
2657 invert_truthvalue (TREE_OPERAND (arg, 1)));
2659 case TRUTH_XOR_EXPR:
2660 /* Here we can invert either operand. We invert the first operand
2661 unless the second operand is a TRUTH_NOT_EXPR in which case our
2662 result is the XOR of the first operand with the inside of the
2663 negation of the second operand. */
2665 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2666 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2667 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2669 return build (TRUTH_XOR_EXPR, type,
2670 invert_truthvalue (TREE_OPERAND (arg, 0)),
2671 TREE_OPERAND (arg, 1));
2673 case TRUTH_ANDIF_EXPR:
2674 return build (TRUTH_ORIF_EXPR, type,
2675 invert_truthvalue (TREE_OPERAND (arg, 0)),
2676 invert_truthvalue (TREE_OPERAND (arg, 1)));
2678 case TRUTH_ORIF_EXPR:
2679 return build (TRUTH_ANDIF_EXPR, type,
2680 invert_truthvalue (TREE_OPERAND (arg, 0)),
2681 invert_truthvalue (TREE_OPERAND (arg, 1)));
2683 case TRUTH_NOT_EXPR:
2684 return TREE_OPERAND (arg, 0);
2687 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2688 invert_truthvalue (TREE_OPERAND (arg, 1)),
2689 invert_truthvalue (TREE_OPERAND (arg, 2)));
2692 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2693 invert_truthvalue (TREE_OPERAND (arg, 1)));
2695 case WITH_RECORD_EXPR:
2696 return build (WITH_RECORD_EXPR, type,
2697 invert_truthvalue (TREE_OPERAND (arg, 0)),
2698 TREE_OPERAND (arg, 1));
2700 case NON_LVALUE_EXPR:
2701 return invert_truthvalue (TREE_OPERAND (arg, 0));
2706 return build1 (TREE_CODE (arg), type,
2707 invert_truthvalue (TREE_OPERAND (arg, 0)));
2710 if (!integer_onep (TREE_OPERAND (arg, 1)))
2712 return build (EQ_EXPR, type, arg,
2713 fold_convert (type, integer_zero_node));
2716 return build1 (TRUTH_NOT_EXPR, type, arg);
2718 case CLEANUP_POINT_EXPR:
2719 return build1 (CLEANUP_POINT_EXPR, type,
2720 invert_truthvalue (TREE_OPERAND (arg, 0)));
2725 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2727 return build1 (TRUTH_NOT_EXPR, type, arg);
2730 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2731 operands are another bit-wise operation with a common input. If so,
2732 distribute the bit operations to save an operation and possibly two if
2733 constants are involved. For example, convert
2734 (A | B) & (A | C) into A | (B & C)
2735 Further simplification will occur if B and C are constants.
2737 If this optimization cannot be done, 0 will be returned. */
2740 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2745 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2746 || TREE_CODE (arg0) == code
2747 || (TREE_CODE (arg0) != BIT_AND_EXPR
2748 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2751 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2753 common = TREE_OPERAND (arg0, 0);
2754 left = TREE_OPERAND (arg0, 1);
2755 right = TREE_OPERAND (arg1, 1);
2757 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2759 common = TREE_OPERAND (arg0, 0);
2760 left = TREE_OPERAND (arg0, 1);
2761 right = TREE_OPERAND (arg1, 0);
2763 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2765 common = TREE_OPERAND (arg0, 1);
2766 left = TREE_OPERAND (arg0, 0);
2767 right = TREE_OPERAND (arg1, 1);
2769 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2771 common = TREE_OPERAND (arg0, 1);
2772 left = TREE_OPERAND (arg0, 0);
2773 right = TREE_OPERAND (arg1, 0);
2778 return fold (build (TREE_CODE (arg0), type, common,
2779 fold (build (code, type, left, right))));
2782 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2783 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2786 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2789 tree result = build (BIT_FIELD_REF, type, inner,
2790 size_int (bitsize), bitsize_int (bitpos));
2792 TREE_UNSIGNED (result) = unsignedp;
2797 /* Optimize a bit-field compare.
2799 There are two cases: First is a compare against a constant and the
2800 second is a comparison of two items where the fields are at the same
2801 bit position relative to the start of a chunk (byte, halfword, word)
2802 large enough to contain it. In these cases we can avoid the shift
2803 implicit in bitfield extractions.
2805 For constants, we emit a compare of the shifted constant with the
2806 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2807 compared. For two fields at the same position, we do the ANDs with the
2808 similar mask and compare the result of the ANDs.
2810 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2811 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2812 are the left and right operands of the comparison, respectively.
2814 If the optimization described above can be done, we return the resulting
2815 tree. Otherwise we return zero. */
2818 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2821 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2822 tree type = TREE_TYPE (lhs);
2823 tree signed_type, unsigned_type;
2824 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2825 enum machine_mode lmode, rmode, nmode;
2826 int lunsignedp, runsignedp;
2827 int lvolatilep = 0, rvolatilep = 0;
2828 tree linner, rinner = NULL_TREE;
2832 /* Get all the information about the extractions being done. If the bit size
2833 if the same as the size of the underlying object, we aren't doing an
2834 extraction at all and so can do nothing. We also don't want to
2835 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2836 then will no longer be able to replace it. */
2837 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2838 &lunsignedp, &lvolatilep);
2839 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2840 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2845 /* If this is not a constant, we can only do something if bit positions,
2846 sizes, and signedness are the same. */
2847 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2848 &runsignedp, &rvolatilep);
2850 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2851 || lunsignedp != runsignedp || offset != 0
2852 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2856 /* See if we can find a mode to refer to this field. We should be able to,
2857 but fail if we can't. */
2858 nmode = get_best_mode (lbitsize, lbitpos,
2859 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2860 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2861 TYPE_ALIGN (TREE_TYPE (rinner))),
2862 word_mode, lvolatilep || rvolatilep);
2863 if (nmode == VOIDmode)
2866 /* Set signed and unsigned types of the precision of this mode for the
2868 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2869 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2871 /* Compute the bit position and size for the new reference and our offset
2872 within it. If the new reference is the same size as the original, we
2873 won't optimize anything, so return zero. */
2874 nbitsize = GET_MODE_BITSIZE (nmode);
2875 nbitpos = lbitpos & ~ (nbitsize - 1);
2877 if (nbitsize == lbitsize)
2880 if (BYTES_BIG_ENDIAN)
2881 lbitpos = nbitsize - lbitsize - lbitpos;
2883 /* Make the mask to be used against the extracted field. */
2884 mask = build_int_2 (~0, ~0);
2885 TREE_TYPE (mask) = unsigned_type;
2886 force_fit_type (mask, 0);
2887 mask = fold_convert (unsigned_type, mask);
2888 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2889 mask = const_binop (RSHIFT_EXPR, mask,
2890 size_int (nbitsize - lbitsize - lbitpos), 0);
2893 /* If not comparing with constant, just rework the comparison
2895 return build (code, compare_type,
2896 build (BIT_AND_EXPR, unsigned_type,
2897 make_bit_field_ref (linner, unsigned_type,
2898 nbitsize, nbitpos, 1),
2900 build (BIT_AND_EXPR, unsigned_type,
2901 make_bit_field_ref (rinner, unsigned_type,
2902 nbitsize, nbitpos, 1),
2905 /* Otherwise, we are handling the constant case. See if the constant is too
2906 big for the field. Warn and return a tree of for 0 (false) if so. We do
2907 this not only for its own sake, but to avoid having to test for this
2908 error case below. If we didn't, we might generate wrong code.
2910 For unsigned fields, the constant shifted right by the field length should
2911 be all zero. For signed fields, the high-order bits should agree with
2916 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2917 fold_convert (unsigned_type, rhs),
2918 size_int (lbitsize), 0)))
2920 warning ("comparison is always %d due to width of bit-field",
2922 return fold_convert (compare_type,
2924 ? integer_one_node : integer_zero_node));
2929 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2930 size_int (lbitsize - 1), 0);
2931 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2933 warning ("comparison is always %d due to width of bit-field",
2935 return fold_convert (compare_type,
2937 ? integer_one_node : integer_zero_node));
2941 /* Single-bit compares should always be against zero. */
2942 if (lbitsize == 1 && ! integer_zerop (rhs))
2944 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2945 rhs = fold_convert (type, integer_zero_node);
2948 /* Make a new bitfield reference, shift the constant over the
2949 appropriate number of bits and mask it with the computed mask
2950 (in case this was a signed field). If we changed it, make a new one. */
2951 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2954 TREE_SIDE_EFFECTS (lhs) = 1;
2955 TREE_THIS_VOLATILE (lhs) = 1;
2958 rhs = fold (const_binop (BIT_AND_EXPR,
2959 const_binop (LSHIFT_EXPR,
2960 fold_convert (unsigned_type, rhs),
2961 size_int (lbitpos), 0),
2964 return build (code, compare_type,
2965 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2969 /* Subroutine for fold_truthop: decode a field reference.
2971 If EXP is a comparison reference, we return the innermost reference.
2973 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2974 set to the starting bit number.
2976 If the innermost field can be completely contained in a mode-sized
2977 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2979 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2980 otherwise it is not changed.
2982 *PUNSIGNEDP is set to the signedness of the field.
2984 *PMASK is set to the mask used. This is either contained in a
2985 BIT_AND_EXPR or derived from the width of the field.
2987 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2989 Return 0 if this is not a component reference or is one that we can't
2990 do anything with. */
2993 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2994 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2995 int *punsignedp, int *pvolatilep,
2996 tree *pmask, tree *pand_mask)
2998 tree outer_type = 0;
3000 tree mask, inner, offset;
3002 unsigned int precision;
3004 /* All the optimizations using this function assume integer fields.
3005 There are problems with FP fields since the type_for_size call
3006 below can fail for, e.g., XFmode. */
3007 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3010 /* We are interested in the bare arrangement of bits, so strip everything
3011 that doesn't affect the machine mode. However, record the type of the
3012 outermost expression if it may matter below. */
3013 if (TREE_CODE (exp) == NOP_EXPR
3014 || TREE_CODE (exp) == CONVERT_EXPR
3015 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3016 outer_type = TREE_TYPE (exp);
3019 if (TREE_CODE (exp) == BIT_AND_EXPR)
3021 and_mask = TREE_OPERAND (exp, 1);
3022 exp = TREE_OPERAND (exp, 0);
3023 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3024 if (TREE_CODE (and_mask) != INTEGER_CST)
3028 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3029 punsignedp, pvolatilep);
3030 if ((inner == exp && and_mask == 0)
3031 || *pbitsize < 0 || offset != 0
3032 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3035 /* If the number of bits in the reference is the same as the bitsize of
3036 the outer type, then the outer type gives the signedness. Otherwise
3037 (in case of a small bitfield) the signedness is unchanged. */
3038 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3039 *punsignedp = TREE_UNSIGNED (outer_type);
3041 /* Compute the mask to access the bitfield. */
3042 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
3043 precision = TYPE_PRECISION (unsigned_type);
3045 mask = build_int_2 (~0, ~0);
3046 TREE_TYPE (mask) = unsigned_type;
3047 force_fit_type (mask, 0);
3048 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3049 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3051 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3053 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3054 fold_convert (unsigned_type, and_mask), mask));
3057 *pand_mask = and_mask;
3061 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3065 all_ones_mask_p (tree mask, int size)
3067 tree type = TREE_TYPE (mask);
3068 unsigned int precision = TYPE_PRECISION (type);
3071 tmask = build_int_2 (~0, ~0);
3072 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
3073 force_fit_type (tmask, 0);
3075 tree_int_cst_equal (mask,
3076 const_binop (RSHIFT_EXPR,
3077 const_binop (LSHIFT_EXPR, tmask,
3078 size_int (precision - size),
3080 size_int (precision - size), 0));
3083 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3084 represents the sign bit of EXP's type. If EXP represents a sign
3085 or zero extension, also test VAL against the unextended type.
3086 The return value is the (sub)expression whose sign bit is VAL,
3087 or NULL_TREE otherwise. */
3090 sign_bit_p (tree exp, tree val)
3092 unsigned HOST_WIDE_INT mask_lo, lo;
3093 HOST_WIDE_INT mask_hi, hi;
3097 /* Tree EXP must have an integral type. */
3098 t = TREE_TYPE (exp);
3099 if (! INTEGRAL_TYPE_P (t))
3102 /* Tree VAL must be an integer constant. */
3103 if (TREE_CODE (val) != INTEGER_CST
3104 || TREE_CONSTANT_OVERFLOW (val))
3107 width = TYPE_PRECISION (t);
3108 if (width > HOST_BITS_PER_WIDE_INT)
3110 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3113 mask_hi = ((unsigned HOST_WIDE_INT) -1
3114 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3120 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3123 mask_lo = ((unsigned HOST_WIDE_INT) -1
3124 >> (HOST_BITS_PER_WIDE_INT - width));
3127 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3128 treat VAL as if it were unsigned. */
3129 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3130 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3133 /* Handle extension from a narrower type. */
3134 if (TREE_CODE (exp) == NOP_EXPR
3135 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3136 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3141 /* Subroutine for fold_truthop: determine if an operand is simple enough
3142 to be evaluated unconditionally. */
3145 simple_operand_p (tree exp)
3147 /* Strip any conversions that don't change the machine mode. */
3148 while ((TREE_CODE (exp) == NOP_EXPR
3149 || TREE_CODE (exp) == CONVERT_EXPR)
3150 && (TYPE_MODE (TREE_TYPE (exp))
3151 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3152 exp = TREE_OPERAND (exp, 0);
3154 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3156 && ! TREE_ADDRESSABLE (exp)
3157 && ! TREE_THIS_VOLATILE (exp)
3158 && ! DECL_NONLOCAL (exp)
3159 /* Don't regard global variables as simple. They may be
3160 allocated in ways unknown to the compiler (shared memory,
3161 #pragma weak, etc). */
3162 && ! TREE_PUBLIC (exp)
3163 && ! DECL_EXTERNAL (exp)
3164 /* Loading a static variable is unduly expensive, but global
3165 registers aren't expensive. */
3166 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3169 /* The following functions are subroutines to fold_range_test and allow it to
3170 try to change a logical combination of comparisons into a range test.
3173 X == 2 || X == 3 || X == 4 || X == 5
3177 (unsigned) (X - 2) <= 3
3179 We describe each set of comparisons as being either inside or outside
3180 a range, using a variable named like IN_P, and then describe the
3181 range with a lower and upper bound. If one of the bounds is omitted,
3182 it represents either the highest or lowest value of the type.
3184 In the comments below, we represent a range by two numbers in brackets
3185 preceded by a "+" to designate being inside that range, or a "-" to
3186 designate being outside that range, so the condition can be inverted by
3187 flipping the prefix. An omitted bound is represented by a "-". For
3188 example, "- [-, 10]" means being outside the range starting at the lowest
3189 possible value and ending at 10, in other words, being greater than 10.
3190 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3193 We set up things so that the missing bounds are handled in a consistent
3194 manner so neither a missing bound nor "true" and "false" need to be
3195 handled using a special case. */
3197 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3198 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3199 and UPPER1_P are nonzero if the respective argument is an upper bound
3200 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3201 must be specified for a comparison. ARG1 will be converted to ARG0's
3202 type if both are specified. */
3205 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3206 tree arg1, int upper1_p)
3212 /* If neither arg represents infinity, do the normal operation.
3213 Else, if not a comparison, return infinity. Else handle the special
3214 comparison rules. Note that most of the cases below won't occur, but
3215 are handled for consistency. */
3217 if (arg0 != 0 && arg1 != 0)
3219 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3220 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3222 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3225 if (TREE_CODE_CLASS (code) != '<')
3228 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3229 for neither. In real maths, we cannot assume open ended ranges are
3230 the same. But, this is computer arithmetic, where numbers are finite.
3231 We can therefore make the transformation of any unbounded range with
3232 the value Z, Z being greater than any representable number. This permits
3233 us to treat unbounded ranges as equal. */
3234 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3235 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3239 result = sgn0 == sgn1;
3242 result = sgn0 != sgn1;
3245 result = sgn0 < sgn1;
3248 result = sgn0 <= sgn1;
3251 result = sgn0 > sgn1;
3254 result = sgn0 >= sgn1;
3260 return fold_convert (type, result ? integer_one_node : integer_zero_node);
3263 /* Given EXP, a logical expression, set the range it is testing into
3264 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3265 actually being tested. *PLOW and *PHIGH will be made of the same type
3266 as the returned expression. If EXP is not a comparison, we will most
3267 likely not be returning a useful value and range. */
3270 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3272 enum tree_code code;
3273 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3274 tree orig_type = NULL_TREE;
3276 tree low, high, n_low, n_high;
3278 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3279 and see if we can refine the range. Some of the cases below may not
3280 happen, but it doesn't seem worth worrying about this. We "continue"
3281 the outer loop when we've changed something; otherwise we "break"
3282 the switch, which will "break" the while. */
3285 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3289 code = TREE_CODE (exp);
3291 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3293 if (first_rtl_op (code) > 0)
3294 arg0 = TREE_OPERAND (exp, 0);
3295 if (TREE_CODE_CLASS (code) == '<'
3296 || TREE_CODE_CLASS (code) == '1'
3297 || TREE_CODE_CLASS (code) == '2')
3298 type = TREE_TYPE (arg0);
3299 if (TREE_CODE_CLASS (code) == '2'
3300 || TREE_CODE_CLASS (code) == '<'
3301 || (TREE_CODE_CLASS (code) == 'e'
3302 && TREE_CODE_LENGTH (code) > 1))
3303 arg1 = TREE_OPERAND (exp, 1);
3306 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3307 lose a cast by accident. */
3308 if (type != NULL_TREE && orig_type == NULL_TREE)
3313 case TRUTH_NOT_EXPR:
3314 in_p = ! in_p, exp = arg0;
3317 case EQ_EXPR: case NE_EXPR:
3318 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3319 /* We can only do something if the range is testing for zero
3320 and if the second operand is an integer constant. Note that
3321 saying something is "in" the range we make is done by
3322 complementing IN_P since it will set in the initial case of
3323 being not equal to zero; "out" is leaving it alone. */
3324 if (low == 0 || high == 0
3325 || ! integer_zerop (low) || ! integer_zerop (high)
3326 || TREE_CODE (arg1) != INTEGER_CST)
3331 case NE_EXPR: /* - [c, c] */
3334 case EQ_EXPR: /* + [c, c] */
3335 in_p = ! in_p, low = high = arg1;
3337 case GT_EXPR: /* - [-, c] */
3338 low = 0, high = arg1;
3340 case GE_EXPR: /* + [c, -] */
3341 in_p = ! in_p, low = arg1, high = 0;
3343 case LT_EXPR: /* - [c, -] */
3344 low = arg1, high = 0;
3346 case LE_EXPR: /* + [-, c] */
3347 in_p = ! in_p, low = 0, high = arg1;
3355 /* If this is an unsigned comparison, we also know that EXP is
3356 greater than or equal to zero. We base the range tests we make
3357 on that fact, so we record it here so we can parse existing
3359 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3361 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3362 1, fold_convert (type, integer_zero_node),
3366 in_p = n_in_p, low = n_low, high = n_high;
3368 /* If the high bound is missing, but we have a nonzero low
3369 bound, reverse the range so it goes from zero to the low bound
3371 if (high == 0 && low && ! integer_zerop (low))
3374 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3375 integer_one_node, 0);
3376 low = fold_convert (type, integer_zero_node);
3382 /* (-x) IN [a,b] -> x in [-b, -a] */
3383 n_low = range_binop (MINUS_EXPR, type,
3384 fold_convert (type, integer_zero_node),
3386 n_high = range_binop (MINUS_EXPR, type,
3387 fold_convert (type, integer_zero_node),
3389 low = n_low, high = n_high;
3395 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3396 fold_convert (type, integer_one_node));
3399 case PLUS_EXPR: case MINUS_EXPR:
3400 if (TREE_CODE (arg1) != INTEGER_CST)
3403 /* If EXP is signed, any overflow in the computation is undefined,
3404 so we don't worry about it so long as our computations on
3405 the bounds don't overflow. For unsigned, overflow is defined
3406 and this is exactly the right thing. */
3407 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3408 type, low, 0, arg1, 0);
3409 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3410 type, high, 1, arg1, 0);
3411 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3412 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3415 /* Check for an unsigned range which has wrapped around the maximum
3416 value thus making n_high < n_low, and normalize it. */
3417 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3419 low = range_binop (PLUS_EXPR, type, n_high, 0,
3420 integer_one_node, 0);
3421 high = range_binop (MINUS_EXPR, type, n_low, 0,
3422 integer_one_node, 0);
3424 /* If the range is of the form +/- [ x+1, x ], we won't
3425 be able to normalize it. But then, it represents the
3426 whole range or the empty set, so make it
3428 if (tree_int_cst_equal (n_low, low)
3429 && tree_int_cst_equal (n_high, high))
3435 low = n_low, high = n_high;
3440 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3441 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3444 if (! INTEGRAL_TYPE_P (type)
3445 || (low != 0 && ! int_fits_type_p (low, type))
3446 || (high != 0 && ! int_fits_type_p (high, type)))
3449 n_low = low, n_high = high;
3452 n_low = fold_convert (type, n_low);
3455 n_high = fold_convert (type, n_high);
3457 /* If we're converting from an unsigned to a signed type,
3458 we will be doing the comparison as unsigned. The tests above
3459 have already verified that LOW and HIGH are both positive.
3461 So we have to make sure that the original unsigned value will
3462 be interpreted as positive. */
3463 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3465 tree equiv_type = (*lang_hooks.types.type_for_mode)
3466 (TYPE_MODE (type), 1);
3469 /* A range without an upper bound is, naturally, unbounded.
3470 Since convert would have cropped a very large value, use
3471 the max value for the destination type. */
3473 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3474 : TYPE_MAX_VALUE (type);
3476 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3477 high_positive = fold (build (RSHIFT_EXPR, type,
3481 integer_one_node)));
3483 /* If the low bound is specified, "and" the range with the
3484 range for which the original unsigned value will be
3488 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3489 1, n_low, n_high, 1,
3490 fold_convert (type, integer_zero_node),
3494 in_p = (n_in_p == in_p);
3498 /* Otherwise, "or" the range with the range of the input
3499 that will be interpreted as negative. */
3500 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3501 0, n_low, n_high, 1,
3502 fold_convert (type, integer_zero_node),
3506 in_p = (in_p != n_in_p);
3511 low = n_low, high = n_high;
3521 /* If EXP is a constant, we can evaluate whether this is true or false. */
3522 if (TREE_CODE (exp) == INTEGER_CST)
3524 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3526 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3532 *pin_p = in_p, *plow = low, *phigh = high;
3536 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3537 type, TYPE, return an expression to test if EXP is in (or out of, depending
3538 on IN_P) the range. */
3541 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3543 tree etype = TREE_TYPE (exp);
3547 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3548 return invert_truthvalue (value);
3550 if (low == 0 && high == 0)
3551 return fold_convert (type, integer_one_node);
3554 return fold (build (LE_EXPR, type, exp, high));
3557 return fold (build (GE_EXPR, type, exp, low));
3559 if (operand_equal_p (low, high, 0))
3560 return fold (build (EQ_EXPR, type, exp, low));
3562 if (integer_zerop (low))
3564 if (! TREE_UNSIGNED (etype))
3566 etype = (*lang_hooks.types.unsigned_type) (etype);
3567 high = fold_convert (etype, high);
3568 exp = fold_convert (etype, exp);
3570 return build_range_check (type, exp, 1, 0, high);
3573 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3574 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3576 unsigned HOST_WIDE_INT lo;
3580 prec = TYPE_PRECISION (etype);
3581 if (prec <= HOST_BITS_PER_WIDE_INT)
3584 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3588 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3589 lo = (unsigned HOST_WIDE_INT) -1;
3592 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3594 if (TREE_UNSIGNED (etype))
3596 etype = (*lang_hooks.types.signed_type) (etype);
3597 exp = fold_convert (etype, exp);
3599 return fold (build (GT_EXPR, type, exp,
3600 fold_convert (etype, integer_zero_node)));
3604 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3605 && ! TREE_OVERFLOW (value))
3606 return build_range_check (type,
3607 fold (build (MINUS_EXPR, etype, exp, low)),
3608 1, fold_convert (etype, integer_zero_node),
3614 /* Given two ranges, see if we can merge them into one. Return 1 if we
3615 can, 0 if we can't. Set the output range into the specified parameters. */
3618 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3619 tree high0, int in1_p, tree low1, tree high1)
3627 int lowequal = ((low0 == 0 && low1 == 0)
3628 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3629 low0, 0, low1, 0)));
3630 int highequal = ((high0 == 0 && high1 == 0)
3631 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3632 high0, 1, high1, 1)));
3634 /* Make range 0 be the range that starts first, or ends last if they
3635 start at the same value. Swap them if it isn't. */
3636 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3639 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3640 high1, 1, high0, 1))))
3642 temp = in0_p, in0_p = in1_p, in1_p = temp;
3643 tem = low0, low0 = low1, low1 = tem;
3644 tem = high0, high0 = high1, high1 = tem;
3647 /* Now flag two cases, whether the ranges are disjoint or whether the
3648 second range is totally subsumed in the first. Note that the tests
3649 below are simplified by the ones above. */
3650 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3651 high0, 1, low1, 0));
3652 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3653 high1, 1, high0, 1));
3655 /* We now have four cases, depending on whether we are including or
3656 excluding the two ranges. */
3659 /* If they don't overlap, the result is false. If the second range
3660 is a subset it is the result. Otherwise, the range is from the start
3661 of the second to the end of the first. */
3663 in_p = 0, low = high = 0;
3665 in_p = 1, low = low1, high = high1;
3667 in_p = 1, low = low1, high = high0;
3670 else if (in0_p && ! in1_p)
3672 /* If they don't overlap, the result is the first range. If they are
3673 equal, the result is false. If the second range is a subset of the
3674 first, and the ranges begin at the same place, we go from just after
3675 the end of the first range to the end of the second. If the second
3676 range is not a subset of the first, or if it is a subset and both
3677 ranges end at the same place, the range starts at the start of the
3678 first range and ends just before the second range.
3679 Otherwise, we can't describe this as a single range. */
3681 in_p = 1, low = low0, high = high0;
3682 else if (lowequal && highequal)
3683 in_p = 0, low = high = 0;
3684 else if (subset && lowequal)
3686 in_p = 1, high = high0;
3687 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3688 integer_one_node, 0);
3690 else if (! subset || highequal)
3692 in_p = 1, low = low0;
3693 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3694 integer_one_node, 0);
3700 else if (! in0_p && in1_p)
3702 /* If they don't overlap, the result is the second range. If the second
3703 is a subset of the first, the result is false. Otherwise,
3704 the range starts just after the first range and ends at the
3705 end of the second. */
3707 in_p = 1, low = low1, high = high1;
3708 else if (subset || highequal)
3709 in_p = 0, low = high = 0;
3712 in_p = 1, high = high1;
3713 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3714 integer_one_node, 0);
3720 /* The case where we are excluding both ranges. Here the complex case
3721 is if they don't overlap. In that case, the only time we have a
3722 range is if they are adjacent. If the second is a subset of the
3723 first, the result is the first. Otherwise, the range to exclude
3724 starts at the beginning of the first range and ends at the end of the
3728 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3729 range_binop (PLUS_EXPR, NULL_TREE,
3731 integer_one_node, 1),
3733 in_p = 0, low = low0, high = high1;
3738 in_p = 0, low = low0, high = high0;
3740 in_p = 0, low = low0, high = high1;
3743 *pin_p = in_p, *plow = low, *phigh = high;
3747 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3748 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3751 /* EXP is some logical combination of boolean tests. See if we can
3752 merge it into some range test. Return the new tree if so. */
3755 fold_range_test (tree exp)
3757 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3758 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3759 int in0_p, in1_p, in_p;
3760 tree low0, low1, low, high0, high1, high;
3761 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3762 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3765 /* If this is an OR operation, invert both sides; we will invert
3766 again at the end. */
3768 in0_p = ! in0_p, in1_p = ! in1_p;
3770 /* If both expressions are the same, if we can merge the ranges, and we
3771 can build the range test, return it or it inverted. If one of the
3772 ranges is always true or always false, consider it to be the same
3773 expression as the other. */
3774 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3775 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3777 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3779 : rhs != 0 ? rhs : integer_zero_node,
3781 return or_op ? invert_truthvalue (tem) : tem;
3783 /* On machines where the branch cost is expensive, if this is a
3784 short-circuited branch and the underlying object on both sides
3785 is the same, make a non-short-circuit operation. */
3786 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3787 && lhs != 0 && rhs != 0
3788 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3789 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3790 && operand_equal_p (lhs, rhs, 0))
3792 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3793 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3794 which cases we can't do this. */
3795 if (simple_operand_p (lhs))
3796 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3797 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3798 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3799 TREE_OPERAND (exp, 1));
3801 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3802 && ! CONTAINS_PLACEHOLDER_P (lhs))
3804 tree common = save_expr (lhs);
3806 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3807 or_op ? ! in0_p : in0_p,
3809 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3810 or_op ? ! in1_p : in1_p,
3812 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3813 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3814 TREE_TYPE (exp), lhs, rhs);
3821 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3822 bit value. Arrange things so the extra bits will be set to zero if and
3823 only if C is signed-extended to its full width. If MASK is nonzero,
3824 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3827 unextend (tree c, int p, int unsignedp, tree mask)
3829 tree type = TREE_TYPE (c);
3830 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3833 if (p == modesize || unsignedp)
3836 /* We work by getting just the sign bit into the low-order bit, then
3837 into the high-order bit, then sign-extend. We then XOR that value
3839 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3840 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3842 /* We must use a signed type in order to get an arithmetic right shift.
3843 However, we must also avoid introducing accidental overflows, so that
3844 a subsequent call to integer_zerop will work. Hence we must
3845 do the type conversion here. At this point, the constant is either
3846 zero or one, and the conversion to a signed type can never overflow.
3847 We could get an overflow if this conversion is done anywhere else. */
3848 if (TREE_UNSIGNED (type))
3849 temp = fold_convert ((*lang_hooks.types.signed_type) (type), temp);
3851 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3852 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3854 temp = const_binop (BIT_AND_EXPR, temp,
3855 fold_convert (TREE_TYPE (c), mask), 0);
3856 /* If necessary, convert the type back to match the type of C. */
3857 if (TREE_UNSIGNED (type))
3858 temp = fold_convert (type, temp);
3860 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3863 /* Find ways of folding logical expressions of LHS and RHS:
3864 Try to merge two comparisons to the same innermost item.
3865 Look for range tests like "ch >= '0' && ch <= '9'".
3866 Look for combinations of simple terms on machines with expensive branches
3867 and evaluate the RHS unconditionally.
3869 For example, if we have p->a == 2 && p->b == 4 and we can make an
3870 object large enough to span both A and B, we can do this with a comparison
3871 against the object ANDed with the a mask.
3873 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3874 operations to do this with one comparison.
3876 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3877 function and the one above.
3879 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3880 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3882 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3885 We return the simplified tree or 0 if no optimization is possible. */
3888 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3890 /* If this is the "or" of two comparisons, we can do something if
3891 the comparisons are NE_EXPR. If this is the "and", we can do something
3892 if the comparisons are EQ_EXPR. I.e.,
3893 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3895 WANTED_CODE is this operation code. For single bit fields, we can
3896 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3897 comparison for one-bit fields. */
3899 enum tree_code wanted_code;
3900 enum tree_code lcode, rcode;
3901 tree ll_arg, lr_arg, rl_arg, rr_arg;
3902 tree ll_inner, lr_inner, rl_inner, rr_inner;
3903 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3904 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3905 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3906 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3907 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3908 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3909 enum machine_mode lnmode, rnmode;
3910 tree ll_mask, lr_mask, rl_mask, rr_mask;
3911 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3912 tree l_const, r_const;
3913 tree lntype, rntype, result;
3914 int first_bit, end_bit;
3917 /* Start by getting the comparison codes. Fail if anything is volatile.
3918 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3919 it were surrounded with a NE_EXPR. */
3921 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3924 lcode = TREE_CODE (lhs);
3925 rcode = TREE_CODE (rhs);
3927 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3928 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3930 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3931 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3933 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3936 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3937 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3939 ll_arg = TREE_OPERAND (lhs, 0);
3940 lr_arg = TREE_OPERAND (lhs, 1);
3941 rl_arg = TREE_OPERAND (rhs, 0);
3942 rr_arg = TREE_OPERAND (rhs, 1);
3944 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3945 if (simple_operand_p (ll_arg)
3946 && simple_operand_p (lr_arg)
3947 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3951 if (operand_equal_p (ll_arg, rl_arg, 0)
3952 && operand_equal_p (lr_arg, rr_arg, 0))
3954 int lcompcode, rcompcode;
3956 lcompcode = comparison_to_compcode (lcode);
3957 rcompcode = comparison_to_compcode (rcode);
3958 compcode = (code == TRUTH_AND_EXPR)
3959 ? lcompcode & rcompcode
3960 : lcompcode | rcompcode;
3962 else if (operand_equal_p (ll_arg, rr_arg, 0)
3963 && operand_equal_p (lr_arg, rl_arg, 0))
3965 int lcompcode, rcompcode;
3967 rcode = swap_tree_comparison (rcode);
3968 lcompcode = comparison_to_compcode (lcode);
3969 rcompcode = comparison_to_compcode (rcode);
3970 compcode = (code == TRUTH_AND_EXPR)
3971 ? lcompcode & rcompcode
3972 : lcompcode | rcompcode;
3977 if (compcode == COMPCODE_TRUE)
3978 return fold_convert (truth_type, integer_one_node);
3979 else if (compcode == COMPCODE_FALSE)
3980 return fold_convert (truth_type, integer_zero_node);
3981 else if (compcode != -1)
3982 return build (compcode_to_comparison (compcode),
3983 truth_type, ll_arg, lr_arg);
3986 /* If the RHS can be evaluated unconditionally and its operands are
3987 simple, it wins to evaluate the RHS unconditionally on machines
3988 with expensive branches. In this case, this isn't a comparison
3989 that can be merged. Avoid doing this if the RHS is a floating-point
3990 comparison since those can trap. */
3992 if (BRANCH_COST >= 2
3993 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3994 && simple_operand_p (rl_arg)
3995 && simple_operand_p (rr_arg))
3997 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3998 if (code == TRUTH_OR_EXPR
3999 && lcode == NE_EXPR && integer_zerop (lr_arg)
4000 && rcode == NE_EXPR && integer_zerop (rr_arg)
4001 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4002 return build (NE_EXPR, truth_type,
4003 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4007 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4008 if (code == TRUTH_AND_EXPR
4009 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4010 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4011 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4012 return build (EQ_EXPR, truth_type,
4013 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4017 return build (code, truth_type, lhs, rhs);
4020 /* See if the comparisons can be merged. Then get all the parameters for
4023 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4024 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4028 ll_inner = decode_field_reference (ll_arg,
4029 &ll_bitsize, &ll_bitpos, &ll_mode,
4030 &ll_unsignedp, &volatilep, &ll_mask,
4032 lr_inner = decode_field_reference (lr_arg,
4033 &lr_bitsize, &lr_bitpos, &lr_mode,
4034 &lr_unsignedp, &volatilep, &lr_mask,
4036 rl_inner = decode_field_reference (rl_arg,
4037 &rl_bitsize, &rl_bitpos, &rl_mode,
4038 &rl_unsignedp, &volatilep, &rl_mask,
4040 rr_inner = decode_field_reference (rr_arg,
4041 &rr_bitsize, &rr_bitpos, &rr_mode,
4042 &rr_unsignedp, &volatilep, &rr_mask,
4045 /* It must be true that the inner operation on the lhs of each
4046 comparison must be the same if we are to be able to do anything.
4047 Then see if we have constants. If not, the same must be true for
4049 if (volatilep || ll_inner == 0 || rl_inner == 0
4050 || ! operand_equal_p (ll_inner, rl_inner, 0))
4053 if (TREE_CODE (lr_arg) == INTEGER_CST
4054 && TREE_CODE (rr_arg) == INTEGER_CST)
4055 l_const = lr_arg, r_const = rr_arg;
4056 else if (lr_inner == 0 || rr_inner == 0
4057 || ! operand_equal_p (lr_inner, rr_inner, 0))
4060 l_const = r_const = 0;
4062 /* If either comparison code is not correct for our logical operation,
4063 fail. However, we can convert a one-bit comparison against zero into
4064 the opposite comparison against that bit being set in the field. */
4066 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4067 if (lcode != wanted_code)
4069 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4071 /* Make the left operand unsigned, since we are only interested
4072 in the value of one bit. Otherwise we are doing the wrong
4081 /* This is analogous to the code for l_const above. */
4082 if (rcode != wanted_code)
4084 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4093 /* After this point all optimizations will generate bit-field
4094 references, which we might not want. */
4095 if (! (*lang_hooks.can_use_bit_fields_p) ())
4098 /* See if we can find a mode that contains both fields being compared on
4099 the left. If we can't, fail. Otherwise, update all constants and masks
4100 to be relative to a field of that size. */
4101 first_bit = MIN (ll_bitpos, rl_bitpos);
4102 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4103 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4104 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4106 if (lnmode == VOIDmode)
4109 lnbitsize = GET_MODE_BITSIZE (lnmode);
4110 lnbitpos = first_bit & ~ (lnbitsize - 1);
4111 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
4112 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4114 if (BYTES_BIG_ENDIAN)
4116 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4117 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4120 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4121 size_int (xll_bitpos), 0);
4122 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4123 size_int (xrl_bitpos), 0);
4127 l_const = fold_convert (lntype, l_const);
4128 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4129 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4130 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4131 fold (build1 (BIT_NOT_EXPR,
4135 warning ("comparison is always %d", wanted_code == NE_EXPR);
4137 return fold_convert (truth_type,
4138 wanted_code == NE_EXPR
4139 ? integer_one_node : integer_zero_node);
4144 r_const = fold_convert (lntype, r_const);
4145 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4146 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4147 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4148 fold (build1 (BIT_NOT_EXPR,
4152 warning ("comparison is always %d", wanted_code == NE_EXPR);
4154 return fold_convert (truth_type,
4155 wanted_code == NE_EXPR
4156 ? integer_one_node : integer_zero_node);
4160 /* If the right sides are not constant, do the same for it. Also,
4161 disallow this optimization if a size or signedness mismatch occurs
4162 between the left and right sides. */
4165 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4166 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4167 /* Make sure the two fields on the right
4168 correspond to the left without being swapped. */
4169 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4172 first_bit = MIN (lr_bitpos, rr_bitpos);
4173 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4174 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4175 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4177 if (rnmode == VOIDmode)
4180 rnbitsize = GET_MODE_BITSIZE (rnmode);
4181 rnbitpos = first_bit & ~ (rnbitsize - 1);
4182 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
4183 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4185 if (BYTES_BIG_ENDIAN)
4187 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4188 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4191 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4192 size_int (xlr_bitpos), 0);
4193 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4194 size_int (xrr_bitpos), 0);
4196 /* Make a mask that corresponds to both fields being compared.
4197 Do this for both items being compared. If the operands are the
4198 same size and the bits being compared are in the same position
4199 then we can do this by masking both and comparing the masked
4201 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4202 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4203 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4205 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4206 ll_unsignedp || rl_unsignedp);
4207 if (! all_ones_mask_p (ll_mask, lnbitsize))
4208 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4210 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4211 lr_unsignedp || rr_unsignedp);
4212 if (! all_ones_mask_p (lr_mask, rnbitsize))
4213 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4215 return build (wanted_code, truth_type, lhs, rhs);
4218 /* There is still another way we can do something: If both pairs of
4219 fields being compared are adjacent, we may be able to make a wider
4220 field containing them both.
4222 Note that we still must mask the lhs/rhs expressions. Furthermore,
4223 the mask must be shifted to account for the shift done by
4224 make_bit_field_ref. */
4225 if ((ll_bitsize + ll_bitpos == rl_bitpos
4226 && lr_bitsize + lr_bitpos == rr_bitpos)
4227 || (ll_bitpos == rl_bitpos + rl_bitsize
4228 && lr_bitpos == rr_bitpos + rr_bitsize))
4232 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4233 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4234 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4235 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4237 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4238 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4239 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4240 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4242 /* Convert to the smaller type before masking out unwanted bits. */
4244 if (lntype != rntype)
4246 if (lnbitsize > rnbitsize)
4248 lhs = fold_convert (rntype, lhs);
4249 ll_mask = fold_convert (rntype, ll_mask);
4252 else if (lnbitsize < rnbitsize)
4254 rhs = fold_convert (lntype, rhs);
4255 lr_mask = fold_convert (lntype, lr_mask);
4260 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4261 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4263 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4264 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4266 return build (wanted_code, truth_type, lhs, rhs);
4272 /* Handle the case of comparisons with constants. If there is something in
4273 common between the masks, those bits of the constants must be the same.
4274 If not, the condition is always false. Test for this to avoid generating
4275 incorrect code below. */
4276 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4277 if (! integer_zerop (result)
4278 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4279 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4281 if (wanted_code == NE_EXPR)
4283 warning ("`or' of unmatched not-equal tests is always 1");
4284 return fold_convert (truth_type, integer_one_node);
4288 warning ("`and' of mutually exclusive equal-tests is always 0");
4289 return fold_convert (truth_type, integer_zero_node);
4293 /* Construct the expression we will return. First get the component
4294 reference we will make. Unless the mask is all ones the width of
4295 that field, perform the mask operation. Then compare with the
4297 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4298 ll_unsignedp || rl_unsignedp);
4300 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4301 if (! all_ones_mask_p (ll_mask, lnbitsize))
4302 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4304 return build (wanted_code, truth_type, result,
4305 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4308 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4312 optimize_minmax_comparison (tree t)
4314 tree type = TREE_TYPE (t);
4315 tree arg0 = TREE_OPERAND (t, 0);
4316 enum tree_code op_code;
4317 tree comp_const = TREE_OPERAND (t, 1);
4319 int consts_equal, consts_lt;
4322 STRIP_SIGN_NOPS (arg0);
4324 op_code = TREE_CODE (arg0);
4325 minmax_const = TREE_OPERAND (arg0, 1);
4326 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4327 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4328 inner = TREE_OPERAND (arg0, 0);
4330 /* If something does not permit us to optimize, return the original tree. */
4331 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4332 || TREE_CODE (comp_const) != INTEGER_CST
4333 || TREE_CONSTANT_OVERFLOW (comp_const)
4334 || TREE_CODE (minmax_const) != INTEGER_CST
4335 || TREE_CONSTANT_OVERFLOW (minmax_const))
4338 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4339 and GT_EXPR, doing the rest with recursive calls using logical
4341 switch (TREE_CODE (t))
4343 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4345 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4349 fold (build (TRUTH_ORIF_EXPR, type,
4350 optimize_minmax_comparison
4351 (build (EQ_EXPR, type, arg0, comp_const)),
4352 optimize_minmax_comparison
4353 (build (GT_EXPR, type, arg0, comp_const))));
4356 if (op_code == MAX_EXPR && consts_equal)
4357 /* MAX (X, 0) == 0 -> X <= 0 */
4358 return fold (build (LE_EXPR, type, inner, comp_const));
4360 else if (op_code == MAX_EXPR && consts_lt)
4361 /* MAX (X, 0) == 5 -> X == 5 */
4362 return fold (build (EQ_EXPR, type, inner, comp_const));
4364 else if (op_code == MAX_EXPR)
4365 /* MAX (X, 0) == -1 -> false */
4366 return omit_one_operand (type, integer_zero_node, inner);
4368 else if (consts_equal)
4369 /* MIN (X, 0) == 0 -> X >= 0 */
4370 return fold (build (GE_EXPR, type, inner, comp_const));
4373 /* MIN (X, 0) == 5 -> false */
4374 return omit_one_operand (type, integer_zero_node, inner);
4377 /* MIN (X, 0) == -1 -> X == -1 */
4378 return fold (build (EQ_EXPR, type, inner, comp_const));
4381 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4382 /* MAX (X, 0) > 0 -> X > 0
4383 MAX (X, 0) > 5 -> X > 5 */
4384 return fold (build (GT_EXPR, type, inner, comp_const));
4386 else if (op_code == MAX_EXPR)
4387 /* MAX (X, 0) > -1 -> true */
4388 return omit_one_operand (type, integer_one_node, inner);
4390 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4391 /* MIN (X, 0) > 0 -> false
4392 MIN (X, 0) > 5 -> false */
4393 return omit_one_operand (type, integer_zero_node, inner);
4396 /* MIN (X, 0) > -1 -> X > -1 */
4397 return fold (build (GT_EXPR, type, inner, comp_const));
4404 /* T is an integer expression that is being multiplied, divided, or taken a
4405 modulus (CODE says which and what kind of divide or modulus) by a
4406 constant C. See if we can eliminate that operation by folding it with
4407 other operations already in T. WIDE_TYPE, if non-null, is a type that
4408 should be used for the computation if wider than our type.
4410 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4411 (X * 2) + (Y * 4). We must, however, be assured that either the original
4412 expression would not overflow or that overflow is undefined for the type
4413 in the language in question.
4415 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4416 the machine has a multiply-accumulate insn or that this is part of an
4417 addressing calculation.
4419 If we return a non-null expression, it is an equivalent form of the
4420 original computation, but need not be in the original type. */
4423 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4425 /* To avoid exponential search depth, refuse to allow recursion past
4426 three levels. Beyond that (1) it's highly unlikely that we'll find
4427 something interesting and (2) we've probably processed it before
4428 when we built the inner expression. */
4437 ret = extract_muldiv_1 (t, c, code, wide_type);
4444 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4446 tree type = TREE_TYPE (t);
4447 enum tree_code tcode = TREE_CODE (t);
4448 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4449 > GET_MODE_SIZE (TYPE_MODE (type)))
4450 ? wide_type : type);
4452 int same_p = tcode == code;
4453 tree op0 = NULL_TREE, op1 = NULL_TREE;
4455 /* Don't deal with constants of zero here; they confuse the code below. */
4456 if (integer_zerop (c))
4459 if (TREE_CODE_CLASS (tcode) == '1')
4460 op0 = TREE_OPERAND (t, 0);
4462 if (TREE_CODE_CLASS (tcode) == '2')
4463 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4465 /* Note that we need not handle conditional operations here since fold
4466 already handles those cases. So just do arithmetic here. */
4470 /* For a constant, we can always simplify if we are a multiply
4471 or (for divide and modulus) if it is a multiple of our constant. */
4472 if (code == MULT_EXPR
4473 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4474 return const_binop (code, fold_convert (ctype, t),
4475 fold_convert (ctype, c), 0);
4478 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4479 /* If op0 is an expression ... */
4480 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4481 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4482 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4483 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4484 /* ... and is unsigned, and its type is smaller than ctype,
4485 then we cannot pass through as widening. */
4486 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4487 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4488 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4489 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4490 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4491 /* ... or its type is larger than ctype,
4492 then we cannot pass through this truncation. */
4493 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4494 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4495 /* ... or signedness changes for division or modulus,
4496 then we cannot pass through this conversion. */
4497 || (code != MULT_EXPR
4498 && (TREE_UNSIGNED (ctype)
4499 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4502 /* Pass the constant down and see if we can make a simplification. If
4503 we can, replace this expression with the inner simplification for
4504 possible later conversion to our or some other type. */
4505 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4506 && TREE_CODE (t2) == INTEGER_CST
4507 && ! TREE_CONSTANT_OVERFLOW (t2)
4508 && (0 != (t1 = extract_muldiv (op0, t2, code,
4510 ? ctype : NULL_TREE))))
4514 case NEGATE_EXPR: case ABS_EXPR:
4515 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4516 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4519 case MIN_EXPR: case MAX_EXPR:
4520 /* If widening the type changes the signedness, then we can't perform
4521 this optimization as that changes the result. */
4522 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4525 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4526 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4527 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4529 if (tree_int_cst_sgn (c) < 0)
4530 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4532 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4533 fold_convert (ctype, t2)));
4537 case WITH_RECORD_EXPR:
4538 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4539 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4540 TREE_OPERAND (t, 1));
4543 case LSHIFT_EXPR: case RSHIFT_EXPR:
4544 /* If the second operand is constant, this is a multiplication
4545 or floor division, by a power of two, so we can treat it that
4546 way unless the multiplier or divisor overflows. */
4547 if (TREE_CODE (op1) == INTEGER_CST
4548 /* const_binop may not detect overflow correctly,
4549 so check for it explicitly here. */
4550 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4551 && TREE_INT_CST_HIGH (op1) == 0
4552 && 0 != (t1 = fold_convert (ctype,
4553 const_binop (LSHIFT_EXPR,
4556 && ! TREE_OVERFLOW (t1))
4557 return extract_muldiv (build (tcode == LSHIFT_EXPR
4558 ? MULT_EXPR : FLOOR_DIV_EXPR,
4559 ctype, fold_convert (ctype, op0), t1),
4560 c, code, wide_type);
4563 case PLUS_EXPR: case MINUS_EXPR:
4564 /* See if we can eliminate the operation on both sides. If we can, we
4565 can return a new PLUS or MINUS. If we can't, the only remaining
4566 cases where we can do anything are if the second operand is a
4568 t1 = extract_muldiv (op0, c, code, wide_type);
4569 t2 = extract_muldiv (op1, c, code, wide_type);
4570 if (t1 != 0 && t2 != 0
4571 && (code == MULT_EXPR
4572 /* If not multiplication, we can only do this if both operands
4573 are divisible by c. */
4574 || (multiple_of_p (ctype, op0, c)
4575 && multiple_of_p (ctype, op1, c))))
4576 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4577 fold_convert (ctype, t2)));
4579 /* If this was a subtraction, negate OP1 and set it to be an addition.
4580 This simplifies the logic below. */
4581 if (tcode == MINUS_EXPR)
4582 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4584 if (TREE_CODE (op1) != INTEGER_CST)
4587 /* If either OP1 or C are negative, this optimization is not safe for
4588 some of the division and remainder types while for others we need
4589 to change the code. */
4590 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4592 if (code == CEIL_DIV_EXPR)
4593 code = FLOOR_DIV_EXPR;
4594 else if (code == FLOOR_DIV_EXPR)
4595 code = CEIL_DIV_EXPR;
4596 else if (code != MULT_EXPR
4597 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4601 /* If it's a multiply or a division/modulus operation of a multiple
4602 of our constant, do the operation and verify it doesn't overflow. */
4603 if (code == MULT_EXPR
4604 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4606 op1 = const_binop (code, fold_convert (ctype, op1),
4607 fold_convert (ctype, c), 0);
4608 /* We allow the constant to overflow with wrapping semantics. */
4610 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4616 /* If we have an unsigned type is not a sizetype, we cannot widen
4617 the operation since it will change the result if the original
4618 computation overflowed. */
4619 if (TREE_UNSIGNED (ctype)
4620 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4624 /* If we were able to eliminate our operation from the first side,
4625 apply our operation to the second side and reform the PLUS. */
4626 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4627 return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4629 /* The last case is if we are a multiply. In that case, we can
4630 apply the distributive law to commute the multiply and addition
4631 if the multiplication of the constants doesn't overflow. */
4632 if (code == MULT_EXPR)
4633 return fold (build (tcode, ctype,
4634 fold (build (code, ctype,
4635 fold_convert (ctype, op0),
4636 fold_convert (ctype, c))),
4642 /* We have a special case here if we are doing something like
4643 (C * 8) % 4 since we know that's zero. */
4644 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4645 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4646 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4647 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4648 return omit_one_operand (type, integer_zero_node, op0);
4650 /* ... fall through ... */
4652 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4653 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4654 /* If we can extract our operation from the LHS, do so and return a
4655 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4656 do something only if the second operand is a constant. */
4658 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4659 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4660 fold_convert (ctype, op1)));
4661 else if (tcode == MULT_EXPR && code == MULT_EXPR
4662 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4663 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4664 fold_convert (ctype, t1)));
4665 else if (TREE_CODE (op1) != INTEGER_CST)
4668 /* If these are the same operation types, we can associate them
4669 assuming no overflow. */
4671 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4672 fold_convert (ctype, c), 0))
4673 && ! TREE_OVERFLOW (t1))
4674 return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4676 /* If these operations "cancel" each other, we have the main
4677 optimizations of this pass, which occur when either constant is a
4678 multiple of the other, in which case we replace this with either an
4679 operation or CODE or TCODE.
4681 If we have an unsigned type that is not a sizetype, we cannot do
4682 this since it will change the result if the original computation
4684 if ((! TREE_UNSIGNED (ctype)
4685 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4687 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4688 || (tcode == MULT_EXPR
4689 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4690 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4692 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4693 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4694 fold_convert (ctype,
4695 const_binop (TRUNC_DIV_EXPR,
4697 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4698 return fold (build (code, ctype, fold_convert (ctype, op0),
4699 fold_convert (ctype,
4700 const_binop (TRUNC_DIV_EXPR,
4712 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4713 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4714 that we may sometimes modify the tree. */
4717 strip_compound_expr (tree t, tree s)
4719 enum tree_code code = TREE_CODE (t);
4721 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4722 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4723 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4724 return TREE_OPERAND (t, 1);
4726 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4727 don't bother handling any other types. */
4728 else if (code == COND_EXPR)
4730 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4731 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4732 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4734 else if (TREE_CODE_CLASS (code) == '1')
4735 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4736 else if (TREE_CODE_CLASS (code) == '<'
4737 || TREE_CODE_CLASS (code) == '2')
4739 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4740 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4746 /* Return a node which has the indicated constant VALUE (either 0 or
4747 1), and is of the indicated TYPE. */
4750 constant_boolean_node (int value, tree type)
4752 if (type == integer_type_node)
4753 return value ? integer_one_node : integer_zero_node;
4754 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4755 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4759 tree t = build_int_2 (value, 0);
4761 TREE_TYPE (t) = type;
4766 /* Utility function for the following routine, to see how complex a nesting of
4767 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4768 we don't care (to avoid spending too much time on complex expressions.). */
4771 count_cond (tree expr, int lim)
4775 if (TREE_CODE (expr) != COND_EXPR)
4780 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4781 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4782 return MIN (lim, 1 + ctrue + cfalse);
4785 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4786 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4787 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4788 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4789 COND is the first argument to CODE; otherwise (as in the example
4790 given here), it is the second argument. TYPE is the type of the
4791 original expression. */
4794 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4795 tree cond, tree arg, int cond_first_p)
4797 tree test, true_value, false_value;
4798 tree lhs = NULL_TREE;
4799 tree rhs = NULL_TREE;
4800 /* In the end, we'll produce a COND_EXPR. Both arms of the
4801 conditional expression will be binary operations. The left-hand
4802 side of the expression to be executed if the condition is true
4803 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4804 of the expression to be executed if the condition is true will be
4805 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4806 but apply to the expression to be executed if the conditional is
4812 /* These are the codes to use for the left-hand side and right-hand
4813 side of the COND_EXPR. Normally, they are the same as CODE. */
4814 enum tree_code lhs_code = code;
4815 enum tree_code rhs_code = code;
4816 /* And these are the types of the expressions. */
4817 tree lhs_type = type;
4818 tree rhs_type = type;
4823 true_rhs = false_rhs = &arg;
4824 true_lhs = &true_value;
4825 false_lhs = &false_value;
4829 true_lhs = false_lhs = &arg;
4830 true_rhs = &true_value;
4831 false_rhs = &false_value;
4834 if (TREE_CODE (cond) == COND_EXPR)
4836 test = TREE_OPERAND (cond, 0);
4837 true_value = TREE_OPERAND (cond, 1);
4838 false_value = TREE_OPERAND (cond, 2);
4839 /* If this operand throws an expression, then it does not make
4840 sense to try to perform a logical or arithmetic operation
4841 involving it. Instead of building `a + throw 3' for example,
4842 we simply build `a, throw 3'. */
4843 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4847 lhs_code = COMPOUND_EXPR;
4848 lhs_type = void_type_node;
4853 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4857 rhs_code = COMPOUND_EXPR;
4858 rhs_type = void_type_node;
4866 tree testtype = TREE_TYPE (cond);
4868 true_value = fold_convert (testtype, integer_one_node);
4869 false_value = fold_convert (testtype, integer_zero_node);
4872 /* If ARG is complex we want to make sure we only evaluate it once. Though
4873 this is only required if it is volatile, it might be more efficient even
4874 if it is not. However, if we succeed in folding one part to a constant,
4875 we do not need to make this SAVE_EXPR. Since we do this optimization
4876 primarily to see if we do end up with constant and this SAVE_EXPR
4877 interferes with later optimizations, suppressing it when we can is
4880 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4881 do so. Don't try to see if the result is a constant if an arm is a
4882 COND_EXPR since we get exponential behavior in that case. */
4884 if (saved_expr_p (arg))
4886 else if (lhs == 0 && rhs == 0
4887 && !TREE_CONSTANT (arg)
4888 && (*lang_hooks.decls.global_bindings_p) () == 0
4889 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4890 || TREE_SIDE_EFFECTS (arg)))
4892 if (TREE_CODE (true_value) != COND_EXPR)
4893 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4895 if (TREE_CODE (false_value) != COND_EXPR)
4896 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4898 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4899 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4901 arg = save_expr (arg);
4903 save = saved_expr_p (arg);
4908 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4910 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4912 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4914 /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4915 ahead of the COND_EXPR we made. Otherwise we would have it only
4916 evaluated in one branch, with the other branch using the result
4917 but missing the evaluation code. Beware that the save_expr call
4918 above might not return a SAVE_EXPR, so testing the TREE_CODE
4919 of ARG is not enough to decide here. Â */
4921 return build (COMPOUND_EXPR, type,
4922 fold_convert (void_type_node, arg),
4923 strip_compound_expr (test, arg));
4925 return fold_convert (type, test);
4929 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4931 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4932 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4933 ADDEND is the same as X.
4935 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4936 and finite. The problematic cases are when X is zero, and its mode
4937 has signed zeros. In the case of rounding towards -infinity,
4938 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4939 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4942 fold_real_zero_addition_p (tree type, tree addend, int negate)
4944 if (!real_zerop (addend))
4947 /* Don't allow the fold with -fsignaling-nans. */
4948 if (HONOR_SNANS (TYPE_MODE (type)))
4951 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4952 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4955 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4956 if (TREE_CODE (addend) == REAL_CST
4957 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4960 /* The mode has signed zeros, and we have to honor their sign.
4961 In this situation, there is only one case we can return true for.
4962 X - 0 is the same as X unless rounding towards -infinity is
4964 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4967 /* Subroutine of fold() that checks comparisons of built-in math
4968 functions against real constants.
4970 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4971 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4972 is the type of the result and ARG0 and ARG1 are the operands of the
4973 comparison. ARG1 must be a TREE_REAL_CST.
4975 The function returns the constant folded tree if a simplification
4976 can be made, and NULL_TREE otherwise. */
4979 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4980 tree type, tree arg0, tree arg1)
4984 if (fcode == BUILT_IN_SQRT
4985 || fcode == BUILT_IN_SQRTF
4986 || fcode == BUILT_IN_SQRTL)
4988 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4989 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4991 c = TREE_REAL_CST (arg1);
4992 if (REAL_VALUE_NEGATIVE (c))
4994 /* sqrt(x) < y is always false, if y is negative. */
4995 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4996 return omit_one_operand (type,
4997 fold_convert (type, integer_zero_node),
5000 /* sqrt(x) > y is always true, if y is negative and we
5001 don't care about NaNs, i.e. negative values of x. */
5002 if (code == NE_EXPR || !HONOR_NANS (mode))
5003 return omit_one_operand (type,
5004 fold_convert (type, integer_one_node),
5007 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5008 return fold (build (GE_EXPR, type, arg,
5009 build_real (TREE_TYPE (arg), dconst0)));
5011 else if (code == GT_EXPR || code == GE_EXPR)
5015 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5016 real_convert (&c2, mode, &c2);
5018 if (REAL_VALUE_ISINF (c2))
5020 /* sqrt(x) > y is x == +Inf, when y is very large. */
5021 if (HONOR_INFINITIES (mode))
5022 return fold (build (EQ_EXPR, type, arg,
5023 build_real (TREE_TYPE (arg), c2)));
5025 /* sqrt(x) > y is always false, when y is very large
5026 and we don't care about infinities. */
5027 return omit_one_operand (type,
5028 fold_convert (type, integer_zero_node),
5032 /* sqrt(x) > c is the same as x > c*c. */
5033 return fold (build (code, type, arg,
5034 build_real (TREE_TYPE (arg), c2)));
5036 else if (code == LT_EXPR || code == LE_EXPR)
5040 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5041 real_convert (&c2, mode, &c2);
5043 if (REAL_VALUE_ISINF (c2))
5045 /* sqrt(x) < y is always true, when y is a very large
5046 value and we don't care about NaNs or Infinities. */
5047 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5048 return omit_one_operand (type,
5049 fold_convert (type, integer_one_node),
5052 /* sqrt(x) < y is x != +Inf when y is very large and we
5053 don't care about NaNs. */
5054 if (! HONOR_NANS (mode))
5055 return fold (build (NE_EXPR, type, arg,
5056 build_real (TREE_TYPE (arg), c2)));
5058 /* sqrt(x) < y is x >= 0 when y is very large and we
5059 don't care about Infinities. */
5060 if (! HONOR_INFINITIES (mode))
5061 return fold (build (GE_EXPR, type, arg,
5062 build_real (TREE_TYPE (arg), dconst0)));
5064 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5065 if ((*lang_hooks.decls.global_bindings_p) () != 0
5066 || CONTAINS_PLACEHOLDER_P (arg))
5069 arg = save_expr (arg);
5070 return fold (build (TRUTH_ANDIF_EXPR, type,
5071 fold (build (GE_EXPR, type, arg,
5072 build_real (TREE_TYPE (arg),
5074 fold (build (NE_EXPR, type, arg,
5075 build_real (TREE_TYPE (arg),
5079 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5080 if (! HONOR_NANS (mode))
5081 return fold (build (code, type, arg,
5082 build_real (TREE_TYPE (arg), c2)));
5084 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5085 if ((*lang_hooks.decls.global_bindings_p) () == 0
5086 && ! CONTAINS_PLACEHOLDER_P (arg))
5088 arg = save_expr (arg);
5089 return fold (build (TRUTH_ANDIF_EXPR, type,
5090 fold (build (GE_EXPR, type, arg,
5091 build_real (TREE_TYPE (arg),
5093 fold (build (code, type, arg,
5094 build_real (TREE_TYPE (arg),
5103 /* Subroutine of fold() that optimizes comparisons against Infinities,
5104 either +Inf or -Inf.
5106 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5107 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5108 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5110 The function returns the constant folded tree if a simplification
5111 can be made, and NULL_TREE otherwise. */
5114 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5116 enum machine_mode mode;
5117 REAL_VALUE_TYPE max;
5121 mode = TYPE_MODE (TREE_TYPE (arg0));
5123 /* For negative infinity swap the sense of the comparison. */
5124 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5126 code = swap_tree_comparison (code);
5131 /* x > +Inf is always false, if with ignore sNANs. */
5132 if (HONOR_SNANS (mode))
5134 return omit_one_operand (type,
5135 fold_convert (type, integer_zero_node),
5139 /* x <= +Inf is always true, if we don't case about NaNs. */
5140 if (! HONOR_NANS (mode))
5141 return omit_one_operand (type,
5142 fold_convert (type, integer_one_node),
5145 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5146 if ((*lang_hooks.decls.global_bindings_p) () == 0
5147 && ! CONTAINS_PLACEHOLDER_P (arg0))
5149 arg0 = save_expr (arg0);
5150 return fold (build (EQ_EXPR, type, arg0, arg0));
5156 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5157 real_maxval (&max, neg, mode);
5158 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5159 arg0, build_real (TREE_TYPE (arg0), max)));
5162 /* x < +Inf is always equal to x <= DBL_MAX. */
5163 real_maxval (&max, neg, mode);
5164 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5165 arg0, build_real (TREE_TYPE (arg0), max)));
5168 /* x != +Inf is always equal to !(x > DBL_MAX). */
5169 real_maxval (&max, neg, mode);
5170 if (! HONOR_NANS (mode))
5171 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5172 arg0, build_real (TREE_TYPE (arg0), max)));
5173 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5174 arg0, build_real (TREE_TYPE (arg0), max)));
5175 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5184 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5185 equality/inequality test, then return a simplified form of
5186 the test using shifts and logical operations. Otherwise return
5187 NULL. TYPE is the desired result type. */
5190 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5193 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5195 if (code == TRUTH_NOT_EXPR)
5197 code = TREE_CODE (arg0);
5198 if (code != NE_EXPR && code != EQ_EXPR)
5201 /* Extract the arguments of the EQ/NE. */
5202 arg1 = TREE_OPERAND (arg0, 1);
5203 arg0 = TREE_OPERAND (arg0, 0);
5205 /* This requires us to invert the code. */
5206 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5209 /* If this is testing a single bit, we can optimize the test. */
5210 if ((code == NE_EXPR || code == EQ_EXPR)
5211 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5212 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5214 tree inner = TREE_OPERAND (arg0, 0);
5215 tree type = TREE_TYPE (arg0);
5216 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5217 enum machine_mode operand_mode = TYPE_MODE (type);
5219 tree signed_type, unsigned_type, intermediate_type;
5222 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5223 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5224 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5225 if (arg00 != NULL_TREE)
5227 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
5228 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5229 fold_convert (stype, arg00),
5230 fold_convert (stype, integer_zero_node)));
5233 /* At this point, we know that arg0 is not testing the sign bit. */
5234 if (TYPE_PRECISION (type) - 1 == bitnum)
5237 /* Otherwise we have (A & C) != 0 where C is a single bit,
5238 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5239 Similarly for (A & C) == 0. */
5241 /* If INNER is a right shift of a constant and it plus BITNUM does
5242 not overflow, adjust BITNUM and INNER. */
5243 if (TREE_CODE (inner) == RSHIFT_EXPR
5244 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5245 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5246 && bitnum < TYPE_PRECISION (type)
5247 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5248 bitnum - TYPE_PRECISION (type)))
5250 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5251 inner = TREE_OPERAND (inner, 0);
5254 /* If we are going to be able to omit the AND below, we must do our
5255 operations as unsigned. If we must use the AND, we have a choice.
5256 Normally unsigned is faster, but for some machines signed is. */
5257 #ifdef LOAD_EXTEND_OP
5258 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5263 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
5264 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
5265 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5266 inner = fold_convert (intermediate_type, inner);
5269 inner = build (RSHIFT_EXPR, intermediate_type,
5270 inner, size_int (bitnum));
5272 if (code == EQ_EXPR)
5273 inner = build (BIT_XOR_EXPR, intermediate_type,
5274 inner, integer_one_node);
5276 /* Put the AND last so it can combine with more things. */
5277 inner = build (BIT_AND_EXPR, intermediate_type,
5278 inner, integer_one_node);
5280 /* Make sure to return the proper type. */
5281 inner = fold_convert (result_type, inner);
5288 /* Check whether we are allowed to reorder operands arg0 and arg1,
5289 such that the evaluation of arg1 occurs before arg0. */
5292 reorder_operands_p (tree arg0, tree arg1)
5294 if (! flag_evaluation_order)
5296 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5298 return ! TREE_SIDE_EFFECTS (arg0)
5299 && ! TREE_SIDE_EFFECTS (arg1);
5302 /* Test whether it is preferable two swap two operands, ARG0 and
5303 ARG1, for example because ARG0 is an integer constant and ARG1
5304 isn't. If REORDER is true, only recommend swapping if we can
5305 evaluate the operands in reverse order. */
5308 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5310 STRIP_SIGN_NOPS (arg0);
5311 STRIP_SIGN_NOPS (arg1);
5313 if (TREE_CODE (arg1) == INTEGER_CST)
5315 if (TREE_CODE (arg0) == INTEGER_CST)
5318 if (TREE_CODE (arg1) == REAL_CST)
5320 if (TREE_CODE (arg0) == REAL_CST)
5323 if (TREE_CODE (arg1) == COMPLEX_CST)
5325 if (TREE_CODE (arg0) == COMPLEX_CST)
5328 if (TREE_CONSTANT (arg1))
5330 if (TREE_CONSTANT (arg0))
5336 if (reorder && flag_evaluation_order
5337 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5348 /* Perform constant folding and related simplification of EXPR.
5349 The related simplifications include x*1 => x, x*0 => 0, etc.,
5350 and application of the associative law.
5351 NOP_EXPR conversions may be removed freely (as long as we
5352 are careful not to change the C type of the overall expression)
5353 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5354 but we can constant-fold them if they have constant operands. */
5356 #ifdef ENABLE_FOLD_CHECKING
5357 # define fold(x) fold_1 (x)
5358 static tree fold_1 (tree);
5364 tree t = expr, orig_t;
5365 tree t1 = NULL_TREE;
5367 tree type = TREE_TYPE (expr);
5368 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5369 enum tree_code code = TREE_CODE (t);
5370 int kind = TREE_CODE_CLASS (code);
5372 /* WINS will be nonzero when the switch is done
5373 if all operands are constant. */
5376 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5377 Likewise for a SAVE_EXPR that's already been evaluated. */
5378 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5381 /* Return right away if a constant. */
5387 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5391 /* Special case for conversion ops that can have fixed point args. */
5392 arg0 = TREE_OPERAND (t, 0);
5394 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5396 STRIP_SIGN_NOPS (arg0);
5398 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5399 subop = TREE_REALPART (arg0);
5403 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5404 && TREE_CODE (subop) != REAL_CST)
5405 /* Note that TREE_CONSTANT isn't enough:
5406 static var addresses are constant but we can't
5407 do arithmetic on them. */
5410 else if (IS_EXPR_CODE_CLASS (kind))
5412 int len = first_rtl_op (code);
5414 for (i = 0; i < len; i++)
5416 tree op = TREE_OPERAND (t, i);
5420 continue; /* Valid for CALL_EXPR, at least. */
5422 if (kind == '<' || code == RSHIFT_EXPR)
5424 /* Signedness matters here. Perhaps we can refine this
5426 STRIP_SIGN_NOPS (op);
5429 /* Strip any conversions that don't change the mode. */
5432 if (TREE_CODE (op) == COMPLEX_CST)
5433 subop = TREE_REALPART (op);
5437 if (TREE_CODE (subop) != INTEGER_CST
5438 && TREE_CODE (subop) != REAL_CST)
5439 /* Note that TREE_CONSTANT isn't enough:
5440 static var addresses are constant but we can't
5441 do arithmetic on them. */
5451 /* If this is a commutative operation, and ARG0 is a constant, move it
5452 to ARG1 to reduce the number of tests below. */
5453 if (commutative_tree_code (code)
5454 && tree_swap_operands_p (arg0, arg1, true))
5455 return fold (build (code, type, arg1, arg0));
5457 /* Now WINS is set as described above,
5458 ARG0 is the first operand of EXPR,
5459 and ARG1 is the second operand (if it has more than one operand).
5461 First check for cases where an arithmetic operation is applied to a
5462 compound, conditional, or comparison operation. Push the arithmetic
5463 operation inside the compound or conditional to see if any folding
5464 can then be done. Convert comparison to conditional for this purpose.
5465 The also optimizes non-constant cases that used to be done in
5468 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5469 one of the operands is a comparison and the other is a comparison, a
5470 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5471 code below would make the expression more complex. Change it to a
5472 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5473 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5475 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5476 || code == EQ_EXPR || code == NE_EXPR)
5477 && ((truth_value_p (TREE_CODE (arg0))
5478 && (truth_value_p (TREE_CODE (arg1))
5479 || (TREE_CODE (arg1) == BIT_AND_EXPR
5480 && integer_onep (TREE_OPERAND (arg1, 1)))))
5481 || (truth_value_p (TREE_CODE (arg1))
5482 && (truth_value_p (TREE_CODE (arg0))
5483 || (TREE_CODE (arg0) == BIT_AND_EXPR
5484 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5486 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5487 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5491 if (code == EQ_EXPR)
5492 t = invert_truthvalue (t);
5497 if (TREE_CODE_CLASS (code) == '1')
5499 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5500 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5501 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5502 else if (TREE_CODE (arg0) == COND_EXPR)
5504 tree arg01 = TREE_OPERAND (arg0, 1);
5505 tree arg02 = TREE_OPERAND (arg0, 2);
5506 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5507 arg01 = fold (build1 (code, type, arg01));
5508 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5509 arg02 = fold (build1 (code, type, arg02));
5510 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5513 /* If this was a conversion, and all we did was to move into
5514 inside the COND_EXPR, bring it back out. But leave it if
5515 it is a conversion from integer to integer and the
5516 result precision is no wider than a word since such a
5517 conversion is cheap and may be optimized away by combine,
5518 while it couldn't if it were outside the COND_EXPR. Then return
5519 so we don't get into an infinite recursion loop taking the
5520 conversion out and then back in. */
5522 if ((code == NOP_EXPR || code == CONVERT_EXPR
5523 || code == NON_LVALUE_EXPR)
5524 && TREE_CODE (t) == COND_EXPR
5525 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5526 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5527 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5528 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5529 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5530 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5531 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5533 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5534 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5535 t = build1 (code, type,
5537 TREE_TYPE (TREE_OPERAND
5538 (TREE_OPERAND (t, 1), 0)),
5539 TREE_OPERAND (t, 0),
5540 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5541 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5544 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5545 return fold (build (COND_EXPR, type, arg0,
5546 fold (build1 (code, type, integer_one_node)),
5547 fold (build1 (code, type, integer_zero_node))));
5549 else if (TREE_CODE_CLASS (code) == '<'
5550 && TREE_CODE (arg0) == COMPOUND_EXPR)
5551 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5552 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5553 else if (TREE_CODE_CLASS (code) == '<'
5554 && TREE_CODE (arg1) == COMPOUND_EXPR)
5555 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5556 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5557 else if (TREE_CODE_CLASS (code) == '2'
5558 || TREE_CODE_CLASS (code) == '<')
5560 if (TREE_CODE (arg1) == COMPOUND_EXPR
5561 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5562 && ! TREE_SIDE_EFFECTS (arg0))
5563 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5564 fold (build (code, type,
5565 arg0, TREE_OPERAND (arg1, 1))));
5566 else if ((TREE_CODE (arg1) == COND_EXPR
5567 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5568 && TREE_CODE_CLASS (code) != '<'))
5569 && (TREE_CODE (arg0) != COND_EXPR
5570 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5571 && (! TREE_SIDE_EFFECTS (arg0)
5572 || ((*lang_hooks.decls.global_bindings_p) () == 0
5573 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5575 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5576 /*cond_first_p=*/0);
5577 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5578 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5579 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5580 else if ((TREE_CODE (arg0) == COND_EXPR
5581 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5582 && TREE_CODE_CLASS (code) != '<'))
5583 && (TREE_CODE (arg1) != COND_EXPR
5584 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5585 && (! TREE_SIDE_EFFECTS (arg1)
5586 || ((*lang_hooks.decls.global_bindings_p) () == 0
5587 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5589 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5590 /*cond_first_p=*/1);
5604 return fold (DECL_INITIAL (t));
5609 case FIX_TRUNC_EXPR:
5611 case FIX_FLOOR_EXPR:
5612 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5613 return TREE_OPERAND (t, 0);
5615 /* Handle cases of two conversions in a row. */
5616 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5617 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5619 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5620 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5621 tree final_type = TREE_TYPE (t);
5622 int inside_int = INTEGRAL_TYPE_P (inside_type);
5623 int inside_ptr = POINTER_TYPE_P (inside_type);
5624 int inside_float = FLOAT_TYPE_P (inside_type);
5625 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5626 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5627 int inter_int = INTEGRAL_TYPE_P (inter_type);
5628 int inter_ptr = POINTER_TYPE_P (inter_type);
5629 int inter_float = FLOAT_TYPE_P (inter_type);
5630 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5631 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5632 int final_int = INTEGRAL_TYPE_P (final_type);
5633 int final_ptr = POINTER_TYPE_P (final_type);
5634 int final_float = FLOAT_TYPE_P (final_type);
5635 unsigned int final_prec = TYPE_PRECISION (final_type);
5636 int final_unsignedp = TREE_UNSIGNED (final_type);
5638 /* In addition to the cases of two conversions in a row
5639 handled below, if we are converting something to its own
5640 type via an object of identical or wider precision, neither
5641 conversion is needed. */
5642 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5643 && ((inter_int && final_int) || (inter_float && final_float))
5644 && inter_prec >= final_prec)
5645 return fold (build1 (code, final_type,
5646 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5648 /* Likewise, if the intermediate and final types are either both
5649 float or both integer, we don't need the middle conversion if
5650 it is wider than the final type and doesn't change the signedness
5651 (for integers). Avoid this if the final type is a pointer
5652 since then we sometimes need the inner conversion. Likewise if
5653 the outer has a precision not equal to the size of its mode. */
5654 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5655 || (inter_float && inside_float))
5656 && inter_prec >= inside_prec
5657 && (inter_float || inter_unsignedp == inside_unsignedp)
5658 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5659 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5661 return fold (build1 (code, final_type,
5662 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5664 /* If we have a sign-extension of a zero-extended value, we can
5665 replace that by a single zero-extension. */
5666 if (inside_int && inter_int && final_int
5667 && inside_prec < inter_prec && inter_prec < final_prec
5668 && inside_unsignedp && !inter_unsignedp)
5669 return fold (build1 (code, final_type,
5670 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5672 /* Two conversions in a row are not needed unless:
5673 - some conversion is floating-point (overstrict for now), or
5674 - the intermediate type is narrower than both initial and
5676 - the intermediate type and innermost type differ in signedness,
5677 and the outermost type is wider than the intermediate, or
5678 - the initial type is a pointer type and the precisions of the
5679 intermediate and final types differ, or
5680 - the final type is a pointer type and the precisions of the
5681 initial and intermediate types differ. */
5682 if (! inside_float && ! inter_float && ! final_float
5683 && (inter_prec > inside_prec || inter_prec > final_prec)
5684 && ! (inside_int && inter_int
5685 && inter_unsignedp != inside_unsignedp
5686 && inter_prec < final_prec)
5687 && ((inter_unsignedp && inter_prec > inside_prec)
5688 == (final_unsignedp && final_prec > inter_prec))
5689 && ! (inside_ptr && inter_prec != final_prec)
5690 && ! (final_ptr && inside_prec != inter_prec)
5691 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5692 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5694 return fold (build1 (code, final_type,
5695 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5698 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5699 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5700 /* Detect assigning a bitfield. */
5701 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5702 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5704 /* Don't leave an assignment inside a conversion
5705 unless assigning a bitfield. */
5706 tree prev = TREE_OPERAND (t, 0);
5709 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5710 /* First do the assignment, then return converted constant. */
5711 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5716 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5717 constants (if x has signed type, the sign bit cannot be set
5718 in c). This folds extension into the BIT_AND_EXPR. */
5719 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5720 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5721 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5722 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5724 tree and = TREE_OPERAND (t, 0);
5725 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5728 if (TREE_UNSIGNED (TREE_TYPE (and))
5729 || (TYPE_PRECISION (TREE_TYPE (t))
5730 <= TYPE_PRECISION (TREE_TYPE (and))))
5732 else if (TYPE_PRECISION (TREE_TYPE (and1))
5733 <= HOST_BITS_PER_WIDE_INT
5734 && host_integerp (and1, 1))
5736 unsigned HOST_WIDE_INT cst;
5738 cst = tree_low_cst (and1, 1);
5739 cst &= (HOST_WIDE_INT) -1
5740 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5741 change = (cst == 0);
5742 #ifdef LOAD_EXTEND_OP
5744 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5747 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5748 and0 = fold_convert (uns, and0);
5749 and1 = fold_convert (uns, and1);
5754 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5755 fold_convert (TREE_TYPE (t), and0),
5756 fold_convert (TREE_TYPE (t), and1)));
5759 tem = fold_convert_const (code, TREE_TYPE (t), arg0);
5760 return tem ? tem : t;
5762 case VIEW_CONVERT_EXPR:
5763 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5764 return build1 (VIEW_CONVERT_EXPR, type,
5765 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5769 if (TREE_CODE (arg0) == CONSTRUCTOR
5770 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5772 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5779 if (TREE_CONSTANT (t) != wins)
5783 TREE_CONSTANT (t) = wins;
5788 if (negate_expr_p (arg0))
5789 return fold_convert (type, negate_expr (arg0));
5795 if (TREE_CODE (arg0) == INTEGER_CST)
5797 /* If the value is unsigned, then the absolute value is
5798 the same as the ordinary value. */
5799 if (TREE_UNSIGNED (type))
5801 /* Similarly, if the value is non-negative. */
5802 else if (INT_CST_LT (integer_minus_one_node, arg0))
5804 /* If the value is negative, then the absolute value is
5808 unsigned HOST_WIDE_INT low;
5810 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5811 TREE_INT_CST_HIGH (arg0),
5813 t = build_int_2 (low, high);
5814 TREE_TYPE (t) = type;
5816 = (TREE_OVERFLOW (arg0)
5817 | force_fit_type (t, overflow));
5818 TREE_CONSTANT_OVERFLOW (t)
5819 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5822 else if (TREE_CODE (arg0) == REAL_CST)
5824 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5825 t = build_real (type,
5826 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5829 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5830 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5831 /* Convert fabs((double)float) into (double)fabsf(float). */
5832 else if (TREE_CODE (arg0) == NOP_EXPR
5833 && TREE_CODE (type) == REAL_TYPE)
5835 tree targ0 = strip_float_extensions (arg0);
5837 return fold_convert (type, fold (build1 (ABS_EXPR,
5841 else if (tree_expr_nonnegative_p (arg0))
5846 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5847 return fold_convert (type, arg0);
5848 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5849 return build (COMPLEX_EXPR, type,
5850 TREE_OPERAND (arg0, 0),
5851 negate_expr (TREE_OPERAND (arg0, 1)));
5852 else if (TREE_CODE (arg0) == COMPLEX_CST)
5853 return build_complex (type, TREE_REALPART (arg0),
5854 negate_expr (TREE_IMAGPART (arg0)));
5855 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5856 return fold (build (TREE_CODE (arg0), type,
5857 fold (build1 (CONJ_EXPR, type,
5858 TREE_OPERAND (arg0, 0))),
5859 fold (build1 (CONJ_EXPR,
5860 type, TREE_OPERAND (arg0, 1)))));
5861 else if (TREE_CODE (arg0) == CONJ_EXPR)
5862 return TREE_OPERAND (arg0, 0);
5868 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5869 ~ TREE_INT_CST_HIGH (arg0));
5870 TREE_TYPE (t) = type;
5871 force_fit_type (t, 0);
5872 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5873 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5875 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5876 return TREE_OPERAND (arg0, 0);
5880 /* A + (-B) -> A - B */
5881 if (TREE_CODE (arg1) == NEGATE_EXPR)
5882 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5883 /* (-A) + B -> B - A */
5884 if (TREE_CODE (arg0) == NEGATE_EXPR)
5885 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5886 else if (! FLOAT_TYPE_P (type))
5888 if (integer_zerop (arg1))
5889 return non_lvalue (fold_convert (type, arg0));
5891 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5892 with a constant, and the two constants have no bits in common,
5893 we should treat this as a BIT_IOR_EXPR since this may produce more
5895 if (TREE_CODE (arg0) == BIT_AND_EXPR
5896 && TREE_CODE (arg1) == BIT_AND_EXPR
5897 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5898 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5899 && integer_zerop (const_binop (BIT_AND_EXPR,
5900 TREE_OPERAND (arg0, 1),
5901 TREE_OPERAND (arg1, 1), 0)))
5903 code = BIT_IOR_EXPR;
5907 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5908 (plus (plus (mult) (mult)) (foo)) so that we can
5909 take advantage of the factoring cases below. */
5910 if ((TREE_CODE (arg0) == PLUS_EXPR
5911 && TREE_CODE (arg1) == MULT_EXPR)
5912 || (TREE_CODE (arg1) == PLUS_EXPR
5913 && TREE_CODE (arg0) == MULT_EXPR))
5915 tree parg0, parg1, parg, marg;
5917 if (TREE_CODE (arg0) == PLUS_EXPR)
5918 parg = arg0, marg = arg1;
5920 parg = arg1, marg = arg0;
5921 parg0 = TREE_OPERAND (parg, 0);
5922 parg1 = TREE_OPERAND (parg, 1);
5926 if (TREE_CODE (parg0) == MULT_EXPR
5927 && TREE_CODE (parg1) != MULT_EXPR)
5928 return fold (build (PLUS_EXPR, type,
5929 fold (build (PLUS_EXPR, type,
5930 fold_convert (type, parg0),
5931 fold_convert (type, marg))),
5932 fold_convert (type, parg1)));
5933 if (TREE_CODE (parg0) != MULT_EXPR
5934 && TREE_CODE (parg1) == MULT_EXPR)
5935 return fold (build (PLUS_EXPR, type,
5936 fold (build (PLUS_EXPR, type,
5937 fold_convert (type, parg1),
5938 fold_convert (type, marg))),
5939 fold_convert (type, parg0)));
5942 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5944 tree arg00, arg01, arg10, arg11;
5945 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5947 /* (A * C) + (B * C) -> (A+B) * C.
5948 We are most concerned about the case where C is a constant,
5949 but other combinations show up during loop reduction. Since
5950 it is not difficult, try all four possibilities. */
5952 arg00 = TREE_OPERAND (arg0, 0);
5953 arg01 = TREE_OPERAND (arg0, 1);
5954 arg10 = TREE_OPERAND (arg1, 0);
5955 arg11 = TREE_OPERAND (arg1, 1);
5958 if (operand_equal_p (arg01, arg11, 0))
5959 same = arg01, alt0 = arg00, alt1 = arg10;
5960 else if (operand_equal_p (arg00, arg10, 0))
5961 same = arg00, alt0 = arg01, alt1 = arg11;
5962 else if (operand_equal_p (arg00, arg11, 0))
5963 same = arg00, alt0 = arg01, alt1 = arg10;
5964 else if (operand_equal_p (arg01, arg10, 0))
5965 same = arg01, alt0 = arg00, alt1 = arg11;
5967 /* No identical multiplicands; see if we can find a common
5968 power-of-two factor in non-power-of-two multiplies. This
5969 can help in multi-dimensional array access. */
5970 else if (TREE_CODE (arg01) == INTEGER_CST
5971 && TREE_CODE (arg11) == INTEGER_CST
5972 && TREE_INT_CST_HIGH (arg01) == 0
5973 && TREE_INT_CST_HIGH (arg11) == 0)
5975 HOST_WIDE_INT int01, int11, tmp;
5976 int01 = TREE_INT_CST_LOW (arg01);
5977 int11 = TREE_INT_CST_LOW (arg11);
5979 /* Move min of absolute values to int11. */
5980 if ((int01 >= 0 ? int01 : -int01)
5981 < (int11 >= 0 ? int11 : -int11))
5983 tmp = int01, int01 = int11, int11 = tmp;
5984 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5985 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5988 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5990 alt0 = fold (build (MULT_EXPR, type, arg00,
5991 build_int_2 (int01 / int11, 0)));
5998 return fold (build (MULT_EXPR, type,
5999 fold (build (PLUS_EXPR, type, alt0, alt1)),
6005 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6006 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6007 return non_lvalue (fold_convert (type, arg0));
6009 /* Likewise if the operands are reversed. */
6010 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6011 return non_lvalue (fold_convert (type, arg1));
6013 /* Convert x+x into x*2.0. */
6014 if (operand_equal_p (arg0, arg1, 0)
6015 && SCALAR_FLOAT_TYPE_P (type))
6016 return fold (build (MULT_EXPR, type, arg0,
6017 build_real (type, dconst2)));
6019 /* Convert x*c+x into x*(c+1). */
6020 if (flag_unsafe_math_optimizations
6021 && TREE_CODE (arg0) == MULT_EXPR
6022 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6023 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6024 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6028 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6029 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6030 return fold (build (MULT_EXPR, type, arg1,
6031 build_real (type, c)));
6034 /* Convert x+x*c into x*(c+1). */
6035 if (flag_unsafe_math_optimizations
6036 && TREE_CODE (arg1) == MULT_EXPR
6037 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6038 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6039 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6043 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6044 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6045 return fold (build (MULT_EXPR, type, arg0,
6046 build_real (type, c)));
6049 /* Convert x*c1+x*c2 into x*(c1+c2). */
6050 if (flag_unsafe_math_optimizations
6051 && TREE_CODE (arg0) == MULT_EXPR
6052 && TREE_CODE (arg1) == MULT_EXPR
6053 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6054 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6055 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6056 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6057 && operand_equal_p (TREE_OPERAND (arg0, 0),
6058 TREE_OPERAND (arg1, 0), 0))
6060 REAL_VALUE_TYPE c1, c2;
6062 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6063 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6064 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6065 return fold (build (MULT_EXPR, type,
6066 TREE_OPERAND (arg0, 0),
6067 build_real (type, c1)));
6072 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6073 is a rotate of A by C1 bits. */
6074 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6075 is a rotate of A by B bits. */
6077 enum tree_code code0, code1;
6078 code0 = TREE_CODE (arg0);
6079 code1 = TREE_CODE (arg1);
6080 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6081 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6082 && operand_equal_p (TREE_OPERAND (arg0, 0),
6083 TREE_OPERAND (arg1, 0), 0)
6084 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6086 tree tree01, tree11;
6087 enum tree_code code01, code11;
6089 tree01 = TREE_OPERAND (arg0, 1);
6090 tree11 = TREE_OPERAND (arg1, 1);
6091 STRIP_NOPS (tree01);
6092 STRIP_NOPS (tree11);
6093 code01 = TREE_CODE (tree01);
6094 code11 = TREE_CODE (tree11);
6095 if (code01 == INTEGER_CST
6096 && code11 == INTEGER_CST
6097 && TREE_INT_CST_HIGH (tree01) == 0
6098 && TREE_INT_CST_HIGH (tree11) == 0
6099 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6100 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6101 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6102 code0 == LSHIFT_EXPR ? tree01 : tree11);
6103 else if (code11 == MINUS_EXPR)
6105 tree tree110, tree111;
6106 tree110 = TREE_OPERAND (tree11, 0);
6107 tree111 = TREE_OPERAND (tree11, 1);
6108 STRIP_NOPS (tree110);
6109 STRIP_NOPS (tree111);
6110 if (TREE_CODE (tree110) == INTEGER_CST
6111 && 0 == compare_tree_int (tree110,
6113 (TREE_TYPE (TREE_OPERAND
6115 && operand_equal_p (tree01, tree111, 0))
6116 return build ((code0 == LSHIFT_EXPR
6119 type, TREE_OPERAND (arg0, 0), tree01);
6121 else if (code01 == MINUS_EXPR)
6123 tree tree010, tree011;
6124 tree010 = TREE_OPERAND (tree01, 0);
6125 tree011 = TREE_OPERAND (tree01, 1);
6126 STRIP_NOPS (tree010);
6127 STRIP_NOPS (tree011);
6128 if (TREE_CODE (tree010) == INTEGER_CST
6129 && 0 == compare_tree_int (tree010,
6131 (TREE_TYPE (TREE_OPERAND
6133 && operand_equal_p (tree11, tree011, 0))
6134 return build ((code0 != LSHIFT_EXPR
6137 type, TREE_OPERAND (arg0, 0), tree11);
6143 /* In most languages, can't associate operations on floats through
6144 parentheses. Rather than remember where the parentheses were, we
6145 don't associate floats at all, unless the user has specified
6146 -funsafe-math-optimizations. */
6149 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6151 tree var0, con0, lit0, minus_lit0;
6152 tree var1, con1, lit1, minus_lit1;
6154 /* Split both trees into variables, constants, and literals. Then
6155 associate each group together, the constants with literals,
6156 then the result with variables. This increases the chances of
6157 literals being recombined later and of generating relocatable
6158 expressions for the sum of a constant and literal. */
6159 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6160 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6161 code == MINUS_EXPR);
6163 /* Only do something if we found more than two objects. Otherwise,
6164 nothing has changed and we risk infinite recursion. */
6165 if (2 < ((var0 != 0) + (var1 != 0)
6166 + (con0 != 0) + (con1 != 0)
6167 + (lit0 != 0) + (lit1 != 0)
6168 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6170 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6171 if (code == MINUS_EXPR)
6174 var0 = associate_trees (var0, var1, code, type);
6175 con0 = associate_trees (con0, con1, code, type);
6176 lit0 = associate_trees (lit0, lit1, code, type);
6177 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6179 /* Preserve the MINUS_EXPR if the negative part of the literal is
6180 greater than the positive part. Otherwise, the multiplicative
6181 folding code (i.e extract_muldiv) may be fooled in case
6182 unsigned constants are subtracted, like in the following
6183 example: ((X*2 + 4) - 8U)/2. */
6184 if (minus_lit0 && lit0)
6186 if (TREE_CODE (lit0) == INTEGER_CST
6187 && TREE_CODE (minus_lit0) == INTEGER_CST
6188 && tree_int_cst_lt (lit0, minus_lit0))
6190 minus_lit0 = associate_trees (minus_lit0, lit0,
6196 lit0 = associate_trees (lit0, minus_lit0,
6204 return fold_convert (type,
6205 associate_trees (var0, minus_lit0,
6209 con0 = associate_trees (con0, minus_lit0,
6211 return fold_convert (type,
6212 associate_trees (var0, con0,
6217 con0 = associate_trees (con0, lit0, code, type);
6218 return fold_convert (type, associate_trees (var0, con0,
6225 t1 = const_binop (code, arg0, arg1, 0);
6226 if (t1 != NULL_TREE)
6228 /* The return value should always have
6229 the same type as the original expression. */
6230 if (TREE_TYPE (t1) != TREE_TYPE (t))
6231 t1 = fold_convert (TREE_TYPE (t), t1);
6238 /* A - (-B) -> A + B */
6239 if (TREE_CODE (arg1) == NEGATE_EXPR)
6240 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6241 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6242 if (TREE_CODE (arg0) == NEGATE_EXPR
6243 && (FLOAT_TYPE_P (type)
6244 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6245 && negate_expr_p (arg1)
6246 && reorder_operands_p (arg0, arg1))
6247 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6248 TREE_OPERAND (arg0, 0)));
6250 if (! FLOAT_TYPE_P (type))
6252 if (! wins && integer_zerop (arg0))
6253 return negate_expr (fold_convert (type, arg1));
6254 if (integer_zerop (arg1))
6255 return non_lvalue (fold_convert (type, arg0));
6257 /* Fold A - (A & B) into ~B & A. */
6258 if (!TREE_SIDE_EFFECTS (arg0)
6259 && TREE_CODE (arg1) == BIT_AND_EXPR)
6261 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6262 return fold (build (BIT_AND_EXPR, type,
6263 fold (build1 (BIT_NOT_EXPR, type,
6264 TREE_OPERAND (arg1, 0))),
6266 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6267 return fold (build (BIT_AND_EXPR, type,
6268 fold (build1 (BIT_NOT_EXPR, type,
6269 TREE_OPERAND (arg1, 1))),
6273 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6274 any power of 2 minus 1. */
6275 if (TREE_CODE (arg0) == BIT_AND_EXPR
6276 && TREE_CODE (arg1) == BIT_AND_EXPR
6277 && operand_equal_p (TREE_OPERAND (arg0, 0),
6278 TREE_OPERAND (arg1, 0), 0))
6280 tree mask0 = TREE_OPERAND (arg0, 1);
6281 tree mask1 = TREE_OPERAND (arg1, 1);
6282 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6284 if (operand_equal_p (tem, mask1, 0))
6286 tem = fold (build (BIT_XOR_EXPR, type,
6287 TREE_OPERAND (arg0, 0), mask1));
6288 return fold (build (MINUS_EXPR, type, tem, mask1));
6293 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6294 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6295 return non_lvalue (fold_convert (type, arg0));
6297 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6298 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6299 (-ARG1 + ARG0) reduces to -ARG1. */
6300 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6301 return negate_expr (fold_convert (type, arg1));
6303 /* Fold &x - &x. This can happen from &x.foo - &x.
6304 This is unsafe for certain floats even in non-IEEE formats.
6305 In IEEE, it is unsafe because it does wrong for NaNs.
6306 Also note that operand_equal_p is always false if an operand
6309 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6310 && operand_equal_p (arg0, arg1, 0))
6311 return fold_convert (type, integer_zero_node);
6313 /* A - B -> A + (-B) if B is easily negatable. */
6314 if (!wins && negate_expr_p (arg1)
6315 && (FLOAT_TYPE_P (type)
6316 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6317 return fold (build (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6319 if (TREE_CODE (arg0) == MULT_EXPR
6320 && TREE_CODE (arg1) == MULT_EXPR
6321 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6323 /* (A * C) - (B * C) -> (A-B) * C. */
6324 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6325 TREE_OPERAND (arg1, 1), 0))
6326 return fold (build (MULT_EXPR, type,
6327 fold (build (MINUS_EXPR, type,
6328 TREE_OPERAND (arg0, 0),
6329 TREE_OPERAND (arg1, 0))),
6330 TREE_OPERAND (arg0, 1)));
6331 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6332 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6333 TREE_OPERAND (arg1, 0), 0))
6334 return fold (build (MULT_EXPR, type,
6335 TREE_OPERAND (arg0, 0),
6336 fold (build (MINUS_EXPR, type,
6337 TREE_OPERAND (arg0, 1),
6338 TREE_OPERAND (arg1, 1)))));
6344 /* (-A) * (-B) -> A * B */
6345 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6346 return fold (build (MULT_EXPR, type,
6347 TREE_OPERAND (arg0, 0),
6348 negate_expr (arg1)));
6349 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6350 return fold (build (MULT_EXPR, type,
6352 TREE_OPERAND (arg1, 0)));
6354 if (! FLOAT_TYPE_P (type))
6356 if (integer_zerop (arg1))
6357 return omit_one_operand (type, arg1, arg0);
6358 if (integer_onep (arg1))
6359 return non_lvalue (fold_convert (type, arg0));
6361 /* (a * (1 << b)) is (a << b) */
6362 if (TREE_CODE (arg1) == LSHIFT_EXPR
6363 && integer_onep (TREE_OPERAND (arg1, 0)))
6364 return fold (build (LSHIFT_EXPR, type, arg0,
6365 TREE_OPERAND (arg1, 1)));
6366 if (TREE_CODE (arg0) == LSHIFT_EXPR
6367 && integer_onep (TREE_OPERAND (arg0, 0)))
6368 return fold (build (LSHIFT_EXPR, type, arg1,
6369 TREE_OPERAND (arg0, 1)));
6371 if (TREE_CODE (arg1) == INTEGER_CST
6372 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6373 fold_convert (type, arg1),
6375 return fold_convert (type, tem);
6380 /* Maybe fold x * 0 to 0. The expressions aren't the same
6381 when x is NaN, since x * 0 is also NaN. Nor are they the
6382 same in modes with signed zeros, since multiplying a
6383 negative value by 0 gives -0, not +0. */
6384 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6385 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6386 && real_zerop (arg1))
6387 return omit_one_operand (type, arg1, arg0);
6388 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6389 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6390 && real_onep (arg1))
6391 return non_lvalue (fold_convert (type, arg0));
6393 /* Transform x * -1.0 into -x. */
6394 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6395 && real_minus_onep (arg1))
6396 return fold (build1 (NEGATE_EXPR, type, arg0));
6398 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6399 if (flag_unsafe_math_optimizations
6400 && TREE_CODE (arg0) == RDIV_EXPR
6401 && TREE_CODE (arg1) == REAL_CST
6402 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6404 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6407 return fold (build (RDIV_EXPR, type, tem,
6408 TREE_OPERAND (arg0, 1)));
6411 if (flag_unsafe_math_optimizations)
6413 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6414 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6416 /* Optimizations of sqrt(...)*sqrt(...). */
6417 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6418 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6419 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6421 tree sqrtfn, arg, arglist;
6422 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6423 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6425 /* Optimize sqrt(x)*sqrt(x) as x. */
6426 if (operand_equal_p (arg00, arg10, 0)
6427 && ! HONOR_SNANS (TYPE_MODE (type)))
6430 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6431 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6432 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6433 arglist = build_tree_list (NULL_TREE, arg);
6434 return build_function_call_expr (sqrtfn, arglist);
6437 /* Optimize expN(x)*expN(y) as expN(x+y). */
6438 if (fcode0 == fcode1
6439 && (fcode0 == BUILT_IN_EXP
6440 || fcode0 == BUILT_IN_EXPF
6441 || fcode0 == BUILT_IN_EXPL
6442 || fcode0 == BUILT_IN_EXP2
6443 || fcode0 == BUILT_IN_EXP2F
6444 || fcode0 == BUILT_IN_EXP2L
6445 || fcode0 == BUILT_IN_EXP10
6446 || fcode0 == BUILT_IN_EXP10F
6447 || fcode0 == BUILT_IN_EXP10L
6448 || fcode0 == BUILT_IN_POW10
6449 || fcode0 == BUILT_IN_POW10F
6450 || fcode0 == BUILT_IN_POW10L))
6452 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6453 tree arg = build (PLUS_EXPR, type,
6454 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6455 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6456 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6457 return build_function_call_expr (expfn, arglist);
6460 /* Optimizations of pow(...)*pow(...). */
6461 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6462 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6463 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6465 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6466 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6468 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6469 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6472 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6473 if (operand_equal_p (arg01, arg11, 0))
6475 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6476 tree arg = build (MULT_EXPR, type, arg00, arg10);
6477 tree arglist = tree_cons (NULL_TREE, fold (arg),
6478 build_tree_list (NULL_TREE,
6480 return build_function_call_expr (powfn, arglist);
6483 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6484 if (operand_equal_p (arg00, arg10, 0))
6486 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6487 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6488 tree arglist = tree_cons (NULL_TREE, arg00,
6489 build_tree_list (NULL_TREE,
6491 return build_function_call_expr (powfn, arglist);
6495 /* Optimize tan(x)*cos(x) as sin(x). */
6496 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6497 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6498 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6499 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6500 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6501 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6502 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6503 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6511 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6515 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6519 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6525 if (sinfn != NULL_TREE)
6526 return build_function_call_expr (sinfn,
6527 TREE_OPERAND (arg0, 1));
6530 /* Optimize x*pow(x,c) as pow(x,c+1). */
6531 if (fcode1 == BUILT_IN_POW
6532 || fcode1 == BUILT_IN_POWF
6533 || fcode1 == BUILT_IN_POWL)
6535 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6536 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6538 if (TREE_CODE (arg11) == REAL_CST
6539 && ! TREE_CONSTANT_OVERFLOW (arg11)
6540 && operand_equal_p (arg0, arg10, 0))
6542 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6546 c = TREE_REAL_CST (arg11);
6547 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6548 arg = build_real (type, c);
6549 arglist = build_tree_list (NULL_TREE, arg);
6550 arglist = tree_cons (NULL_TREE, arg0, arglist);
6551 return build_function_call_expr (powfn, arglist);
6555 /* Optimize pow(x,c)*x as pow(x,c+1). */
6556 if (fcode0 == BUILT_IN_POW
6557 || fcode0 == BUILT_IN_POWF
6558 || fcode0 == BUILT_IN_POWL)
6560 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6561 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6563 if (TREE_CODE (arg01) == REAL_CST
6564 && ! TREE_CONSTANT_OVERFLOW (arg01)
6565 && operand_equal_p (arg1, arg00, 0))
6567 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6571 c = TREE_REAL_CST (arg01);
6572 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6573 arg = build_real (type, c);
6574 arglist = build_tree_list (NULL_TREE, arg);
6575 arglist = tree_cons (NULL_TREE, arg1, arglist);
6576 return build_function_call_expr (powfn, arglist);
6580 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6582 && operand_equal_p (arg0, arg1, 0))
6586 if (type == double_type_node)
6587 powfn = implicit_built_in_decls[BUILT_IN_POW];
6588 else if (type == float_type_node)
6589 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6590 else if (type == long_double_type_node)
6591 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6597 tree arg = build_real (type, dconst2);
6598 tree arglist = build_tree_list (NULL_TREE, arg);
6599 arglist = tree_cons (NULL_TREE, arg0, arglist);
6600 return build_function_call_expr (powfn, arglist);
6609 if (integer_all_onesp (arg1))
6610 return omit_one_operand (type, arg1, arg0);
6611 if (integer_zerop (arg1))
6612 return non_lvalue (fold_convert (type, arg0));
6613 t1 = distribute_bit_expr (code, type, arg0, arg1);
6614 if (t1 != NULL_TREE)
6617 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6619 This results in more efficient code for machines without a NAND
6620 instruction. Combine will canonicalize to the first form
6621 which will allow use of NAND instructions provided by the
6622 backend if they exist. */
6623 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6624 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6626 return fold (build1 (BIT_NOT_EXPR, type,
6627 build (BIT_AND_EXPR, type,
6628 TREE_OPERAND (arg0, 0),
6629 TREE_OPERAND (arg1, 0))));
6632 /* See if this can be simplified into a rotate first. If that
6633 is unsuccessful continue in the association code. */
6637 if (integer_zerop (arg1))
6638 return non_lvalue (fold_convert (type, arg0));
6639 if (integer_all_onesp (arg1))
6640 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6642 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6643 with a constant, and the two constants have no bits in common,
6644 we should treat this as a BIT_IOR_EXPR since this may produce more
6646 if (TREE_CODE (arg0) == BIT_AND_EXPR
6647 && TREE_CODE (arg1) == BIT_AND_EXPR
6648 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6649 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6650 && integer_zerop (const_binop (BIT_AND_EXPR,
6651 TREE_OPERAND (arg0, 1),
6652 TREE_OPERAND (arg1, 1), 0)))
6654 code = BIT_IOR_EXPR;
6658 /* See if this can be simplified into a rotate first. If that
6659 is unsuccessful continue in the association code. */
6663 if (integer_all_onesp (arg1))
6664 return non_lvalue (fold_convert (type, arg0));
6665 if (integer_zerop (arg1))
6666 return omit_one_operand (type, arg1, arg0);
6667 t1 = distribute_bit_expr (code, type, arg0, arg1);
6668 if (t1 != NULL_TREE)
6670 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6671 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6672 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6675 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6677 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6678 && (~TREE_INT_CST_LOW (arg1)
6679 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6680 return fold_convert (type, TREE_OPERAND (arg0, 0));
6683 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6685 This results in more efficient code for machines without a NOR
6686 instruction. Combine will canonicalize to the first form
6687 which will allow use of NOR instructions provided by the
6688 backend if they exist. */
6689 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6690 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6692 return fold (build1 (BIT_NOT_EXPR, type,
6693 build (BIT_IOR_EXPR, type,
6694 TREE_OPERAND (arg0, 0),
6695 TREE_OPERAND (arg1, 0))));
6701 /* Don't touch a floating-point divide by zero unless the mode
6702 of the constant can represent infinity. */
6703 if (TREE_CODE (arg1) == REAL_CST
6704 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6705 && real_zerop (arg1))
6708 /* (-A) / (-B) -> A / B */
6709 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6710 return fold (build (RDIV_EXPR, type,
6711 TREE_OPERAND (arg0, 0),
6712 negate_expr (arg1)));
6713 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6714 return fold (build (RDIV_EXPR, type,
6716 TREE_OPERAND (arg1, 0)));
6718 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6719 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6720 && real_onep (arg1))
6721 return non_lvalue (fold_convert (type, arg0));
6723 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6724 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6725 && real_minus_onep (arg1))
6726 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6728 /* If ARG1 is a constant, we can convert this to a multiply by the
6729 reciprocal. This does not have the same rounding properties,
6730 so only do this if -funsafe-math-optimizations. We can actually
6731 always safely do it if ARG1 is a power of two, but it's hard to
6732 tell if it is or not in a portable manner. */
6733 if (TREE_CODE (arg1) == REAL_CST)
6735 if (flag_unsafe_math_optimizations
6736 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6738 return fold (build (MULT_EXPR, type, arg0, tem));
6739 /* Find the reciprocal if optimizing and the result is exact. */
6743 r = TREE_REAL_CST (arg1);
6744 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6746 tem = build_real (type, r);
6747 return fold (build (MULT_EXPR, type, arg0, tem));
6751 /* Convert A/B/C to A/(B*C). */
6752 if (flag_unsafe_math_optimizations
6753 && TREE_CODE (arg0) == RDIV_EXPR)
6754 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6755 fold (build (MULT_EXPR, type,
6756 TREE_OPERAND (arg0, 1), arg1))));
6758 /* Convert A/(B/C) to (A/B)*C. */
6759 if (flag_unsafe_math_optimizations
6760 && TREE_CODE (arg1) == RDIV_EXPR)
6761 return fold (build (MULT_EXPR, type,
6762 fold (build (RDIV_EXPR, type, arg0,
6763 TREE_OPERAND (arg1, 0))),
6764 TREE_OPERAND (arg1, 1)));
6766 /* Convert C1/(X*C2) into (C1/C2)/X. */
6767 if (flag_unsafe_math_optimizations
6768 && TREE_CODE (arg1) == MULT_EXPR
6769 && TREE_CODE (arg0) == REAL_CST
6770 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6772 tree tem = const_binop (RDIV_EXPR, arg0,
6773 TREE_OPERAND (arg1, 1), 0);
6775 return fold (build (RDIV_EXPR, type, tem,
6776 TREE_OPERAND (arg1, 0)));
6779 if (flag_unsafe_math_optimizations)
6781 enum built_in_function fcode = builtin_mathfn_code (arg1);
6782 /* Optimize x/expN(y) into x*expN(-y). */
6783 if (fcode == BUILT_IN_EXP
6784 || fcode == BUILT_IN_EXPF
6785 || fcode == BUILT_IN_EXPL
6786 || fcode == BUILT_IN_EXP2
6787 || fcode == BUILT_IN_EXP2F
6788 || fcode == BUILT_IN_EXP2L
6789 || fcode == BUILT_IN_EXP10
6790 || fcode == BUILT_IN_EXP10F
6791 || fcode == BUILT_IN_EXP10L
6792 || fcode == BUILT_IN_POW10
6793 || fcode == BUILT_IN_POW10F
6794 || fcode == BUILT_IN_POW10L)
6796 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6797 tree arg = build1 (NEGATE_EXPR, type,
6798 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6799 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6800 arg1 = build_function_call_expr (expfn, arglist);
6801 return fold (build (MULT_EXPR, type, arg0, arg1));
6804 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6805 if (fcode == BUILT_IN_POW
6806 || fcode == BUILT_IN_POWF
6807 || fcode == BUILT_IN_POWL)
6809 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6810 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6811 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6812 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6813 tree arglist = tree_cons(NULL_TREE, arg10,
6814 build_tree_list (NULL_TREE, neg11));
6815 arg1 = build_function_call_expr (powfn, arglist);
6816 return fold (build (MULT_EXPR, type, arg0, arg1));
6820 if (flag_unsafe_math_optimizations)
6822 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6823 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6825 /* Optimize sin(x)/cos(x) as tan(x). */
6826 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6827 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6828 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6829 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6830 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6834 if (fcode0 == BUILT_IN_SIN)
6835 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6836 else if (fcode0 == BUILT_IN_SINF)
6837 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6838 else if (fcode0 == BUILT_IN_SINL)
6839 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6843 if (tanfn != NULL_TREE)
6844 return build_function_call_expr (tanfn,
6845 TREE_OPERAND (arg0, 1));
6848 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6849 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6850 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6851 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6852 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6853 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6857 if (fcode0 == BUILT_IN_COS)
6858 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6859 else if (fcode0 == BUILT_IN_COSF)
6860 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6861 else if (fcode0 == BUILT_IN_COSL)
6862 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6866 if (tanfn != NULL_TREE)
6868 tree tmp = TREE_OPERAND (arg0, 1);
6869 tmp = build_function_call_expr (tanfn, tmp);
6870 return fold (build (RDIV_EXPR, type,
6871 build_real (type, dconst1),
6876 /* Optimize pow(x,c)/x as pow(x,c-1). */
6877 if (fcode0 == BUILT_IN_POW
6878 || fcode0 == BUILT_IN_POWF
6879 || fcode0 == BUILT_IN_POWL)
6881 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6882 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6883 if (TREE_CODE (arg01) == REAL_CST
6884 && ! TREE_CONSTANT_OVERFLOW (arg01)
6885 && operand_equal_p (arg1, arg00, 0))
6887 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6891 c = TREE_REAL_CST (arg01);
6892 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6893 arg = build_real (type, c);
6894 arglist = build_tree_list (NULL_TREE, arg);
6895 arglist = tree_cons (NULL_TREE, arg1, arglist);
6896 return build_function_call_expr (powfn, arglist);
6902 case TRUNC_DIV_EXPR:
6903 case ROUND_DIV_EXPR:
6904 case FLOOR_DIV_EXPR:
6906 case EXACT_DIV_EXPR:
6907 if (integer_onep (arg1))
6908 return non_lvalue (fold_convert (type, arg0));
6909 if (integer_zerop (arg1))
6912 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6913 operation, EXACT_DIV_EXPR.
6915 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6916 At one time others generated faster code, it's not clear if they do
6917 after the last round to changes to the DIV code in expmed.c. */
6918 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6919 && multiple_of_p (type, arg0, arg1))
6920 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6922 if (TREE_CODE (arg1) == INTEGER_CST
6923 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6925 return fold_convert (type, tem);
6930 case FLOOR_MOD_EXPR:
6931 case ROUND_MOD_EXPR:
6932 case TRUNC_MOD_EXPR:
6933 if (integer_onep (arg1))
6934 return omit_one_operand (type, integer_zero_node, arg0);
6935 if (integer_zerop (arg1))
6938 if (TREE_CODE (arg1) == INTEGER_CST
6939 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6941 return fold_convert (type, tem);
6947 if (integer_all_onesp (arg0))
6948 return omit_one_operand (type, arg0, arg1);
6952 /* Optimize -1 >> x for arithmetic right shifts. */
6953 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6954 return omit_one_operand (type, arg0, arg1);
6955 /* ... fall through ... */
6959 if (integer_zerop (arg1))
6960 return non_lvalue (fold_convert (type, arg0));
6961 if (integer_zerop (arg0))
6962 return omit_one_operand (type, arg0, arg1);
6964 /* Since negative shift count is not well-defined,
6965 don't try to compute it in the compiler. */
6966 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6968 /* Rewrite an LROTATE_EXPR by a constant into an
6969 RROTATE_EXPR by a new constant. */
6970 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6972 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6973 tem = fold_convert (TREE_TYPE (arg1), tem);
6974 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6975 return fold (build (RROTATE_EXPR, type, arg0, tem));
6978 /* If we have a rotate of a bit operation with the rotate count and
6979 the second operand of the bit operation both constant,
6980 permute the two operations. */
6981 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6982 && (TREE_CODE (arg0) == BIT_AND_EXPR
6983 || TREE_CODE (arg0) == BIT_IOR_EXPR
6984 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6985 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6986 return fold (build (TREE_CODE (arg0), type,
6987 fold (build (code, type,
6988 TREE_OPERAND (arg0, 0), arg1)),
6989 fold (build (code, type,
6990 TREE_OPERAND (arg0, 1), arg1))));
6992 /* Two consecutive rotates adding up to the width of the mode can
6994 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6995 && TREE_CODE (arg0) == RROTATE_EXPR
6996 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6997 && TREE_INT_CST_HIGH (arg1) == 0
6998 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6999 && ((TREE_INT_CST_LOW (arg1)
7000 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7001 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7002 return TREE_OPERAND (arg0, 0);
7007 if (operand_equal_p (arg0, arg1, 0))
7008 return omit_one_operand (type, arg0, arg1);
7009 if (INTEGRAL_TYPE_P (type)
7010 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
7011 return omit_one_operand (type, arg1, arg0);
7015 if (operand_equal_p (arg0, arg1, 0))
7016 return omit_one_operand (type, arg0, arg1);
7017 if (INTEGRAL_TYPE_P (type)
7018 && TYPE_MAX_VALUE (type)
7019 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
7020 return omit_one_operand (type, arg1, arg0);
7023 case TRUTH_NOT_EXPR:
7024 /* Note that the operand of this must be an int
7025 and its values must be 0 or 1.
7026 ("true" is a fixed value perhaps depending on the language,
7027 but we don't handle values other than 1 correctly yet.) */
7028 tem = invert_truthvalue (arg0);
7029 /* Avoid infinite recursion. */
7030 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7032 tem = fold_single_bit_test (code, arg0, arg1, type);
7037 return fold_convert (type, tem);
7039 case TRUTH_ANDIF_EXPR:
7040 /* Note that the operands of this must be ints
7041 and their values must be 0 or 1.
7042 ("true" is a fixed value perhaps depending on the language.) */
7043 /* If first arg is constant zero, return it. */
7044 if (integer_zerop (arg0))
7045 return fold_convert (type, arg0);
7046 case TRUTH_AND_EXPR:
7047 /* If either arg is constant true, drop it. */
7048 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7049 return non_lvalue (fold_convert (type, arg1));
7050 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7051 /* Preserve sequence points. */
7052 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7053 return non_lvalue (fold_convert (type, arg0));
7054 /* If second arg is constant zero, result is zero, but first arg
7055 must be evaluated. */
7056 if (integer_zerop (arg1))
7057 return omit_one_operand (type, arg1, arg0);
7058 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7059 case will be handled here. */
7060 if (integer_zerop (arg0))
7061 return omit_one_operand (type, arg0, arg1);
7064 /* We only do these simplifications if we are optimizing. */
7068 /* Check for things like (A || B) && (A || C). We can convert this
7069 to A || (B && C). Note that either operator can be any of the four
7070 truth and/or operations and the transformation will still be
7071 valid. Also note that we only care about order for the
7072 ANDIF and ORIF operators. If B contains side effects, this
7073 might change the truth-value of A. */
7074 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7075 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7076 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7077 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7078 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7079 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7081 tree a00 = TREE_OPERAND (arg0, 0);
7082 tree a01 = TREE_OPERAND (arg0, 1);
7083 tree a10 = TREE_OPERAND (arg1, 0);
7084 tree a11 = TREE_OPERAND (arg1, 1);
7085 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7086 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7087 && (code == TRUTH_AND_EXPR
7088 || code == TRUTH_OR_EXPR));
7090 if (operand_equal_p (a00, a10, 0))
7091 return fold (build (TREE_CODE (arg0), type, a00,
7092 fold (build (code, type, a01, a11))));
7093 else if (commutative && operand_equal_p (a00, a11, 0))
7094 return fold (build (TREE_CODE (arg0), type, a00,
7095 fold (build (code, type, a01, a10))));
7096 else if (commutative && operand_equal_p (a01, a10, 0))
7097 return fold (build (TREE_CODE (arg0), type, a01,
7098 fold (build (code, type, a00, a11))));
7100 /* This case if tricky because we must either have commutative
7101 operators or else A10 must not have side-effects. */
7103 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7104 && operand_equal_p (a01, a11, 0))
7105 return fold (build (TREE_CODE (arg0), type,
7106 fold (build (code, type, a00, a10)),
7110 /* See if we can build a range comparison. */
7111 if (0 != (tem = fold_range_test (t)))
7114 /* Check for the possibility of merging component references. If our
7115 lhs is another similar operation, try to merge its rhs with our
7116 rhs. Then try to merge our lhs and rhs. */
7117 if (TREE_CODE (arg0) == code
7118 && 0 != (tem = fold_truthop (code, type,
7119 TREE_OPERAND (arg0, 1), arg1)))
7120 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7122 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7127 case TRUTH_ORIF_EXPR:
7128 /* Note that the operands of this must be ints
7129 and their values must be 0 or true.
7130 ("true" is a fixed value perhaps depending on the language.) */
7131 /* If first arg is constant true, return it. */
7132 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7133 return fold_convert (type, arg0);
7135 /* If either arg is constant zero, drop it. */
7136 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7137 return non_lvalue (fold_convert (type, arg1));
7138 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7139 /* Preserve sequence points. */
7140 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7141 return non_lvalue (fold_convert (type, arg0));
7142 /* If second arg is constant true, result is true, but we must
7143 evaluate first arg. */
7144 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7145 return omit_one_operand (type, arg1, arg0);
7146 /* Likewise for first arg, but note this only occurs here for
7148 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7149 return omit_one_operand (type, arg0, arg1);
7152 case TRUTH_XOR_EXPR:
7153 /* If either arg is constant zero, drop it. */
7154 if (integer_zerop (arg0))
7155 return non_lvalue (fold_convert (type, arg1));
7156 if (integer_zerop (arg1))
7157 return non_lvalue (fold_convert (type, arg0));
7158 /* If either arg is constant true, this is a logical inversion. */
7159 if (integer_onep (arg0))
7160 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7161 if (integer_onep (arg1))
7162 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7171 /* If one arg is a real or integer constant, put it last. */
7172 if (tree_swap_operands_p (arg0, arg1, true))
7173 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
7175 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7177 tree targ0 = strip_float_extensions (arg0);
7178 tree targ1 = strip_float_extensions (arg1);
7179 tree newtype = TREE_TYPE (targ0);
7181 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7182 newtype = TREE_TYPE (targ1);
7184 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7185 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7186 return fold (build (code, type, fold_convert (newtype, targ0),
7187 fold_convert (newtype, targ1)));
7189 /* (-a) CMP (-b) -> b CMP a */
7190 if (TREE_CODE (arg0) == NEGATE_EXPR
7191 && TREE_CODE (arg1) == NEGATE_EXPR)
7192 return fold (build (code, type, TREE_OPERAND (arg1, 0),
7193 TREE_OPERAND (arg0, 0)));
7195 if (TREE_CODE (arg1) == REAL_CST)
7197 REAL_VALUE_TYPE cst;
7198 cst = TREE_REAL_CST (arg1);
7200 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7201 if (TREE_CODE (arg0) == NEGATE_EXPR)
7203 fold (build (swap_tree_comparison (code), type,
7204 TREE_OPERAND (arg0, 0),
7205 build_real (TREE_TYPE (arg1),
7206 REAL_VALUE_NEGATE (cst))));
7208 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7209 /* a CMP (-0) -> a CMP 0 */
7210 if (REAL_VALUE_MINUS_ZERO (cst))
7211 return fold (build (code, type, arg0,
7212 build_real (TREE_TYPE (arg1), dconst0)));
7214 /* x != NaN is always true, other ops are always false. */
7215 if (REAL_VALUE_ISNAN (cst)
7216 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7218 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7219 return omit_one_operand (type, fold_convert (type, t), arg0);
7222 /* Fold comparisons against infinity. */
7223 if (REAL_VALUE_ISINF (cst))
7225 tem = fold_inf_compare (code, type, arg0, arg1);
7226 if (tem != NULL_TREE)
7231 /* If this is a comparison of a real constant with a PLUS_EXPR
7232 or a MINUS_EXPR of a real constant, we can convert it into a
7233 comparison with a revised real constant as long as no overflow
7234 occurs when unsafe_math_optimizations are enabled. */
7235 if (flag_unsafe_math_optimizations
7236 && TREE_CODE (arg1) == REAL_CST
7237 && (TREE_CODE (arg0) == PLUS_EXPR
7238 || TREE_CODE (arg0) == MINUS_EXPR)
7239 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7240 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7241 ? MINUS_EXPR : PLUS_EXPR,
7242 arg1, TREE_OPERAND (arg0, 1), 0))
7243 && ! TREE_CONSTANT_OVERFLOW (tem))
7244 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7246 /* Likewise, we can simplify a comparison of a real constant with
7247 a MINUS_EXPR whose first operand is also a real constant, i.e.
7248 (c1 - x) < c2 becomes x > c1-c2. */
7249 if (flag_unsafe_math_optimizations
7250 && TREE_CODE (arg1) == REAL_CST
7251 && TREE_CODE (arg0) == MINUS_EXPR
7252 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7253 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7255 && ! TREE_CONSTANT_OVERFLOW (tem))
7256 return fold (build (swap_tree_comparison (code), type,
7257 TREE_OPERAND (arg0, 1), tem));
7259 /* Fold comparisons against built-in math functions. */
7260 if (TREE_CODE (arg1) == REAL_CST
7261 && flag_unsafe_math_optimizations
7262 && ! flag_errno_math)
7264 enum built_in_function fcode = builtin_mathfn_code (arg0);
7266 if (fcode != END_BUILTINS)
7268 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7269 if (tem != NULL_TREE)
7275 /* Convert foo++ == CONST into ++foo == CONST + INCR.
7276 First, see if one arg is constant; find the constant arg
7277 and the other one. */
7279 tree constop = 0, varop = NULL_TREE;
7280 int constopnum = -1;
7282 if (TREE_CONSTANT (arg1))
7283 constopnum = 1, constop = arg1, varop = arg0;
7284 if (TREE_CONSTANT (arg0))
7285 constopnum = 0, constop = arg0, varop = arg1;
7287 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
7289 /* This optimization is invalid for ordered comparisons
7290 if CONST+INCR overflows or if foo+incr might overflow.
7291 This optimization is invalid for floating point due to rounding.
7292 For pointer types we assume overflow doesn't happen. */
7293 if (POINTER_TYPE_P (TREE_TYPE (varop))
7294 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7295 && (code == EQ_EXPR || code == NE_EXPR)))
7298 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
7299 constop, TREE_OPERAND (varop, 1)));
7301 /* Do not overwrite the current varop to be a preincrement,
7302 create a new node so that we won't confuse our caller who
7303 might create trees and throw them away, reusing the
7304 arguments that they passed to build. This shows up in
7305 the THEN or ELSE parts of ?: being postincrements. */
7306 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
7307 TREE_OPERAND (varop, 0),
7308 TREE_OPERAND (varop, 1));
7310 /* If VAROP is a reference to a bitfield, we must mask
7311 the constant by the width of the field. */
7312 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7313 && DECL_BIT_FIELD(TREE_OPERAND
7314 (TREE_OPERAND (varop, 0), 1)))
7317 = TREE_INT_CST_LOW (DECL_SIZE
7319 (TREE_OPERAND (varop, 0), 1)));
7320 tree mask, unsigned_type;
7321 unsigned int precision;
7322 tree folded_compare;
7324 /* First check whether the comparison would come out
7325 always the same. If we don't do that we would
7326 change the meaning with the masking. */
7327 if (constopnum == 0)
7328 folded_compare = fold (build (code, type, constop,
7329 TREE_OPERAND (varop, 0)));
7331 folded_compare = fold (build (code, type,
7332 TREE_OPERAND (varop, 0),
7334 if (integer_zerop (folded_compare)
7335 || integer_onep (folded_compare))
7336 return omit_one_operand (type, folded_compare, varop);
7338 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
7339 precision = TYPE_PRECISION (unsigned_type);
7340 mask = build_int_2 (~0, ~0);
7341 TREE_TYPE (mask) = unsigned_type;
7342 force_fit_type (mask, 0);
7343 mask = const_binop (RSHIFT_EXPR, mask,
7344 size_int (precision - size), 0);
7345 newconst = fold (build (BIT_AND_EXPR,
7346 TREE_TYPE (varop), newconst,
7347 fold_convert (TREE_TYPE (varop),
7351 t = build (code, type,
7352 (constopnum == 0) ? newconst : varop,
7353 (constopnum == 1) ? newconst : varop);
7357 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
7359 if (POINTER_TYPE_P (TREE_TYPE (varop))
7360 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7361 && (code == EQ_EXPR || code == NE_EXPR)))
7364 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
7365 constop, TREE_OPERAND (varop, 1)));
7367 /* Do not overwrite the current varop to be a predecrement,
7368 create a new node so that we won't confuse our caller who
7369 might create trees and throw them away, reusing the
7370 arguments that they passed to build. This shows up in
7371 the THEN or ELSE parts of ?: being postdecrements. */
7372 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
7373 TREE_OPERAND (varop, 0),
7374 TREE_OPERAND (varop, 1));
7376 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7377 && DECL_BIT_FIELD(TREE_OPERAND
7378 (TREE_OPERAND (varop, 0), 1)))
7381 = TREE_INT_CST_LOW (DECL_SIZE
7383 (TREE_OPERAND (varop, 0), 1)));
7384 tree mask, unsigned_type;
7385 unsigned int precision;
7386 tree folded_compare;
7388 if (constopnum == 0)
7389 folded_compare = fold (build (code, type, constop,
7390 TREE_OPERAND (varop, 0)));
7392 folded_compare = fold (build (code, type,
7393 TREE_OPERAND (varop, 0),
7395 if (integer_zerop (folded_compare)
7396 || integer_onep (folded_compare))
7397 return omit_one_operand (type, folded_compare, varop);
7399 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
7400 precision = TYPE_PRECISION (unsigned_type);
7401 mask = build_int_2 (~0, ~0);
7402 TREE_TYPE (mask) = TREE_TYPE (varop);
7403 force_fit_type (mask, 0);
7404 mask = const_binop (RSHIFT_EXPR, mask,
7405 size_int (precision - size), 0);
7406 newconst = fold (build (BIT_AND_EXPR,
7407 TREE_TYPE (varop), newconst,
7408 fold_convert (TREE_TYPE (varop),
7412 t = build (code, type,
7413 (constopnum == 0) ? newconst : varop,
7414 (constopnum == 1) ? newconst : varop);
7420 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7421 This transformation affects the cases which are handled in later
7422 optimizations involving comparisons with non-negative constants. */
7423 if (TREE_CODE (arg1) == INTEGER_CST
7424 && TREE_CODE (arg0) != INTEGER_CST
7425 && tree_int_cst_sgn (arg1) > 0)
7430 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7431 return fold (build (GT_EXPR, type, arg0, arg1));
7434 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7435 return fold (build (LE_EXPR, type, arg0, arg1));
7442 /* Comparisons with the highest or lowest possible integer of
7443 the specified size will have known values. */
7445 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7447 if (TREE_CODE (arg1) == INTEGER_CST
7448 && ! TREE_CONSTANT_OVERFLOW (arg1)
7449 && width <= HOST_BITS_PER_WIDE_INT
7450 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7451 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7453 unsigned HOST_WIDE_INT signed_max;
7454 unsigned HOST_WIDE_INT max, min;
7456 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7458 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7460 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7466 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7469 if (TREE_INT_CST_HIGH (arg1) == 0
7470 && TREE_INT_CST_LOW (arg1) == max)
7474 return omit_one_operand (type,
7479 return fold (build (EQ_EXPR, type, arg0, arg1));
7482 return omit_one_operand (type,
7487 return fold (build (NE_EXPR, type, arg0, arg1));
7489 /* The GE_EXPR and LT_EXPR cases above are not normally
7490 reached because of previous transformations. */
7495 else if (TREE_INT_CST_HIGH (arg1) == 0
7496 && TREE_INT_CST_LOW (arg1) == max - 1)
7500 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7501 return fold (build (EQ_EXPR, type, arg0, arg1));
7503 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7504 return fold (build (NE_EXPR, type, arg0, arg1));
7508 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7509 && TREE_INT_CST_LOW (arg1) == min)
7513 return omit_one_operand (type,
7518 return fold (build (EQ_EXPR, type, arg0, arg1));
7521 return omit_one_operand (type,
7526 return fold (build (NE_EXPR, type, arg0, arg1));
7531 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7532 && TREE_INT_CST_LOW (arg1) == min + 1)
7536 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7537 return fold (build (NE_EXPR, type, arg0, arg1));
7539 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7540 return fold (build (EQ_EXPR, type, arg0, arg1));
7545 else if (TREE_INT_CST_HIGH (arg1) == 0
7546 && TREE_INT_CST_LOW (arg1) == signed_max
7547 && TREE_UNSIGNED (TREE_TYPE (arg1))
7548 /* signed_type does not work on pointer types. */
7549 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7551 /* The following case also applies to X < signed_max+1
7552 and X >= signed_max+1 because previous transformations. */
7553 if (code == LE_EXPR || code == GT_EXPR)
7556 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7557 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7559 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7560 type, fold_convert (st0, arg0),
7561 fold_convert (st1, integer_zero_node)));
7567 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7568 a MINUS_EXPR of a constant, we can convert it into a comparison with
7569 a revised constant as long as no overflow occurs. */
7570 if ((code == EQ_EXPR || code == NE_EXPR)
7571 && TREE_CODE (arg1) == INTEGER_CST
7572 && (TREE_CODE (arg0) == PLUS_EXPR
7573 || TREE_CODE (arg0) == MINUS_EXPR)
7574 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7575 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7576 ? MINUS_EXPR : PLUS_EXPR,
7577 arg1, TREE_OPERAND (arg0, 1), 0))
7578 && ! TREE_CONSTANT_OVERFLOW (tem))
7579 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7581 /* Similarly for a NEGATE_EXPR. */
7582 else if ((code == EQ_EXPR || code == NE_EXPR)
7583 && TREE_CODE (arg0) == NEGATE_EXPR
7584 && TREE_CODE (arg1) == INTEGER_CST
7585 && 0 != (tem = negate_expr (arg1))
7586 && TREE_CODE (tem) == INTEGER_CST
7587 && ! TREE_CONSTANT_OVERFLOW (tem))
7588 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7590 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7591 for !=. Don't do this for ordered comparisons due to overflow. */
7592 else if ((code == NE_EXPR || code == EQ_EXPR)
7593 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7594 return fold (build (code, type,
7595 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7597 /* If we are widening one operand of an integer comparison,
7598 see if the other operand is similarly being widened. Perhaps we
7599 can do the comparison in the narrower type. */
7600 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7601 && TREE_CODE (arg0) == NOP_EXPR
7602 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7603 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7604 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7605 || (TREE_CODE (t1) == INTEGER_CST
7606 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7607 return fold (build (code, type, tem,
7608 fold_convert (TREE_TYPE (tem), t1)));
7610 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7611 constant, we can simplify it. */
7612 else if (TREE_CODE (arg1) == INTEGER_CST
7613 && (TREE_CODE (arg0) == MIN_EXPR
7614 || TREE_CODE (arg0) == MAX_EXPR)
7615 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7616 return optimize_minmax_comparison (t);
7618 /* If we are comparing an ABS_EXPR with a constant, we can
7619 convert all the cases into explicit comparisons, but they may
7620 well not be faster than doing the ABS and one comparison.
7621 But ABS (X) <= C is a range comparison, which becomes a subtraction
7622 and a comparison, and is probably faster. */
7623 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7624 && TREE_CODE (arg0) == ABS_EXPR
7625 && ! TREE_SIDE_EFFECTS (arg0)
7626 && (0 != (tem = negate_expr (arg1)))
7627 && TREE_CODE (tem) == INTEGER_CST
7628 && ! TREE_CONSTANT_OVERFLOW (tem))
7629 return fold (build (TRUTH_ANDIF_EXPR, type,
7630 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7631 build (LE_EXPR, type,
7632 TREE_OPERAND (arg0, 0), arg1)));
7634 /* If this is an EQ or NE comparison with zero and ARG0 is
7635 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7636 two operations, but the latter can be done in one less insn
7637 on machines that have only two-operand insns or on which a
7638 constant cannot be the first operand. */
7639 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7640 && TREE_CODE (arg0) == BIT_AND_EXPR)
7642 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7643 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7645 fold (build (code, type,
7646 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7648 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7649 TREE_OPERAND (arg0, 1),
7650 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7651 fold_convert (TREE_TYPE (arg0),
7654 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7655 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7657 fold (build (code, type,
7658 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7660 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7661 TREE_OPERAND (arg0, 0),
7662 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7663 fold_convert (TREE_TYPE (arg0),
7668 /* If this is an NE or EQ comparison of zero against the result of a
7669 signed MOD operation whose second operand is a power of 2, make
7670 the MOD operation unsigned since it is simpler and equivalent. */
7671 if ((code == NE_EXPR || code == EQ_EXPR)
7672 && integer_zerop (arg1)
7673 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7674 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7675 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7676 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7677 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7678 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7680 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7681 tree newmod = build (TREE_CODE (arg0), newtype,
7682 fold_convert (newtype,
7683 TREE_OPERAND (arg0, 0)),
7684 fold_convert (newtype,
7685 TREE_OPERAND (arg0, 1)));
7687 return build (code, type, newmod, fold_convert (newtype, arg1));
7690 /* If this is an NE comparison of zero with an AND of one, remove the
7691 comparison since the AND will give the correct value. */
7692 if (code == NE_EXPR && integer_zerop (arg1)
7693 && TREE_CODE (arg0) == BIT_AND_EXPR
7694 && integer_onep (TREE_OPERAND (arg0, 1)))
7695 return fold_convert (type, arg0);
7697 /* If we have (A & C) == C where C is a power of 2, convert this into
7698 (A & C) != 0. Similarly for NE_EXPR. */
7699 if ((code == EQ_EXPR || code == NE_EXPR)
7700 && TREE_CODE (arg0) == BIT_AND_EXPR
7701 && integer_pow2p (TREE_OPERAND (arg0, 1))
7702 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7703 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7704 arg0, integer_zero_node));
7706 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7707 2, then fold the expression into shifts and logical operations. */
7708 tem = fold_single_bit_test (code, arg0, arg1, type);
7712 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7713 Similarly for NE_EXPR. */
7714 if ((code == EQ_EXPR || code == NE_EXPR)
7715 && TREE_CODE (arg0) == BIT_AND_EXPR
7716 && TREE_CODE (arg1) == INTEGER_CST
7717 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7720 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7721 arg1, build1 (BIT_NOT_EXPR,
7722 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7723 TREE_OPERAND (arg0, 1))));
7724 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7725 if (integer_nonzerop (dandnotc))
7726 return omit_one_operand (type, rslt, arg0);
7729 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7730 Similarly for NE_EXPR. */
7731 if ((code == EQ_EXPR || code == NE_EXPR)
7732 && TREE_CODE (arg0) == BIT_IOR_EXPR
7733 && TREE_CODE (arg1) == INTEGER_CST
7734 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7737 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7738 TREE_OPERAND (arg0, 1),
7739 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7740 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7741 if (integer_nonzerop (candnotd))
7742 return omit_one_operand (type, rslt, arg0);
7745 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7746 and similarly for >= into !=. */
7747 if ((code == LT_EXPR || code == GE_EXPR)
7748 && TREE_UNSIGNED (TREE_TYPE (arg0))
7749 && TREE_CODE (arg1) == LSHIFT_EXPR
7750 && integer_onep (TREE_OPERAND (arg1, 0)))
7751 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7752 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7753 TREE_OPERAND (arg1, 1)),
7754 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7756 else if ((code == LT_EXPR || code == GE_EXPR)
7757 && TREE_UNSIGNED (TREE_TYPE (arg0))
7758 && (TREE_CODE (arg1) == NOP_EXPR
7759 || TREE_CODE (arg1) == CONVERT_EXPR)
7760 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7761 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7763 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7764 fold_convert (TREE_TYPE (arg0),
7765 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7766 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7768 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7770 /* Simplify comparison of something with itself. (For IEEE
7771 floating-point, we can only do some of these simplifications.) */
7772 if (operand_equal_p (arg0, arg1, 0))
7777 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7778 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7779 return constant_boolean_node (1, type);
7784 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7785 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7786 return constant_boolean_node (1, type);
7787 return fold (build (EQ_EXPR, type, arg0, arg1));
7790 /* For NE, we can only do this simplification if integer
7791 or we don't honor IEEE floating point NaNs. */
7792 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7793 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7795 /* ... fall through ... */
7798 return constant_boolean_node (0, type);
7804 /* If we are comparing an expression that just has comparisons
7805 of two integer values, arithmetic expressions of those comparisons,
7806 and constants, we can simplify it. There are only three cases
7807 to check: the two values can either be equal, the first can be
7808 greater, or the second can be greater. Fold the expression for
7809 those three values. Since each value must be 0 or 1, we have
7810 eight possibilities, each of which corresponds to the constant 0
7811 or 1 or one of the six possible comparisons.
7813 This handles common cases like (a > b) == 0 but also handles
7814 expressions like ((x > y) - (y > x)) > 0, which supposedly
7815 occur in macroized code. */
7817 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7819 tree cval1 = 0, cval2 = 0;
7822 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7823 /* Don't handle degenerate cases here; they should already
7824 have been handled anyway. */
7825 && cval1 != 0 && cval2 != 0
7826 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7827 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7828 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7829 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7830 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7831 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7832 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7834 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7835 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7837 /* We can't just pass T to eval_subst in case cval1 or cval2
7838 was the same as ARG1. */
7841 = fold (build (code, type,
7842 eval_subst (arg0, cval1, maxval, cval2, minval),
7845 = fold (build (code, type,
7846 eval_subst (arg0, cval1, maxval, cval2, maxval),
7849 = fold (build (code, type,
7850 eval_subst (arg0, cval1, minval, cval2, maxval),
7853 /* All three of these results should be 0 or 1. Confirm they
7854 are. Then use those values to select the proper code
7857 if ((integer_zerop (high_result)
7858 || integer_onep (high_result))
7859 && (integer_zerop (equal_result)
7860 || integer_onep (equal_result))
7861 && (integer_zerop (low_result)
7862 || integer_onep (low_result)))
7864 /* Make a 3-bit mask with the high-order bit being the
7865 value for `>', the next for '=', and the low for '<'. */
7866 switch ((integer_onep (high_result) * 4)
7867 + (integer_onep (equal_result) * 2)
7868 + integer_onep (low_result))
7872 return omit_one_operand (type, integer_zero_node, arg0);
7893 return omit_one_operand (type, integer_one_node, arg0);
7896 t = build (code, type, cval1, cval2);
7898 return save_expr (t);
7905 /* If this is a comparison of a field, we may be able to simplify it. */
7906 if (((TREE_CODE (arg0) == COMPONENT_REF
7907 && (*lang_hooks.can_use_bit_fields_p) ())
7908 || TREE_CODE (arg0) == BIT_FIELD_REF)
7909 && (code == EQ_EXPR || code == NE_EXPR)
7910 /* Handle the constant case even without -O
7911 to make sure the warnings are given. */
7912 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7914 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7919 /* If this is a comparison of complex values and either or both sides
7920 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7921 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7922 This may prevent needless evaluations. */
7923 if ((code == EQ_EXPR || code == NE_EXPR)
7924 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7925 && (TREE_CODE (arg0) == COMPLEX_EXPR
7926 || TREE_CODE (arg1) == COMPLEX_EXPR
7927 || TREE_CODE (arg0) == COMPLEX_CST
7928 || TREE_CODE (arg1) == COMPLEX_CST))
7930 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7931 tree real0, imag0, real1, imag1;
7933 arg0 = save_expr (arg0);
7934 arg1 = save_expr (arg1);
7935 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7936 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7937 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7938 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7940 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7943 fold (build (code, type, real0, real1)),
7944 fold (build (code, type, imag0, imag1))));
7947 /* Optimize comparisons of strlen vs zero to a compare of the
7948 first character of the string vs zero. To wit,
7949 strlen(ptr) == 0 => *ptr == 0
7950 strlen(ptr) != 0 => *ptr != 0
7951 Other cases should reduce to one of these two (or a constant)
7952 due to the return value of strlen being unsigned. */
7953 if ((code == EQ_EXPR || code == NE_EXPR)
7954 && integer_zerop (arg1)
7955 && TREE_CODE (arg0) == CALL_EXPR)
7957 tree fndecl = get_callee_fndecl (arg0);
7961 && DECL_BUILT_IN (fndecl)
7962 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7963 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7964 && (arglist = TREE_OPERAND (arg0, 1))
7965 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7966 && ! TREE_CHAIN (arglist))
7967 return fold (build (code, type,
7968 build1 (INDIRECT_REF, char_type_node,
7969 TREE_VALUE(arglist)),
7970 integer_zero_node));
7973 /* From here on, the only cases we handle are when the result is
7974 known to be a constant.
7976 To compute GT, swap the arguments and do LT.
7977 To compute GE, do LT and invert the result.
7978 To compute LE, swap the arguments, do LT and invert the result.
7979 To compute NE, do EQ and invert the result.
7981 Therefore, the code below must handle only EQ and LT. */
7983 if (code == LE_EXPR || code == GT_EXPR)
7985 tem = arg0, arg0 = arg1, arg1 = tem;
7986 code = swap_tree_comparison (code);
7989 /* Note that it is safe to invert for real values here because we
7990 will check below in the one case that it matters. */
7994 if (code == NE_EXPR || code == GE_EXPR)
7997 code = invert_tree_comparison (code);
8000 /* Compute a result for LT or EQ if args permit;
8001 otherwise return T. */
8002 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8004 if (code == EQ_EXPR)
8005 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
8007 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
8008 ? INT_CST_LT_UNSIGNED (arg0, arg1)
8009 : INT_CST_LT (arg0, arg1)),
8013 #if 0 /* This is no longer useful, but breaks some real code. */
8014 /* Assume a nonexplicit constant cannot equal an explicit one,
8015 since such code would be undefined anyway.
8016 Exception: on sysvr4, using #pragma weak,
8017 a label can come out as 0. */
8018 else if (TREE_CODE (arg1) == INTEGER_CST
8019 && !integer_zerop (arg1)
8020 && TREE_CONSTANT (arg0)
8021 && TREE_CODE (arg0) == ADDR_EXPR
8023 t1 = build_int_2 (0, 0);
8025 /* Two real constants can be compared explicitly. */
8026 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8028 /* If either operand is a NaN, the result is false with two
8029 exceptions: First, an NE_EXPR is true on NaNs, but that case
8030 is already handled correctly since we will be inverting the
8031 result for NE_EXPR. Second, if we had inverted a LE_EXPR
8032 or a GE_EXPR into a LT_EXPR, we must return true so that it
8033 will be inverted into false. */
8035 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8036 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
8037 t1 = build_int_2 (invert && code == LT_EXPR, 0);
8039 else if (code == EQ_EXPR)
8040 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
8041 TREE_REAL_CST (arg1)),
8044 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
8045 TREE_REAL_CST (arg1)),
8049 if (t1 == NULL_TREE)
8053 TREE_INT_CST_LOW (t1) ^= 1;
8055 TREE_TYPE (t1) = type;
8056 if (TREE_CODE (type) == BOOLEAN_TYPE)
8057 return (*lang_hooks.truthvalue_conversion) (t1);
8061 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8062 so all simple results must be passed through pedantic_non_lvalue. */
8063 if (TREE_CODE (arg0) == INTEGER_CST)
8065 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8066 /* Only optimize constant conditions when the selected branch
8067 has the same type as the COND_EXPR. This avoids optimizing
8068 away "c ? x : throw", where the throw has a void type. */
8069 if (! VOID_TYPE_P (TREE_TYPE (tem))
8070 || VOID_TYPE_P (TREE_TYPE (t)))
8071 return pedantic_non_lvalue (tem);
8074 if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
8075 return pedantic_omit_one_operand (type, arg1, arg0);
8077 /* If we have A op B ? A : C, we may be able to convert this to a
8078 simpler expression, depending on the operation and the values
8079 of B and C. Signed zeros prevent all of these transformations,
8080 for reasons given above each one. */
8082 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8083 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8084 arg1, TREE_OPERAND (arg0, 1))
8085 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8087 tree arg2 = TREE_OPERAND (t, 2);
8088 enum tree_code comp_code = TREE_CODE (arg0);
8092 /* If we have A op 0 ? A : -A, consider applying the following
8095 A == 0? A : -A same as -A
8096 A != 0? A : -A same as A
8097 A >= 0? A : -A same as abs (A)
8098 A > 0? A : -A same as abs (A)
8099 A <= 0? A : -A same as -abs (A)
8100 A < 0? A : -A same as -abs (A)
8102 None of these transformations work for modes with signed
8103 zeros. If A is +/-0, the first two transformations will
8104 change the sign of the result (from +0 to -0, or vice
8105 versa). The last four will fix the sign of the result,
8106 even though the original expressions could be positive or
8107 negative, depending on the sign of A.
8109 Note that all these transformations are correct if A is
8110 NaN, since the two alternatives (A and -A) are also NaNs. */
8111 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
8112 ? real_zerop (TREE_OPERAND (arg0, 1))
8113 : integer_zerop (TREE_OPERAND (arg0, 1)))
8114 && TREE_CODE (arg2) == NEGATE_EXPR
8115 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
8119 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
8120 tem = fold_convert (type, negate_expr (tem));
8121 return pedantic_non_lvalue (tem);
8123 return pedantic_non_lvalue (fold_convert (type, arg1));
8126 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8127 arg1 = fold_convert ((*lang_hooks.types.signed_type)
8128 (TREE_TYPE (arg1)), arg1);
8129 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8130 return pedantic_non_lvalue (fold_convert (type, arg1));
8133 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8134 arg1 = fold_convert ((lang_hooks.types.signed_type)
8135 (TREE_TYPE (arg1)), arg1);
8136 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8137 arg1 = negate_expr (fold_convert (type, arg1));
8138 return pedantic_non_lvalue (arg1);
8143 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
8144 A == 0 ? A : 0 is always 0 unless A is -0. Note that
8145 both transformations are correct when A is NaN: A != 0
8146 is then true, and A == 0 is false. */
8148 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8150 if (comp_code == NE_EXPR)
8151 return pedantic_non_lvalue (fold_convert (type, arg1));
8152 else if (comp_code == EQ_EXPR)
8153 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8156 /* Try some transformations of A op B ? A : B.
8158 A == B? A : B same as B
8159 A != B? A : B same as A
8160 A >= B? A : B same as max (A, B)
8161 A > B? A : B same as max (B, A)
8162 A <= B? A : B same as min (A, B)
8163 A < B? A : B same as min (B, A)
8165 As above, these transformations don't work in the presence
8166 of signed zeros. For example, if A and B are zeros of
8167 opposite sign, the first two transformations will change
8168 the sign of the result. In the last four, the original
8169 expressions give different results for (A=+0, B=-0) and
8170 (A=-0, B=+0), but the transformed expressions do not.
8172 The first two transformations are correct if either A or B
8173 is a NaN. In the first transformation, the condition will
8174 be false, and B will indeed be chosen. In the case of the
8175 second transformation, the condition A != B will be true,
8176 and A will be chosen.
8178 The conversions to max() and min() are not correct if B is
8179 a number and A is not. The conditions in the original
8180 expressions will be false, so all four give B. The min()
8181 and max() versions would give a NaN instead. */
8182 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8183 arg2, TREE_OPERAND (arg0, 0)))
8185 tree comp_op0 = TREE_OPERAND (arg0, 0);
8186 tree comp_op1 = TREE_OPERAND (arg0, 1);
8187 tree comp_type = TREE_TYPE (comp_op0);
8189 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8190 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8200 return pedantic_non_lvalue (fold_convert (type, arg2));
8202 return pedantic_non_lvalue (fold_convert (type, arg1));
8205 /* In C++ a ?: expression can be an lvalue, so put the
8206 operand which will be used if they are equal first
8207 so that we can convert this back to the
8208 corresponding COND_EXPR. */
8209 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8210 return pedantic_non_lvalue (fold_convert
8211 (type, fold (build (MIN_EXPR, comp_type,
8212 (comp_code == LE_EXPR
8213 ? comp_op0 : comp_op1),
8214 (comp_code == LE_EXPR
8215 ? comp_op1 : comp_op0)))));
8219 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8220 return pedantic_non_lvalue (fold_convert
8221 (type, fold (build (MAX_EXPR, comp_type,
8222 (comp_code == GE_EXPR
8223 ? comp_op0 : comp_op1),
8224 (comp_code == GE_EXPR
8225 ? comp_op1 : comp_op0)))));
8232 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8233 we might still be able to simplify this. For example,
8234 if C1 is one less or one more than C2, this might have started
8235 out as a MIN or MAX and been transformed by this function.
8236 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8238 if (INTEGRAL_TYPE_P (type)
8239 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8240 && TREE_CODE (arg2) == INTEGER_CST)
8244 /* We can replace A with C1 in this case. */
8245 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8246 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8247 TREE_OPERAND (t, 2)));
8250 /* If C1 is C2 + 1, this is min(A, C2). */
8251 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8252 && operand_equal_p (TREE_OPERAND (arg0, 1),
8253 const_binop (PLUS_EXPR, arg2,
8254 integer_one_node, 0), 1))
8255 return pedantic_non_lvalue
8256 (fold (build (MIN_EXPR, type, arg1, arg2)));
8260 /* If C1 is C2 - 1, this is min(A, C2). */
8261 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8262 && operand_equal_p (TREE_OPERAND (arg0, 1),
8263 const_binop (MINUS_EXPR, arg2,
8264 integer_one_node, 0), 1))
8265 return pedantic_non_lvalue
8266 (fold (build (MIN_EXPR, type, arg1, arg2)));
8270 /* If C1 is C2 - 1, this is max(A, C2). */
8271 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8272 && operand_equal_p (TREE_OPERAND (arg0, 1),
8273 const_binop (MINUS_EXPR, arg2,
8274 integer_one_node, 0), 1))
8275 return pedantic_non_lvalue
8276 (fold (build (MAX_EXPR, type, arg1, arg2)));
8280 /* If C1 is C2 + 1, this is max(A, C2). */
8281 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8282 && operand_equal_p (TREE_OPERAND (arg0, 1),
8283 const_binop (PLUS_EXPR, arg2,
8284 integer_one_node, 0), 1))
8285 return pedantic_non_lvalue
8286 (fold (build (MAX_EXPR, type, arg1, arg2)));
8295 /* If the second operand is simpler than the third, swap them
8296 since that produces better jump optimization results. */
8297 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8298 TREE_OPERAND (t, 2), false))
8300 /* See if this can be inverted. If it can't, possibly because
8301 it was a floating-point inequality comparison, don't do
8303 tem = invert_truthvalue (arg0);
8305 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8306 return fold (build (code, type, tem,
8307 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8310 /* Convert A ? 1 : 0 to simply A. */
8311 if (integer_onep (TREE_OPERAND (t, 1))
8312 && integer_zerop (TREE_OPERAND (t, 2))
8313 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8314 call to fold will try to move the conversion inside
8315 a COND, which will recurse. In that case, the COND_EXPR
8316 is probably the best choice, so leave it alone. */
8317 && type == TREE_TYPE (arg0))
8318 return pedantic_non_lvalue (arg0);
8320 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8321 over COND_EXPR in cases such as floating point comparisons. */
8322 if (integer_zerop (TREE_OPERAND (t, 1))
8323 && integer_onep (TREE_OPERAND (t, 2))
8324 && truth_value_p (TREE_CODE (arg0)))
8325 return pedantic_non_lvalue (fold_convert (type,
8326 invert_truthvalue (arg0)));
8328 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8329 operation is simply A & 2. */
8331 if (integer_zerop (TREE_OPERAND (t, 2))
8332 && TREE_CODE (arg0) == NE_EXPR
8333 && integer_zerop (TREE_OPERAND (arg0, 1))
8334 && integer_pow2p (arg1)
8335 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8336 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8338 return pedantic_non_lvalue (fold_convert (type,
8339 TREE_OPERAND (arg0, 0)));
8341 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8342 if (integer_zerop (TREE_OPERAND (t, 2))
8343 && truth_value_p (TREE_CODE (arg0))
8344 && truth_value_p (TREE_CODE (arg1)))
8345 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8348 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8349 if (integer_onep (TREE_OPERAND (t, 2))
8350 && truth_value_p (TREE_CODE (arg0))
8351 && truth_value_p (TREE_CODE (arg1)))
8353 /* Only perform transformation if ARG0 is easily inverted. */
8354 tem = invert_truthvalue (arg0);
8355 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8356 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8363 /* When pedantic, a compound expression can be neither an lvalue
8364 nor an integer constant expression. */
8365 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8367 /* Don't let (0, 0) be null pointer constant. */
8368 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8369 : fold_convert (type, arg1);
8370 return pedantic_non_lvalue (tem);
8374 return build_complex (type, arg0, arg1);
8378 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8380 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8381 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8382 TREE_OPERAND (arg0, 1));
8383 else if (TREE_CODE (arg0) == COMPLEX_CST)
8384 return TREE_REALPART (arg0);
8385 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8386 return fold (build (TREE_CODE (arg0), type,
8387 fold (build1 (REALPART_EXPR, type,
8388 TREE_OPERAND (arg0, 0))),
8389 fold (build1 (REALPART_EXPR,
8390 type, TREE_OPERAND (arg0, 1)))));
8394 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8395 return fold_convert (type, integer_zero_node);
8396 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8397 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8398 TREE_OPERAND (arg0, 0));
8399 else if (TREE_CODE (arg0) == COMPLEX_CST)
8400 return TREE_IMAGPART (arg0);
8401 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8402 return fold (build (TREE_CODE (arg0), type,
8403 fold (build1 (IMAGPART_EXPR, type,
8404 TREE_OPERAND (arg0, 0))),
8405 fold (build1 (IMAGPART_EXPR, type,
8406 TREE_OPERAND (arg0, 1)))));
8409 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8411 case CLEANUP_POINT_EXPR:
8412 if (! has_cleanups (arg0))
8413 return TREE_OPERAND (t, 0);
8416 enum tree_code code0 = TREE_CODE (arg0);
8417 int kind0 = TREE_CODE_CLASS (code0);
8418 tree arg00 = TREE_OPERAND (arg0, 0);
8421 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8422 return fold (build1 (code0, type,
8423 fold (build1 (CLEANUP_POINT_EXPR,
8424 TREE_TYPE (arg00), arg00))));
8426 if (kind0 == '<' || kind0 == '2'
8427 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8428 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8429 || code0 == TRUTH_XOR_EXPR)
8431 arg01 = TREE_OPERAND (arg0, 1);
8433 if (TREE_CONSTANT (arg00)
8434 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8435 && ! has_cleanups (arg00)))
8436 return fold (build (code0, type, arg00,
8437 fold (build1 (CLEANUP_POINT_EXPR,
8438 TREE_TYPE (arg01), arg01))));
8440 if (TREE_CONSTANT (arg01))
8441 return fold (build (code0, type,
8442 fold (build1 (CLEANUP_POINT_EXPR,
8443 TREE_TYPE (arg00), arg00)),
8451 /* Check for a built-in function. */
8452 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8453 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8455 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8457 tree tmp = fold_builtin (expr);
8465 } /* switch (code) */
8468 #ifdef ENABLE_FOLD_CHECKING
8471 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8472 static void fold_check_failed (tree, tree);
8473 void print_fold_checksum (tree);
8475 /* When --enable-checking=fold, compute a digest of expr before
8476 and after actual fold call to see if fold did not accidentally
8477 change original expr. */
8484 unsigned char checksum_before[16], checksum_after[16];
8487 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8488 md5_init_ctx (&ctx);
8489 fold_checksum_tree (expr, &ctx, ht);
8490 md5_finish_ctx (&ctx, checksum_before);
8493 ret = fold_1 (expr);
8495 md5_init_ctx (&ctx);
8496 fold_checksum_tree (expr, &ctx, ht);
8497 md5_finish_ctx (&ctx, checksum_after);
8500 if (memcmp (checksum_before, checksum_after, 16))
8501 fold_check_failed (expr, ret);
8507 print_fold_checksum (tree expr)
8510 unsigned char checksum[16], cnt;
8513 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8514 md5_init_ctx (&ctx);
8515 fold_checksum_tree (expr, &ctx, ht);
8516 md5_finish_ctx (&ctx, checksum);
8518 for (cnt = 0; cnt < 16; ++cnt)
8519 fprintf (stderr, "%02x", checksum[cnt]);
8520 putc ('\n', stderr);
8524 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8526 internal_error ("fold check: original tree changed by fold");
8530 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8533 enum tree_code code;
8534 char buf[sizeof (struct tree_decl)];
8537 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8538 > sizeof (struct tree_decl)
8539 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8543 slot = htab_find_slot (ht, expr, INSERT);
8547 code = TREE_CODE (expr);
8548 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8550 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8551 memcpy (buf, expr, tree_size (expr));
8553 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8555 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8557 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8558 memcpy (buf, expr, tree_size (expr));
8560 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8562 else if (TREE_CODE_CLASS (code) == 't'
8563 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8565 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8566 memcpy (buf, expr, tree_size (expr));
8568 TYPE_POINTER_TO (expr) = NULL;
8569 TYPE_REFERENCE_TO (expr) = NULL;
8571 md5_process_bytes (expr, tree_size (expr), ctx);
8572 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8573 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8574 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8575 len = TREE_CODE_LENGTH (code);
8576 switch (TREE_CODE_CLASS (code))
8582 md5_process_bytes (TREE_STRING_POINTER (expr),
8583 TREE_STRING_LENGTH (expr), ctx);
8586 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8587 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8590 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8600 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8601 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8604 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8605 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8614 case SAVE_EXPR: len = 2; break;
8615 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8616 case RTL_EXPR: len = 0; break;
8617 case WITH_CLEANUP_EXPR: len = 2; break;
8626 for (i = 0; i < len; ++i)
8627 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8630 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8631 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8632 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8633 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8634 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8635 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8636 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8637 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8638 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8639 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8640 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8643 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8644 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8645 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8646 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8647 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8648 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8649 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8650 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8651 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8652 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8661 /* Perform constant folding and related simplification of initializer
8662 expression EXPR. This behaves identically to "fold" but ignores
8663 potential run-time traps and exceptions that fold must preserve. */
8666 fold_initializer (tree expr)
8668 int saved_signaling_nans = flag_signaling_nans;
8669 int saved_trapping_math = flag_trapping_math;
8670 int saved_trapv = flag_trapv;
8673 flag_signaling_nans = 0;
8674 flag_trapping_math = 0;
8677 result = fold (expr);
8679 flag_signaling_nans = saved_signaling_nans;
8680 flag_trapping_math = saved_trapping_math;
8681 flag_trapv = saved_trapv;
8686 /* Determine if first argument is a multiple of second argument. Return 0 if
8687 it is not, or we cannot easily determined it to be.
8689 An example of the sort of thing we care about (at this point; this routine
8690 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8691 fold cases do now) is discovering that
8693 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8699 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8701 This code also handles discovering that
8703 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8705 is a multiple of 8 so we don't have to worry about dealing with a
8708 Note that we *look* inside a SAVE_EXPR only to determine how it was
8709 calculated; it is not safe for fold to do much of anything else with the
8710 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8711 at run time. For example, the latter example above *cannot* be implemented
8712 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8713 evaluation time of the original SAVE_EXPR is not necessarily the same at
8714 the time the new expression is evaluated. The only optimization of this
8715 sort that would be valid is changing
8717 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8721 SAVE_EXPR (I) * SAVE_EXPR (J)
8723 (where the same SAVE_EXPR (J) is used in the original and the
8724 transformed version). */
8727 multiple_of_p (tree type, tree top, tree bottom)
8729 if (operand_equal_p (top, bottom, 0))
8732 if (TREE_CODE (type) != INTEGER_TYPE)
8735 switch (TREE_CODE (top))
8738 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8739 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8743 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8744 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8747 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8751 op1 = TREE_OPERAND (top, 1);
8752 /* const_binop may not detect overflow correctly,
8753 so check for it explicitly here. */
8754 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8755 > TREE_INT_CST_LOW (op1)
8756 && TREE_INT_CST_HIGH (op1) == 0
8757 && 0 != (t1 = fold_convert (type,
8758 const_binop (LSHIFT_EXPR,
8761 && ! TREE_OVERFLOW (t1))
8762 return multiple_of_p (type, t1, bottom);
8767 /* Can't handle conversions from non-integral or wider integral type. */
8768 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8769 || (TYPE_PRECISION (type)
8770 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8773 /* .. fall through ... */
8776 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8779 if (TREE_CODE (bottom) != INTEGER_CST
8780 || (TREE_UNSIGNED (type)
8781 && (tree_int_cst_sgn (top) < 0
8782 || tree_int_cst_sgn (bottom) < 0)))
8784 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8792 /* Return true if `t' is known to be non-negative. */
8795 tree_expr_nonnegative_p (tree t)
8797 switch (TREE_CODE (t))
8803 return tree_int_cst_sgn (t) >= 0;
8806 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8809 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8810 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8811 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8813 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8814 both unsigned and at least 2 bits shorter than the result. */
8815 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8816 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8817 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8819 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8820 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8821 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8822 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8824 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8825 TYPE_PRECISION (inner2)) + 1;
8826 return prec < TYPE_PRECISION (TREE_TYPE (t));
8832 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8834 /* x * x for floating point x is always non-negative. */
8835 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8837 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8838 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8841 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8842 both unsigned and their total bits is shorter than the result. */
8843 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8844 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8845 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8847 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8848 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8849 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8850 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8851 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8852 < TYPE_PRECISION (TREE_TYPE (t));
8856 case TRUNC_DIV_EXPR:
8858 case FLOOR_DIV_EXPR:
8859 case ROUND_DIV_EXPR:
8860 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8861 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8863 case TRUNC_MOD_EXPR:
8865 case FLOOR_MOD_EXPR:
8866 case ROUND_MOD_EXPR:
8867 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8870 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8871 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8875 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8876 tree outer_type = TREE_TYPE (t);
8878 if (TREE_CODE (outer_type) == REAL_TYPE)
8880 if (TREE_CODE (inner_type) == REAL_TYPE)
8881 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8882 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8884 if (TREE_UNSIGNED (inner_type))
8886 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8889 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8891 if (TREE_CODE (inner_type) == REAL_TYPE)
8892 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
8893 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8894 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8895 && TREE_UNSIGNED (inner_type);
8901 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8902 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8904 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8906 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8907 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8909 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8910 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8912 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8914 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8916 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8917 case NON_LVALUE_EXPR:
8918 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8920 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8922 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8926 tree fndecl = get_callee_fndecl (t);
8927 tree arglist = TREE_OPERAND (t, 1);
8929 && DECL_BUILT_IN (fndecl)
8930 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8931 switch (DECL_FUNCTION_CODE (fndecl))
8934 case BUILT_IN_CABSL:
8935 case BUILT_IN_CABSF:
8940 case BUILT_IN_EXP2F:
8941 case BUILT_IN_EXP2L:
8942 case BUILT_IN_EXP10:
8943 case BUILT_IN_EXP10F:
8944 case BUILT_IN_EXP10L:
8946 case BUILT_IN_FABSF:
8947 case BUILT_IN_FABSL:
8950 case BUILT_IN_FFSLL:
8951 case BUILT_IN_PARITY:
8952 case BUILT_IN_PARITYL:
8953 case BUILT_IN_PARITYLL:
8954 case BUILT_IN_POPCOUNT:
8955 case BUILT_IN_POPCOUNTL:
8956 case BUILT_IN_POPCOUNTLL:
8957 case BUILT_IN_POW10:
8958 case BUILT_IN_POW10F:
8959 case BUILT_IN_POW10L:
8961 case BUILT_IN_SQRTF:
8962 case BUILT_IN_SQRTL:
8966 case BUILT_IN_ATANF:
8967 case BUILT_IN_ATANL:
8969 case BUILT_IN_CEILF:
8970 case BUILT_IN_CEILL:
8971 case BUILT_IN_FLOOR:
8972 case BUILT_IN_FLOORF:
8973 case BUILT_IN_FLOORL:
8974 case BUILT_IN_NEARBYINT:
8975 case BUILT_IN_NEARBYINTF:
8976 case BUILT_IN_NEARBYINTL:
8977 case BUILT_IN_ROUND:
8978 case BUILT_IN_ROUNDF:
8979 case BUILT_IN_ROUNDL:
8980 case BUILT_IN_TRUNC:
8981 case BUILT_IN_TRUNCF:
8982 case BUILT_IN_TRUNCL:
8983 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8988 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8995 /* ... fall through ... */
8998 if (truth_value_p (TREE_CODE (t)))
8999 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9003 /* We don't know sign of `t', so be conservative and return false. */
9007 /* Return true if `r' is known to be non-negative.
9008 Only handles constants at the moment. */
9011 rtl_expr_nonnegative_p (rtx r)
9013 switch (GET_CODE (r))
9016 return INTVAL (r) >= 0;
9019 if (GET_MODE (r) == VOIDmode)
9020 return CONST_DOUBLE_HIGH (r) >= 0;
9028 units = CONST_VECTOR_NUNITS (r);
9030 for (i = 0; i < units; ++i)
9032 elt = CONST_VECTOR_ELT (r, i);
9033 if (!rtl_expr_nonnegative_p (elt))
9042 /* These are always nonnegative. */
9050 #include "gt-fold-const.h"