1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
52 #include "coretypes.h"
63 #include "langhooks.h"
66 /* Non-zero if we are folding constants inside an initializer; zero
67 otherwise. */
68 int folding_initializer = 0;
70 /* The following constants represent a bit-based encoding of GCC's
71 comparison operators. This encoding simplifies transformations
72 on relational comparison operators, such as AND and OR. */
73 enum comparison_code {
92 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
93 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
94 static bool negate_mathfn_p (enum built_in_function);
95 static bool negate_expr_p (tree);
96 static tree negate_expr (tree);
97 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
98 static tree associate_trees (tree, tree, enum tree_code, tree);
99 static tree const_binop (enum tree_code, tree, tree, int);
100 static enum comparison_code comparison_to_compcode (enum tree_code);
101 static enum tree_code compcode_to_comparison (enum comparison_code);
102 static tree combine_comparisons (enum tree_code, enum tree_code,
103 enum tree_code, tree, tree, tree);
104 static int truth_value_p (enum tree_code);
105 static int operand_equal_for_comparison_p (tree, tree, tree);
106 static int twoval_comparison_p (tree, tree *, tree *, int *);
107 static tree eval_subst (tree, tree, tree, tree, tree);
108 static tree pedantic_omit_one_operand (tree, tree, tree);
109 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
110 static tree make_bit_field_ref (tree, tree, int, int, int);
111 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
112 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
113 enum machine_mode *, int *, int *,
115 static int all_ones_mask_p (tree, int);
116 static tree sign_bit_p (tree, tree);
117 static int simple_operand_p (tree);
118 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
119 static tree range_predecessor (tree);
120 static tree range_successor (tree);
121 static tree make_range (tree, int *, tree *, tree *);
122 static tree build_range_check (tree, tree, int, tree, tree);
123 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
125 static tree fold_range_test (enum tree_code, tree, tree, tree);
126 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
127 static tree unextend (tree, int, int, tree);
128 static tree fold_truthop (enum tree_code, tree, tree, tree);
129 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
130 static tree extract_muldiv (tree, tree, enum tree_code, tree);
131 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
132 static int multiple_of_p (tree, tree, tree);
133 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
136 static bool fold_real_zero_addition_p (tree, tree, int);
137 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
139 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
140 static tree fold_div_compare (enum tree_code, tree, tree, tree);
141 static bool reorder_operands_p (tree, tree);
142 static tree fold_negate_const (tree, tree);
143 static tree fold_not_const (tree, tree);
144 static tree fold_relational_const (enum tree_code, tree, tree, tree);
145 static int native_encode_expr (tree, unsigned char *, int);
146 static tree native_interpret_expr (tree, unsigned char *, int);
149 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
150 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
151 and SUM1. Then this yields nonzero if overflow occurred during the addition.
154 Overflow occurs if A and B have the same sign, but A and SUM differ in
155 sign. Use `^' to test whether signs differ, and `< 0' to isolate the sign bit. */
157 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
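
/* Illustrative sketch, not part of the original file: a minimal check of
   the macro above using hypothetical values.  Adding 1 to the largest
   positive HOST_WIDE_INT wraps to a negative sum, so the operands agree
   in sign while the sum does not and the macro reports overflow; 1 + 1
   does not overflow.  */
#if 0
static void
overflow_sum_sign_example (void)
{
  HOST_WIDE_INT a = (HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) -1) >> 1);
  HOST_WIDE_INT b = 1;
  /* Compute the wrapped sum in unsigned arithmetic so the example itself
     avoids undefined signed overflow.  */
  HOST_WIDE_INT sum = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) a
				       + (unsigned HOST_WIDE_INT) b);
  gcc_assert (OVERFLOW_SUM_SIGN (a, b, sum));
  gcc_assert (!OVERFLOW_SUM_SIGN (b, b, b + b));
}
#endif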
159 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
160 We do that by representing the two-word integer in 4 words, with only
161 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
162 number. The value of the word is LOWPART + HIGHPART * BASE. */
164 #define LOWPART(x) \
165 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
166 #define HIGHPART(x) \
167 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
168 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
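
/* Illustrative sketch, not part of the original file: how a single word
   splits into half-words.  For any x, LOWPART (x) + HIGHPART (x) * BASE
   reconstructs x; the value below is hypothetical and built so the two
   halves are 0x1234 and 0x5678 on any host.  */
#if 0
static void
halfword_split_example (void)
{
  unsigned HOST_WIDE_INT x
    = ((unsigned HOST_WIDE_INT) 0x1234 << (HOST_BITS_PER_WIDE_INT / 2)) + 0x5678;
  unsigned HOST_WIDE_INT lo = LOWPART (x);   /* 0x5678 */
  unsigned HOST_WIDE_INT hi = HIGHPART (x);  /* 0x1234 */
  gcc_assert (x == lo + hi * BASE);
}
#endif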
170 /* Unpack a two-word integer into 4 words.
171 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
172 WORDS points to the array of HOST_WIDE_INTs. */
175 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
177 words[0] = LOWPART (low);
178 words[1] = HIGHPART (low);
179 words[2] = LOWPART (hi);
180 words[3] = HIGHPART (hi);
183 /* Pack an array of 4 words into a two-word integer.
184 WORDS points to the array of words.
185 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
188 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
191 *low = words[0] + words[1] * BASE;
192 *hi = words[2] + words[3] * BASE;
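
/* Illustrative sketch, not part of the original file: encode splits a
   doubleword into four half-words and decode packs them back, so a round
   trip reproduces the original LOW/HI pair.  The values are hypothetical.  */
#if 0
static void
encode_decode_example (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low = 0xdeadbeef, low2;
  HOST_WIDE_INT high = 42, high2;

  encode (words, low, high);
  decode (words, &low2, &high2);
  gcc_assert (low2 == low && high2 == high);
}
#endif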
195 /* Force the double-word integer L1, H1 to be within the range of the
196 integer type TYPE. Stores the properly truncated and sign-extended
197 double-word integer in *LV, *HV. Returns true if the operation
198 overflows, that is, argument and result are different. */
201 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
202 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
204 unsigned HOST_WIDE_INT low0 = l1;
205 HOST_WIDE_INT high0 = h1;
207 int sign_extended_type;
209 if (POINTER_TYPE_P (type)
210 || TREE_CODE (type) == OFFSET_TYPE)
213 prec = TYPE_PRECISION (type);
215 /* Size types *are* sign extended. */
216 sign_extended_type = (!TYPE_UNSIGNED (type)
217 || (TREE_CODE (type) == INTEGER_TYPE
218 && TYPE_IS_SIZETYPE (type)));
220 /* First clear all bits that are beyond the type's precision. */
221 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
223 else if (prec > HOST_BITS_PER_WIDE_INT)
224 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
228 if (prec < HOST_BITS_PER_WIDE_INT)
229 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
232 /* Then do sign extension if necessary. */
233 if (!sign_extended_type)
234 /* No sign extension */;
235 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
236 /* Correct width already. */;
237 else if (prec > HOST_BITS_PER_WIDE_INT)
239 /* Sign extend top half? */
240 if (h1 & ((unsigned HOST_WIDE_INT)1
241 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
242 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
244 else if (prec == HOST_BITS_PER_WIDE_INT)
246 if ((HOST_WIDE_INT)l1 < 0)
251 /* Sign extend bottom half? */
252 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
255 l1 |= (HOST_WIDE_INT)(-1) << prec;
262 /* If the value didn't fit, signal overflow. */
263 return l1 != low0 || h1 != high0;
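
/* Illustrative sketch, not part of the original file: forcing the value
   0x1ff into the 8-bit unsigned char type keeps only the low eight bits
   and reports that the original value did not fit.  */
#if 0
static void
fit_double_type_example (void)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  int overflowed = fit_double_type (0x1ff, 0, &low, &high,
				    unsigned_char_type_node);
  gcc_assert (overflowed && low == 0xff && high == 0);
}
#endif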
266 /* T is an INTEGER_CST node. OVERFLOWABLE indicates if we are interested
267 in overflow of the value; when >0 we are only interested in signed
268 overflow, for <0 we are interested in any overflow. OVERFLOWED
269 indicates whether overflow has already occurred. OVERFLOWED_CONST
270 indicates whether constant overflow has already occurred. We force
271 T's value to be within range of T's type (by setting to 0 or 1 all
272 the bits outside the type's range). We set TREE_OVERFLOW if
273 OVERFLOWED is nonzero,
274 or OVERFLOWABLE is >0 and signed overflow occurs,
275 or OVERFLOWABLE is <0 and any overflow occurs.
276 We set TREE_CONSTANT_OVERFLOW if
277 OVERFLOWED_CONST is nonzero
278 or we set TREE_OVERFLOW.
279 We return either the original T, or a copy. */
282 force_fit_type (tree t, int overflowable,
283 bool overflowed, bool overflowed_const)
285 unsigned HOST_WIDE_INT low;
287 int sign_extended_type;
290 gcc_assert (TREE_CODE (t) == INTEGER_CST);
292 /* Size types *are* sign extended. */
293 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
294 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
295 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
297 low = TREE_INT_CST_LOW (t);
298 high = TREE_INT_CST_HIGH (t);
300 overflow = fit_double_type (low, high, &low, &high, TREE_TYPE (t));
302 /* If the value changed, return a new node. */
303 if (overflowed || overflowed_const || overflow)
305 t = build_int_cst_wide (TREE_TYPE (t), low, high);
309 || (overflowable > 0 && sign_extended_type))
312 TREE_OVERFLOW (t) = 1;
313 TREE_CONSTANT_OVERFLOW (t) = 1;
315 else if (overflowed_const)
318 TREE_CONSTANT_OVERFLOW (t) = 1;
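
/* Illustrative sketch, not part of the original file: forcing the
   (deliberately out-of-range) constant 300 into unsigned char truncates
   it to 44 and, because OVERFLOWABLE is negative, marks the result with
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW.  */
#if 0
static void
force_fit_type_example (void)
{
  tree t = build_int_cst_wide (unsigned_char_type_node, 300, 0);
  t = force_fit_type (t, -1, false, false);
  /* TREE_INT_CST_LOW (t) == 44, TREE_OVERFLOW (t) and
     TREE_CONSTANT_OVERFLOW (t) are set.  */
}
#endif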
325 /* Add two doubleword integers with doubleword result.
326 Return nonzero if the operation overflows according to UNSIGNED_P.
327 Each argument is given as two `HOST_WIDE_INT' pieces.
328 One argument is L1 and H1; the other, L2 and H2.
329 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
332 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
333 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
334 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
337 unsigned HOST_WIDE_INT l;
341 h = h1 + h2 + (l < l1);
347 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
349 return OVERFLOW_SUM_SIGN (h1, h2, h);
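
/* Illustrative sketch, not part of the original file: adding 1 to a
   doubleword whose low word is all ones carries into the high word.
   Treated as signed, no overflow occurs because both high words are 0.  */
#if 0
static void
add_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = add_double_with_sign ((unsigned HOST_WIDE_INT) -1, 0,
				  1, 0, &lv, &hv, false);
  gcc_assert (lv == 0 && hv == 1 && !ovf);
}
#endif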
352 /* Negate a doubleword integer with doubleword result.
353 Return nonzero if the operation overflows, assuming it's signed.
354 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
355 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
358 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
359 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
365 return (*hv & h1) < 0;
375 /* Multiply two doubleword integers with doubleword result.
376 Return nonzero if the operation overflows according to UNSIGNED_P.
377 Each argument is given as two `HOST_WIDE_INT' pieces.
378 One argument is L1 and H1; the other, L2 and H2.
379 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
382 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
383 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
384 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
387 HOST_WIDE_INT arg1[4];
388 HOST_WIDE_INT arg2[4];
389 HOST_WIDE_INT prod[4 * 2];
390 unsigned HOST_WIDE_INT carry;
392 unsigned HOST_WIDE_INT toplow, neglow;
393 HOST_WIDE_INT tophigh, neghigh;
395 encode (arg1, l1, h1);
396 encode (arg2, l2, h2);
398 memset (prod, 0, sizeof prod);
400 for (i = 0; i < 4; i++)
403 for (j = 0; j < 4; j++)
406 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
407 carry += arg1[i] * arg2[j];
408 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
410 prod[k] = LOWPART (carry);
411 carry = HIGHPART (carry);
416 decode (prod, lv, hv);
417 decode (prod + 4, &toplow, &tophigh);
419 /* Unsigned overflow is immediate. */
421 return (toplow | tophigh) != 0;
423 /* Check for signed overflow by calculating the signed representation of the
424 top half of the result; it should agree with the low half's sign bit. */
427 neg_double (l2, h2, &neglow, &neghigh);
428 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
432 neg_double (l1, h1, &neglow, &neghigh);
433 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
435 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
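
/* Illustrative sketch, not part of the original file: an unsigned
   multiplication whose product needs more than one word.  The upper half
   of the product lands in the high word, and since nothing spills past
   the doubleword the function reports no overflow.  */
#if 0
static void
mul_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = mul_double_with_sign ((unsigned HOST_WIDE_INT) -1, 0,
				  2, 0, &lv, &hv, true);
  /* (2**HOST_BITS_PER_WIDE_INT - 1) * 2.  */
  gcc_assert (lv == (unsigned HOST_WIDE_INT) -2 && hv == 1 && !ovf);
}
#endif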
438 /* Shift the doubleword integer in L1, H1 left by COUNT places
439 keeping only PREC bits of result.
440 Shift right if COUNT is negative.
441 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
442 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
445 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
446 HOST_WIDE_INT count, unsigned int prec,
447 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
449 unsigned HOST_WIDE_INT signmask;
453 rshift_double (l1, h1, -count, prec, lv, hv, arith);
457 if (SHIFT_COUNT_TRUNCATED)
460 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
462 /* Shifting by the host word size is undefined according to the
463 ANSI standard, so we must handle this as a special case. */
467 else if (count >= HOST_BITS_PER_WIDE_INT)
469 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
474 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
475 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
479 /* Sign extend all bits that are beyond the precision. */
481 signmask = -((prec > HOST_BITS_PER_WIDE_INT
482 ? ((unsigned HOST_WIDE_INT) *hv
483 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
484 : (*lv >> (prec - 1))) & 1);
486 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
488 else if (prec >= HOST_BITS_PER_WIDE_INT)
490 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
491 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
496 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
497 *lv |= signmask << prec;
501 /* Shift the doubleword integer in L1, H1 right by COUNT places
502 keeping only PREC bits of result. COUNT must be positive.
503 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
504 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
507 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
508 HOST_WIDE_INT count, unsigned int prec,
509 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
512 unsigned HOST_WIDE_INT signmask;
515 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
518 if (SHIFT_COUNT_TRUNCATED)
521 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
523 /* Shifting by the host word size is undefined according to the
524 ANSI standard, so we must handle this as a special case. */
528 else if (count >= HOST_BITS_PER_WIDE_INT)
531 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
535 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
537 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
540 /* Zero / sign extend all bits that are beyond the precision. */
542 if (count >= (HOST_WIDE_INT)prec)
547 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
549 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
551 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
552 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
557 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
558 *lv |= signmask << (prec - count);
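
/* Illustrative sketch, not part of the original file: shifting the
   16-bit value -32768 (given in sign-extended doubleword form) right by
   one.  The arithmetic shift keeps the sign and yields -16384; the
   logical shift zero-fills and yields 16384.  */
#if 0
static void
rshift_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  rshift_double ((unsigned HOST_WIDE_INT) -32768, -1, 1, 16, &lv, &hv, 1);
  /* Arithmetic: lv == (unsigned HOST_WIDE_INT) -16384, hv == -1.  */
  rshift_double ((unsigned HOST_WIDE_INT) -32768, -1, 1, 16, &lv, &hv, 0);
  /* Logical: lv == 0x4000, hv == 0.  */
}
#endif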
562 /* Rotate the doubleword integer in L1, H1 left by COUNT places
563 keeping only PREC bits of result.
564 Rotate right if COUNT is negative.
565 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
568 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
569 HOST_WIDE_INT count, unsigned int prec,
570 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
572 unsigned HOST_WIDE_INT s1l, s2l;
573 HOST_WIDE_INT s1h, s2h;
579 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
580 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
585 /* Rotate the doubleword integer in L1, H1 right by COUNT places
586 keeping only PREC bits of result. COUNT must be positive.
587 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
590 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
591 HOST_WIDE_INT count, unsigned int prec,
592 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
594 unsigned HOST_WIDE_INT s1l, s2l;
595 HOST_WIDE_INT s1h, s2h;
601 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
602 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
607 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
608 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
609 CODE is a tree code for a kind of division, one of
610 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
612 It controls how the quotient is rounded to an integer.
613 Return nonzero if the operation overflows.
614 UNS nonzero says do unsigned division. */
617 div_and_round_double (enum tree_code code, int uns,
618 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
619 HOST_WIDE_INT hnum_orig,
620 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
621 HOST_WIDE_INT hden_orig,
622 unsigned HOST_WIDE_INT *lquo,
623 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
627 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
628 HOST_WIDE_INT den[4], quo[4];
630 unsigned HOST_WIDE_INT work;
631 unsigned HOST_WIDE_INT carry = 0;
632 unsigned HOST_WIDE_INT lnum = lnum_orig;
633 HOST_WIDE_INT hnum = hnum_orig;
634 unsigned HOST_WIDE_INT lden = lden_orig;
635 HOST_WIDE_INT hden = hden_orig;
638 if (hden == 0 && lden == 0)
639 overflow = 1, lden = 1;
641 /* Calculate quotient sign and convert operands to unsigned. */
647 /* (minimum integer) / (-1) is the only overflow case. */
648 if (neg_double (lnum, hnum, &lnum, &hnum)
649 && ((HOST_WIDE_INT) lden & hden) == -1)
655 neg_double (lden, hden, &lden, &hden);
659 if (hnum == 0 && hden == 0)
660 { /* single precision */
662 /* This unsigned division rounds toward zero. */
668 { /* trivial case: dividend < divisor */
669 /* hden != 0 already checked. */
676 memset (quo, 0, sizeof quo);
678 memset (num, 0, sizeof num); /* to zero the extra scaling element */
679 memset (den, 0, sizeof den);
681 encode (num, lnum, hnum);
682 encode (den, lden, hden);
684 /* Special code for when the divisor < BASE. */
685 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
687 /* hnum != 0 already checked. */
688 for (i = 4 - 1; i >= 0; i--)
690 work = num[i] + carry * BASE;
691 quo[i] = work / lden;
697 /* Full double precision division,
698 with thanks to Don Knuth's "Seminumerical Algorithms". */
699 int num_hi_sig, den_hi_sig;
700 unsigned HOST_WIDE_INT quo_est, scale;
702 /* Find the highest nonzero divisor digit. */
703 for (i = 4 - 1;; i--)
710 /* Ensure that the first digit of the divisor is at least BASE/2.
711 This is required by the quotient digit estimation algorithm. */
713 scale = BASE / (den[den_hi_sig] + 1);
715 { /* scale divisor and dividend */
717 for (i = 0; i <= 4 - 1; i++)
719 work = (num[i] * scale) + carry;
720 num[i] = LOWPART (work);
721 carry = HIGHPART (work);
726 for (i = 0; i <= 4 - 1; i++)
728 work = (den[i] * scale) + carry;
729 den[i] = LOWPART (work);
730 carry = HIGHPART (work);
731 if (den[i] != 0) den_hi_sig = i;
738 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
740 /* Guess the next quotient digit, quo_est, by dividing the first
741 two remaining dividend digits by the high order divisor digit.
742 quo_est is never low and is at most 2 high. */
743 unsigned HOST_WIDE_INT tmp;
745 num_hi_sig = i + den_hi_sig + 1;
746 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
747 if (num[num_hi_sig] != den[den_hi_sig])
748 quo_est = work / den[den_hi_sig];
752 /* Refine quo_est so it's usually correct, and at most one high. */
753 tmp = work - quo_est * den[den_hi_sig];
755 && (den[den_hi_sig - 1] * quo_est
756 > (tmp * BASE + num[num_hi_sig - 2])))
759 /* Try QUO_EST as the quotient digit, by multiplying the
760 divisor by QUO_EST and subtracting from the remaining dividend.
761 Keep in mind that QUO_EST is the I - 1st digit. */
764 for (j = 0; j <= den_hi_sig; j++)
766 work = quo_est * den[j] + carry;
767 carry = HIGHPART (work);
768 work = num[i + j] - LOWPART (work);
769 num[i + j] = LOWPART (work);
770 carry += HIGHPART (work) != 0;
773 /* If quo_est was high by one, then num[i] went negative and
774 we need to correct things. */
775 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
778 carry = 0; /* add divisor back in */
779 for (j = 0; j <= den_hi_sig; j++)
781 work = num[i + j] + den[j] + carry;
782 carry = HIGHPART (work);
783 num[i + j] = LOWPART (work);
786 num [num_hi_sig] += carry;
789 /* Store the quotient digit. */
794 decode (quo, lquo, hquo);
797 /* If result is negative, make it so. */
799 neg_double (*lquo, *hquo, lquo, hquo);
801 /* Compute trial remainder: rem = num - (quo * den) */
802 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
803 neg_double (*lrem, *hrem, lrem, hrem);
804 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
809 case TRUNC_MOD_EXPR: /* round toward zero */
810 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
814 case FLOOR_MOD_EXPR: /* round toward negative infinity */
815 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
818 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
826 case CEIL_MOD_EXPR: /* round toward positive infinity */
827 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
829 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
837 case ROUND_MOD_EXPR: /* round to closest integer */
839 unsigned HOST_WIDE_INT labs_rem = *lrem;
840 HOST_WIDE_INT habs_rem = *hrem;
841 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
842 HOST_WIDE_INT habs_den = hden, htwice;
844 /* Get absolute values. */
846 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
848 neg_double (lden, hden, &labs_den, &habs_den);
850 /* If (2 * abs (lrem) >= abs (lden)) */
851 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
852 labs_rem, habs_rem, <wice, &htwice);
854 if (((unsigned HOST_WIDE_INT) habs_den
855 < (unsigned HOST_WIDE_INT) htwice)
856 || (((unsigned HOST_WIDE_INT) habs_den
857 == (unsigned HOST_WIDE_INT) htwice)
858 && (labs_den < ltwice)))
862 add_double (*lquo, *hquo,
863 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
866 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
878 /* Compute true remainder: rem = num - (quo * den) */
879 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
880 neg_double (*lrem, *hrem, lrem, hrem);
881 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
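
/* Illustrative sketch, not part of the original file: dividing -7 by 2
   under two different rounding modes.  Truncation rounds toward zero and
   leaves remainder -1; floor division rounds toward negative infinity
   and leaves remainder +1.  */
#if 0
static void
div_and_round_double_example (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (TRUNC_DIV_EXPR, 0, (unsigned HOST_WIDE_INT) -7, -1,
			2, 0, &lquo, &hquo, &lrem, &hrem);
  /* Quotient -3, remainder -1.  */
  div_and_round_double (FLOOR_DIV_EXPR, 0, (unsigned HOST_WIDE_INT) -7, -1,
			2, 0, &lquo, &hquo, &lrem, &hrem);
  /* Quotient -4, remainder 1.  */
}
#endif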
885 /* If ARG2 divides ARG1 with zero remainder, carries out the division
886 of type CODE and returns the quotient.
887 Otherwise returns NULL_TREE. */
890 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
892 unsigned HOST_WIDE_INT int1l, int2l;
893 HOST_WIDE_INT int1h, int2h;
894 unsigned HOST_WIDE_INT quol, reml;
895 HOST_WIDE_INT quoh, remh;
896 tree type = TREE_TYPE (arg1);
897 int uns = TYPE_UNSIGNED (type);
899 int1l = TREE_INT_CST_LOW (arg1);
900 int1h = TREE_INT_CST_HIGH (arg1);
901 int2l = TREE_INT_CST_LOW (arg2);
902 int2h = TREE_INT_CST_HIGH (arg2);
904 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
905 &quol, &quoh, &reml, &remh);
906 if (remh != 0 || reml != 0)
909 return build_int_cst_wide (type, quol, quoh);
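
/* Illustrative sketch, not part of the original file: 12 divides evenly
   by 4, so an INTEGER_CST for the quotient 3 is returned; 13 does not,
   so the result is NULL_TREE.  */
#if 0
static void
div_if_zero_remainder_example (void)
{
  tree four = build_int_cst (integer_type_node, 4);
  tree q = div_if_zero_remainder (EXACT_DIV_EXPR,
				  build_int_cst (integer_type_node, 12), four);
  /* q is the INTEGER_CST 3.  */
  q = div_if_zero_remainder (EXACT_DIV_EXPR,
			     build_int_cst (integer_type_node, 13), four);
  /* q is NULL_TREE.  */
}
#endif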
912 /* Return true if the built-in mathematical function specified by CODE
913 is odd, i.e. -f(x) == f(-x). */
916 negate_mathfn_p (enum built_in_function code)
920 CASE_FLT_FN (BUILT_IN_ASIN):
921 CASE_FLT_FN (BUILT_IN_ASINH):
922 CASE_FLT_FN (BUILT_IN_ATAN):
923 CASE_FLT_FN (BUILT_IN_ATANH):
924 CASE_FLT_FN (BUILT_IN_CBRT):
925 CASE_FLT_FN (BUILT_IN_ERF):
926 CASE_FLT_FN (BUILT_IN_LLROUND):
927 CASE_FLT_FN (BUILT_IN_LROUND):
928 CASE_FLT_FN (BUILT_IN_ROUND):
929 CASE_FLT_FN (BUILT_IN_SIN):
930 CASE_FLT_FN (BUILT_IN_SINH):
931 CASE_FLT_FN (BUILT_IN_TAN):
932 CASE_FLT_FN (BUILT_IN_TANH):
933 CASE_FLT_FN (BUILT_IN_TRUNC):
936 CASE_FLT_FN (BUILT_IN_LLRINT):
937 CASE_FLT_FN (BUILT_IN_LRINT):
938 CASE_FLT_FN (BUILT_IN_NEARBYINT):
939 CASE_FLT_FN (BUILT_IN_RINT):
940 return !flag_rounding_math;
948 /* Check whether we may negate an integer constant T without causing
949 overflow. */
952 may_negate_without_overflow_p (tree t)
954 unsigned HOST_WIDE_INT val;
958 gcc_assert (TREE_CODE (t) == INTEGER_CST);
960 type = TREE_TYPE (t);
961 if (TYPE_UNSIGNED (type))
964 prec = TYPE_PRECISION (type);
965 if (prec > HOST_BITS_PER_WIDE_INT)
967 if (TREE_INT_CST_LOW (t) != 0)
969 prec -= HOST_BITS_PER_WIDE_INT;
970 val = TREE_INT_CST_HIGH (t);
973 val = TREE_INT_CST_LOW (t);
974 if (prec < HOST_BITS_PER_WIDE_INT)
975 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
976 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
979 /* Determine whether an expression T can be cheaply negated using
980 the function negate_expr without introducing undefined overflow. */
983 negate_expr_p (tree t)
990 type = TREE_TYPE (t);
993 switch (TREE_CODE (t))
996 if (TYPE_UNSIGNED (type)
997 || (flag_wrapv && ! flag_trapv))
1000 /* Check that -CST will not overflow type. */
1001 return may_negate_without_overflow_p (t);
1003 return INTEGRAL_TYPE_P (type)
1004 && (TYPE_UNSIGNED (type)
1005 || (flag_wrapv && !flag_trapv));
1012 return negate_expr_p (TREE_REALPART (t))
1013 && negate_expr_p (TREE_IMAGPART (t));
1016 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1017 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1019 /* -(A + B) -> (-B) - A. */
1020 if (negate_expr_p (TREE_OPERAND (t, 1))
1021 && reorder_operands_p (TREE_OPERAND (t, 0),
1022 TREE_OPERAND (t, 1)))
1024 /* -(A + B) -> (-A) - B. */
1025 return negate_expr_p (TREE_OPERAND (t, 0));
1028 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1029 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1030 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1031 && reorder_operands_p (TREE_OPERAND (t, 0),
1032 TREE_OPERAND (t, 1));
1035 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1041 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1042 return negate_expr_p (TREE_OPERAND (t, 1))
1043 || negate_expr_p (TREE_OPERAND (t, 0));
1046 case TRUNC_DIV_EXPR:
1047 case ROUND_DIV_EXPR:
1048 case FLOOR_DIV_EXPR:
1050 case EXACT_DIV_EXPR:
1051 if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
1053 return negate_expr_p (TREE_OPERAND (t, 1))
1054 || negate_expr_p (TREE_OPERAND (t, 0));
1057 /* Negate -((double)float) as (double)(-float). */
1058 if (TREE_CODE (type) == REAL_TYPE)
1060 tree tem = strip_float_extensions (t);
1062 return negate_expr_p (tem);
1067 /* Negate -f(x) as f(-x). */
1068 if (negate_mathfn_p (builtin_mathfn_code (t)))
1069 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1073 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1074 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1076 tree op1 = TREE_OPERAND (t, 1);
1077 if (TREE_INT_CST_HIGH (op1) == 0
1078 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1079 == TREE_INT_CST_LOW (op1))
1090 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1091 simplification is possible.
1092 If negate_expr_p would return true for T, NULL_TREE will never be returned. */
1096 fold_negate_expr (tree t)
1098 tree type = TREE_TYPE (t);
1101 switch (TREE_CODE (t))
1103 /* Convert - (~A) to A + 1. */
1105 if (INTEGRAL_TYPE_P (type))
1106 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1107 build_int_cst (type, 1));
1111 tem = fold_negate_const (t, type);
1112 if (! TREE_OVERFLOW (tem)
1113 || TYPE_UNSIGNED (type)
1119 tem = fold_negate_const (t, type);
1120 /* Two's complement FP formats, such as c4x, may overflow. */
1121 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1127 tree rpart = negate_expr (TREE_REALPART (t));
1128 tree ipart = negate_expr (TREE_IMAGPART (t));
1130 if ((TREE_CODE (rpart) == REAL_CST
1131 && TREE_CODE (ipart) == REAL_CST)
1132 || (TREE_CODE (rpart) == INTEGER_CST
1133 && TREE_CODE (ipart) == INTEGER_CST))
1134 return build_complex (type, rpart, ipart);
1139 return TREE_OPERAND (t, 0);
1142 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1143 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1145 /* -(A + B) -> (-B) - A. */
1146 if (negate_expr_p (TREE_OPERAND (t, 1))
1147 && reorder_operands_p (TREE_OPERAND (t, 0),
1148 TREE_OPERAND (t, 1)))
1150 tem = negate_expr (TREE_OPERAND (t, 1));
1151 return fold_build2 (MINUS_EXPR, type,
1152 tem, TREE_OPERAND (t, 0));
1155 /* -(A + B) -> (-A) - B. */
1156 if (negate_expr_p (TREE_OPERAND (t, 0)))
1158 tem = negate_expr (TREE_OPERAND (t, 0));
1159 return fold_build2 (MINUS_EXPR, type,
1160 tem, TREE_OPERAND (t, 1));
1166 /* - (A - B) -> B - A */
1167 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1168 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1169 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1170 return fold_build2 (MINUS_EXPR, type,
1171 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1175 if (TYPE_UNSIGNED (type))
1181 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1183 tem = TREE_OPERAND (t, 1);
1184 if (negate_expr_p (tem))
1185 return fold_build2 (TREE_CODE (t), type,
1186 TREE_OPERAND (t, 0), negate_expr (tem));
1187 tem = TREE_OPERAND (t, 0);
1188 if (negate_expr_p (tem))
1189 return fold_build2 (TREE_CODE (t), type,
1190 negate_expr (tem), TREE_OPERAND (t, 1));
1194 case TRUNC_DIV_EXPR:
1195 case ROUND_DIV_EXPR:
1196 case FLOOR_DIV_EXPR:
1198 case EXACT_DIV_EXPR:
1199 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
1201 tem = TREE_OPERAND (t, 1);
1202 if (negate_expr_p (tem))
1203 return fold_build2 (TREE_CODE (t), type,
1204 TREE_OPERAND (t, 0), negate_expr (tem));
1205 tem = TREE_OPERAND (t, 0);
1206 if (negate_expr_p (tem))
1207 return fold_build2 (TREE_CODE (t), type,
1208 negate_expr (tem), TREE_OPERAND (t, 1));
1213 /* Convert -((double)float) into (double)(-float). */
1214 if (TREE_CODE (type) == REAL_TYPE)
1216 tem = strip_float_extensions (t);
1217 if (tem != t && negate_expr_p (tem))
1218 return negate_expr (tem);
1223 /* Negate -f(x) as f(-x). */
1224 if (negate_mathfn_p (builtin_mathfn_code (t))
1225 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1227 tree fndecl, arg, arglist;
1229 fndecl = get_callee_fndecl (t);
1230 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1231 arglist = build_tree_list (NULL_TREE, arg);
1232 return build_function_call_expr (fndecl, arglist);
1237 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1238 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1240 tree op1 = TREE_OPERAND (t, 1);
1241 if (TREE_INT_CST_HIGH (op1) == 0
1242 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1243 == TREE_INT_CST_LOW (op1))
1245 tree ntype = TYPE_UNSIGNED (type)
1246 ? lang_hooks.types.signed_type (type)
1247 : lang_hooks.types.unsigned_type (type);
1248 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1249 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1250 return fold_convert (type, temp);
1262 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
1263 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1264 return NULL_TREE. */
1267 negate_expr (tree t)
1274 type = TREE_TYPE (t);
1275 STRIP_SIGN_NOPS (t);
1277 tem = fold_negate_expr (t);
1279 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1280 return fold_convert (type, tem);
1283 /* Split a tree IN into constant, literal and variable parts that could be
1284 combined with CODE to make IN. "constant" means an expression with
1285 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1286 commutative arithmetic operation. Store the constant part into *CONP,
1287 the literal in *LITP and return the variable part. If a part isn't
1288 present, set it to null. If the tree does not decompose in this way,
1289 return the entire tree as the variable part and the other parts as null.
1291 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1292 case, we negate an operand that was subtracted, unless it is a
1293 literal, for which we use *MINUS_LITP instead.
1295 If NEGATE_P is true, we are negating all of IN, again except a literal
1296 for which we use *MINUS_LITP instead.
1298 If IN is itself a literal or constant, return it as appropriate.
1300 Note that we do not guarantee that any of the three values will be the
1301 same type as IN, but they will have the same signedness and mode. */
1304 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1305 tree *minus_litp, int negate_p)
1313 /* Strip any conversions that don't change the machine mode or signedness. */
1314 STRIP_SIGN_NOPS (in);
1316 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1318 else if (TREE_CODE (in) == code
1319 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1320 /* We can associate addition and subtraction together (even
1321 though the C standard doesn't say so) for integers because
1322 the value is not affected. For reals, the value might be
1323 affected, so we can't. */
1324 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1325 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1327 tree op0 = TREE_OPERAND (in, 0);
1328 tree op1 = TREE_OPERAND (in, 1);
1329 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1330 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1332 /* First see if either of the operands is a literal, then a constant. */
1333 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1334 *litp = op0, op0 = 0;
1335 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1336 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1338 if (op0 != 0 && TREE_CONSTANT (op0))
1339 *conp = op0, op0 = 0;
1340 else if (op1 != 0 && TREE_CONSTANT (op1))
1341 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1343 /* If we haven't dealt with either operand, this is not a case we can
1344 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1345 if (op0 != 0 && op1 != 0)
1350 var = op1, neg_var_p = neg1_p;
1352 /* Now do any needed negations. */
1354 *minus_litp = *litp, *litp = 0;
1356 *conp = negate_expr (*conp);
1358 var = negate_expr (var);
1360 else if (TREE_CONSTANT (in))
1368 *minus_litp = *litp, *litp = 0;
1369 else if (*minus_litp)
1370 *litp = *minus_litp, *minus_litp = 0;
1371 *conp = negate_expr (*conp);
1372 var = negate_expr (var);
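
/* Illustrative sketch, not part of the original file: splitting the
   hypothetical expression VAR + 5 with CODE == PLUS_EXPR.  The literal 5
   ends up in *LITP, there is no separate TREE_CONSTANT part, and the
   variable part VAR is returned.  */
#if 0
static void
split_tree_example (tree var)
{
  tree conp, litp, minus_litp;
  tree in = build2 (PLUS_EXPR, integer_type_node, var,
		    build_int_cst (integer_type_node, 5));
  tree varp = split_tree (in, PLUS_EXPR, &conp, &litp, &minus_litp, 0);
  /* varp == var, litp is the INTEGER_CST 5, conp and minus_litp are null.  */
}
#endif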
1378 /* Re-associate trees split by the above function. T1 and T2 are either
1379 expressions to associate or null. Return the new expression, if any. If
1380 we build an operation, do it in TYPE and with CODE. */
1383 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1390 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1391 try to fold this since we will have infinite recursion. But do
1392 deal with any NEGATE_EXPRs. */
1393 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1394 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1396 if (code == PLUS_EXPR)
1398 if (TREE_CODE (t1) == NEGATE_EXPR)
1399 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1400 fold_convert (type, TREE_OPERAND (t1, 0)));
1401 else if (TREE_CODE (t2) == NEGATE_EXPR)
1402 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1403 fold_convert (type, TREE_OPERAND (t2, 0)));
1404 else if (integer_zerop (t2))
1405 return fold_convert (type, t1);
1407 else if (code == MINUS_EXPR)
1409 if (integer_zerop (t2))
1410 return fold_convert (type, t1);
1413 return build2 (code, type, fold_convert (type, t1),
1414 fold_convert (type, t2));
1417 return fold_build2 (code, type, fold_convert (type, t1),
1418 fold_convert (type, t2));
1421 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1422 for use in int_const_binop, size_binop and size_diffop. */
1425 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
1427 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1429 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1444 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1445 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1446 && TYPE_MODE (type1) == TYPE_MODE (type2);
1450 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1451 to produce a new constant. Return NULL_TREE if we don't know how
1452 to evaluate CODE at compile-time.
1454 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1457 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1459 unsigned HOST_WIDE_INT int1l, int2l;
1460 HOST_WIDE_INT int1h, int2h;
1461 unsigned HOST_WIDE_INT low;
1463 unsigned HOST_WIDE_INT garbagel;
1464 HOST_WIDE_INT garbageh;
1466 tree type = TREE_TYPE (arg1);
1467 int uns = TYPE_UNSIGNED (type);
1469 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1472 int1l = TREE_INT_CST_LOW (arg1);
1473 int1h = TREE_INT_CST_HIGH (arg1);
1474 int2l = TREE_INT_CST_LOW (arg2);
1475 int2h = TREE_INT_CST_HIGH (arg2);
1480 low = int1l | int2l, hi = int1h | int2h;
1484 low = int1l ^ int2l, hi = int1h ^ int2h;
1488 low = int1l & int2l, hi = int1h & int2h;
1494 /* It's unclear from the C standard whether shifts can overflow.
1495 The following code ignores overflow; perhaps a C standard
1496 interpretation ruling is needed. */
1497 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1504 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1509 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1513 neg_double (int2l, int2h, &low, &hi);
1514 add_double (int1l, int1h, low, hi, &low, &hi);
1515 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1519 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1522 case TRUNC_DIV_EXPR:
1523 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1524 case EXACT_DIV_EXPR:
1525 /* This is a shortcut for a common special case. */
1526 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1527 && ! TREE_CONSTANT_OVERFLOW (arg1)
1528 && ! TREE_CONSTANT_OVERFLOW (arg2)
1529 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1531 if (code == CEIL_DIV_EXPR)
1534 low = int1l / int2l, hi = 0;
1538 /* ... fall through ... */
1540 case ROUND_DIV_EXPR:
1541 if (int2h == 0 && int2l == 0)
1543 if (int2h == 0 && int2l == 1)
1545 low = int1l, hi = int1h;
1548 if (int1l == int2l && int1h == int2h
1549 && ! (int1l == 0 && int1h == 0))
1554 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1555 &low, &hi, &garbagel, &garbageh);
1558 case TRUNC_MOD_EXPR:
1559 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1560 /* This is a shortcut for a common special case. */
1561 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1562 && ! TREE_CONSTANT_OVERFLOW (arg1)
1563 && ! TREE_CONSTANT_OVERFLOW (arg2)
1564 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1566 if (code == CEIL_MOD_EXPR)
1568 low = int1l % int2l, hi = 0;
1572 /* ... fall through ... */
1574 case ROUND_MOD_EXPR:
1575 if (int2h == 0 && int2l == 0)
1577 overflow = div_and_round_double (code, uns,
1578 int1l, int1h, int2l, int2h,
1579 &garbagel, &garbageh, &low, &hi);
1585 low = (((unsigned HOST_WIDE_INT) int1h
1586 < (unsigned HOST_WIDE_INT) int2h)
1587 || (((unsigned HOST_WIDE_INT) int1h
1588 == (unsigned HOST_WIDE_INT) int2h)
1591 low = (int1h < int2h
1592 || (int1h == int2h && int1l < int2l));
1594 if (low == (code == MIN_EXPR))
1595 low = int1l, hi = int1h;
1597 low = int2l, hi = int2h;
1604 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1608 /* Propagate overflow flags ourselves. */
1609 if (((!uns || is_sizetype) && overflow)
1610 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1613 TREE_OVERFLOW (t) = 1;
1614 TREE_CONSTANT_OVERFLOW (t) = 1;
1616 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1619 TREE_CONSTANT_OVERFLOW (t) = 1;
1623 t = force_fit_type (t, 1,
1624 ((!uns || is_sizetype) && overflow)
1625 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1626 TREE_CONSTANT_OVERFLOW (arg1)
1627 | TREE_CONSTANT_OVERFLOW (arg2));
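
/* Illustrative sketch, not part of the original file: combining the two
   INTEGER_CST nodes 2 and 3 under PLUS_EXPR yields the INTEGER_CST 5
   with no overflow flags set.  */
#if 0
static void
int_const_binop_example (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  tree five = int_const_binop (PLUS_EXPR, two, three, 0);
  /* TREE_INT_CST_LOW (five) == 5 and TREE_OVERFLOW (five) is clear.  */
}
#endif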
1632 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1633 constant. We assume ARG1 and ARG2 have the same data type, or at least
1634 are the same kind of constant and the same machine mode. Return zero if
1635 combining the constants is not allowed in the current operating mode.
1637 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1640 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1642 /* Sanity check for the recursive cases. */
1649 if (TREE_CODE (arg1) == INTEGER_CST)
1650 return int_const_binop (code, arg1, arg2, notrunc);
1652 if (TREE_CODE (arg1) == REAL_CST)
1654 enum machine_mode mode;
1657 REAL_VALUE_TYPE value;
1658 REAL_VALUE_TYPE result;
1662 /* The following codes are handled by real_arithmetic. */
1677 d1 = TREE_REAL_CST (arg1);
1678 d2 = TREE_REAL_CST (arg2);
1680 type = TREE_TYPE (arg1);
1681 mode = TYPE_MODE (type);
1683 /* Don't perform operation if we honor signaling NaNs and
1684 either operand is a NaN. */
1685 if (HONOR_SNANS (mode)
1686 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1689 /* Don't perform operation if it would raise a division
1690 by zero exception. */
1691 if (code == RDIV_EXPR
1692 && REAL_VALUES_EQUAL (d2, dconst0)
1693 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1696 /* If either operand is a NaN, just return it. Otherwise, set up
1697 for floating-point trap; we return an overflow. */
1698 if (REAL_VALUE_ISNAN (d1))
1700 else if (REAL_VALUE_ISNAN (d2))
1703 inexact = real_arithmetic (&value, code, &d1, &d2);
1704 real_convert (&result, mode, &value);
1706 /* Don't constant fold this floating point operation if
1707 the result has overflowed and flag_trapping_math. */
1708 if (flag_trapping_math
1709 && MODE_HAS_INFINITIES (mode)
1710 && REAL_VALUE_ISINF (result)
1711 && !REAL_VALUE_ISINF (d1)
1712 && !REAL_VALUE_ISINF (d2))
1715 /* Don't constant fold this floating point operation if the
1716 result may depend upon the run-time rounding mode and
1717 flag_rounding_math is set, or if GCC's software emulation
1718 is unable to accurately represent the result. */
1719 if ((flag_rounding_math
1720 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1721 && !flag_unsafe_math_optimizations))
1722 && (inexact || !real_identical (&result, &value)))
1725 t = build_real (type, result);
1727 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1728 TREE_CONSTANT_OVERFLOW (t)
1730 | TREE_CONSTANT_OVERFLOW (arg1)
1731 | TREE_CONSTANT_OVERFLOW (arg2);
1735 if (TREE_CODE (arg1) == COMPLEX_CST)
1737 tree type = TREE_TYPE (arg1);
1738 tree r1 = TREE_REALPART (arg1);
1739 tree i1 = TREE_IMAGPART (arg1);
1740 tree r2 = TREE_REALPART (arg2);
1741 tree i2 = TREE_IMAGPART (arg2);
1748 real = const_binop (code, r1, r2, notrunc);
1749 imag = const_binop (code, i1, i2, notrunc);
1753 real = const_binop (MINUS_EXPR,
1754 const_binop (MULT_EXPR, r1, r2, notrunc),
1755 const_binop (MULT_EXPR, i1, i2, notrunc),
1757 imag = const_binop (PLUS_EXPR,
1758 const_binop (MULT_EXPR, r1, i2, notrunc),
1759 const_binop (MULT_EXPR, i1, r2, notrunc),
1766 = const_binop (PLUS_EXPR,
1767 const_binop (MULT_EXPR, r2, r2, notrunc),
1768 const_binop (MULT_EXPR, i2, i2, notrunc),
1771 = const_binop (PLUS_EXPR,
1772 const_binop (MULT_EXPR, r1, r2, notrunc),
1773 const_binop (MULT_EXPR, i1, i2, notrunc),
1776 = const_binop (MINUS_EXPR,
1777 const_binop (MULT_EXPR, i1, r2, notrunc),
1778 const_binop (MULT_EXPR, r1, i2, notrunc),
1781 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1782 code = TRUNC_DIV_EXPR;
1784 real = const_binop (code, t1, magsquared, notrunc);
1785 imag = const_binop (code, t2, magsquared, notrunc);
1794 return build_complex (type, real, imag);
1800 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1801 indicates which particular sizetype to create. */
1804 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1806 return build_int_cst (sizetype_tab[(int) kind], number);
1809 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1810 is a tree code. The type of the result is taken from the operands.
1811 Both must be equivalent integer types, as tested by int_binop_types_match_p.
1812 If the operands are constant, so is the result. */
1815 size_binop (enum tree_code code, tree arg0, tree arg1)
1817 tree type = TREE_TYPE (arg0);
1819 if (arg0 == error_mark_node || arg1 == error_mark_node)
1820 return error_mark_node;
1822 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1825 /* Handle the special case of two integer constants faster. */
1826 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1828 /* And some specific cases even faster than that. */
1829 if (code == PLUS_EXPR && integer_zerop (arg0))
1831 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1832 && integer_zerop (arg1))
1834 else if (code == MULT_EXPR && integer_onep (arg0))
1837 /* Handle general case of two integer constants. */
1838 return int_const_binop (code, arg0, arg1, 0);
1841 return fold_build2 (code, type, arg0, arg1);
1844 /* Given two values, either both of sizetype or both of bitsizetype,
1845 compute the difference between the two values. Return the value
1846 in the signed type corresponding to the type of the operands.
1849 size_diffop (tree arg0, tree arg1)
1851 tree type = TREE_TYPE (arg0);
1854 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1857 /* If the type is already signed, just do the simple thing. */
1858 if (!TYPE_UNSIGNED (type))
1859 return size_binop (MINUS_EXPR, arg0, arg1);
1861 if (type == sizetype)
1863 else if (type == bitsizetype)
1864 ctype = sbitsizetype;
1866 ctype = lang_hooks.types.signed_type (type);
1868 /* If either operand is not a constant, do the conversions to the signed
1869 type and subtract. The hardware will do the right thing with any
1870 overflow in the subtraction. */
1871 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1872 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1873 fold_convert (ctype, arg1));
1875 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1876 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1877 overflow) and negate (which can't either). Special-case a result
1878 of zero while we're here. */
1879 if (tree_int_cst_equal (arg0, arg1))
1880 return build_int_cst (ctype, 0);
1881 else if (tree_int_cst_lt (arg1, arg0))
1882 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1884 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1885 fold_convert (ctype, size_binop (MINUS_EXPR,
1889 /* A subroutine of fold_convert_const handling conversions of an
1890 INTEGER_CST to another integer type. */
1893 fold_convert_const_int_from_int (tree type, tree arg1)
1897 /* Given an integer constant, make a new constant with the new type,
1898 appropriately sign-extended or truncated. */
1899 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1900 TREE_INT_CST_HIGH (arg1));
1902 t = force_fit_type (t,
1903 /* Don't set the overflow when
1904 converting a pointer */
1905 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1906 (TREE_INT_CST_HIGH (arg1) < 0
1907 && (TYPE_UNSIGNED (type)
1908 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1909 | TREE_OVERFLOW (arg1),
1910 TREE_CONSTANT_OVERFLOW (arg1));
1915 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1916 to an integer type. */
1919 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1924 /* The following code implements the floating point to integer
1925 conversion rules required by the Java Language Specification,
1926 that IEEE NaNs are mapped to zero and values that overflow
1927 the target precision saturate, i.e. values greater than
1928 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1929 are mapped to INT_MIN. These semantics are allowed by the
1930 C and C++ standards that simply state that the behavior of
1931 FP-to-integer conversion is unspecified upon overflow. */
1933 HOST_WIDE_INT high, low;
1935 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1939 case FIX_TRUNC_EXPR:
1940 real_trunc (&r, VOIDmode, &x);
1947 /* If R is NaN, return zero and show we have an overflow. */
1948 if (REAL_VALUE_ISNAN (r))
1955 /* See if R is less than the lower bound or greater than the upper bound. */
1960 tree lt = TYPE_MIN_VALUE (type);
1961 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1962 if (REAL_VALUES_LESS (r, l))
1965 high = TREE_INT_CST_HIGH (lt);
1966 low = TREE_INT_CST_LOW (lt);
1972 tree ut = TYPE_MAX_VALUE (type);
1975 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1976 if (REAL_VALUES_LESS (u, r))
1979 high = TREE_INT_CST_HIGH (ut);
1980 low = TREE_INT_CST_LOW (ut);
1986 REAL_VALUE_TO_INT (&low, &high, r);
1988 t = build_int_cst_wide (type, low, high);
1990 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1991 TREE_CONSTANT_OVERFLOW (arg1));
1995 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1996 to another floating point type. */
1999 fold_convert_const_real_from_real (tree type, tree arg1)
2001 REAL_VALUE_TYPE value;
2004 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2005 t = build_real (type, value);
2007 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2008 TREE_CONSTANT_OVERFLOW (t)
2009 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2013 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2014 type TYPE. If no simplification can be done return NULL_TREE. */
2017 fold_convert_const (enum tree_code code, tree type, tree arg1)
2019 if (TREE_TYPE (arg1) == type)
2022 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2024 if (TREE_CODE (arg1) == INTEGER_CST)
2025 return fold_convert_const_int_from_int (type, arg1);
2026 else if (TREE_CODE (arg1) == REAL_CST)
2027 return fold_convert_const_int_from_real (code, type, arg1);
2029 else if (TREE_CODE (type) == REAL_TYPE)
2031 if (TREE_CODE (arg1) == INTEGER_CST)
2032 return build_real_from_int_cst (type, arg1);
2033 if (TREE_CODE (arg1) == REAL_CST)
2034 return fold_convert_const_real_from_real (type, arg1);
2039 /* Construct a vector of zero elements of vector type TYPE. */
2042 build_zero_vector (tree type)
2047 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2048 units = TYPE_VECTOR_SUBPARTS (type);
2051 for (i = 0; i < units; i++)
2052 list = tree_cons (NULL_TREE, elem, list);
2053 return build_vector (type, list);
2056 /* Convert expression ARG to type TYPE. Used by the middle-end for
2057 simple conversions in preference to calling the front-end's convert. */
2060 fold_convert (tree type, tree arg)
2062 tree orig = TREE_TYPE (arg);
2068 if (TREE_CODE (arg) == ERROR_MARK
2069 || TREE_CODE (type) == ERROR_MARK
2070 || TREE_CODE (orig) == ERROR_MARK)
2071 return error_mark_node;
2073 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2074 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2075 TYPE_MAIN_VARIANT (orig)))
2076 return fold_build1 (NOP_EXPR, type, arg);
2078 switch (TREE_CODE (type))
2080 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2081 case POINTER_TYPE: case REFERENCE_TYPE:
2083 if (TREE_CODE (arg) == INTEGER_CST)
2085 tem = fold_convert_const (NOP_EXPR, type, arg);
2086 if (tem != NULL_TREE)
2089 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2090 || TREE_CODE (orig) == OFFSET_TYPE)
2091 return fold_build1 (NOP_EXPR, type, arg);
2092 if (TREE_CODE (orig) == COMPLEX_TYPE)
2094 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2095 return fold_convert (type, tem);
2097 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2098 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2099 return fold_build1 (NOP_EXPR, type, arg);
2102 if (TREE_CODE (arg) == INTEGER_CST)
2104 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2105 if (tem != NULL_TREE)
2108 else if (TREE_CODE (arg) == REAL_CST)
2110 tem = fold_convert_const (NOP_EXPR, type, arg);
2111 if (tem != NULL_TREE)
2115 switch (TREE_CODE (orig))
2118 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2119 case POINTER_TYPE: case REFERENCE_TYPE:
2120 return fold_build1 (FLOAT_EXPR, type, arg);
2123 return fold_build1 (NOP_EXPR, type, arg);
2126 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2127 return fold_convert (type, tem);
2134 switch (TREE_CODE (orig))
2137 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2138 case POINTER_TYPE: case REFERENCE_TYPE:
2140 return build2 (COMPLEX_EXPR, type,
2141 fold_convert (TREE_TYPE (type), arg),
2142 fold_convert (TREE_TYPE (type), integer_zero_node));
2147 if (TREE_CODE (arg) == COMPLEX_EXPR)
2149 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2150 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2151 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2154 arg = save_expr (arg);
2155 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2156 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2157 rpart = fold_convert (TREE_TYPE (type), rpart);
2158 ipart = fold_convert (TREE_TYPE (type), ipart);
2159 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2167 if (integer_zerop (arg))
2168 return build_zero_vector (type);
2169 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2170 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2171 || TREE_CODE (orig) == VECTOR_TYPE);
2172 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2175 tem = fold_ignored_result (arg);
2176 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2178 return fold_build1 (NOP_EXPR, type, tem);
2185 /* Return false if expr can be assumed not to be an lvalue, true otherwise. */
2189 maybe_lvalue_p (tree x)
2191 /* We only need to wrap lvalue tree codes. */
2192 switch (TREE_CODE (x))
2203 case ALIGN_INDIRECT_REF:
2204 case MISALIGNED_INDIRECT_REF:
2206 case ARRAY_RANGE_REF:
2212 case PREINCREMENT_EXPR:
2213 case PREDECREMENT_EXPR:
2215 case TRY_CATCH_EXPR:
2216 case WITH_CLEANUP_EXPR:
2219 case GIMPLE_MODIFY_STMT:
2228 /* Assume the worst for front-end tree codes. */
2229 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2237 /* Return an expr equal to X but certainly not valid as an lvalue. */
2242 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to us. */
2247 if (! maybe_lvalue_p (x))
2249 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2252 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2253 Zero means allow extended lvalues. */
2255 int pedantic_lvalues;
2257 /* When pedantic, return an expr equal to X but certainly not valid as a
2258 pedantic lvalue. Otherwise, return X. */
2261 pedantic_non_lvalue (tree x)
2263 if (pedantic_lvalues)
2264 return non_lvalue (x);
2269 /* Given a tree comparison code, return the code that is the logical inverse
2270 of the given code. It is not safe to do this for floating-point
2271 comparisons, except for NE_EXPR and EQ_EXPR, so we also receive a flag
2272 saying whether NaNs are honored: if reversing the comparison is unsafe, return ERROR_MARK. */
2275 invert_tree_comparison (enum tree_code code, bool honor_nans)
2277 if (honor_nans && flag_trapping_math)
2287 return honor_nans ? UNLE_EXPR : LE_EXPR;
2289 return honor_nans ? UNLT_EXPR : LT_EXPR;
2291 return honor_nans ? UNGE_EXPR : GE_EXPR;
2293 return honor_nans ? UNGT_EXPR : GT_EXPR;
2307 return UNORDERED_EXPR;
2308 case UNORDERED_EXPR:
2309 return ORDERED_EXPR;
2315 /* Similar, but return the comparison that results if the operands are
2316 swapped. This is safe for floating-point. */
2319 swap_tree_comparison (enum tree_code code)
2326 case UNORDERED_EXPR:
2352 /* Convert a comparison tree code from an enum tree_code representation
2353 into a compcode bit-based encoding. This function is the inverse of
2354 compcode_to_comparison. */
2356 static enum comparison_code
2357 comparison_to_compcode (enum tree_code code)
2374 return COMPCODE_ORD;
2375 case UNORDERED_EXPR:
2376 return COMPCODE_UNORD;
2378 return COMPCODE_UNLT;
2380 return COMPCODE_UNEQ;
2382 return COMPCODE_UNLE;
2384 return COMPCODE_UNGT;
2386 return COMPCODE_LTGT;
2388 return COMPCODE_UNGE;
2394 /* Convert a compcode bit-based encoding of a comparison operator back
2395 to GCC's enum tree_code representation. This function is the
2396 inverse of comparison_to_compcode. */
2398 static enum tree_code
2399 compcode_to_comparison (enum comparison_code code)
2416 return ORDERED_EXPR;
2417 case COMPCODE_UNORD:
2418 return UNORDERED_EXPR;
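/* As an illustration (the exact numeric values live in enum comparison_code
   above): each of LT, EQ, GT and UNORDERED has its own bit, so composite
   codes are simply bitwise ORs, e.g. COMPCODE_LE is COMPCODE_LT | COMPCODE_EQ
   and COMPCODE_NE is COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD.  ANDing two
   compcodes therefore intersects the sets of relations they accept, and ORing
   them takes the union, which is what combine_comparisons below relies on.  */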
2436 /* Return a tree for the comparison which is the combination of
2437 doing the AND or OR (depending on CODE) of the two operations LCODE
2438 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2439 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2440 if this makes the transformation invalid. */
2443 combine_comparisons (enum tree_code code, enum tree_code lcode,
2444 enum tree_code rcode, tree truth_type,
2445 tree ll_arg, tree lr_arg)
2447 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2448 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2449 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2450 enum comparison_code compcode;
2454 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2455 compcode = lcompcode & rcompcode;
2458 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2459 compcode = lcompcode | rcompcode;
2468 /* Eliminate unordered comparisons, as well as LTGT and ORD
2469 which are not used unless the mode has NaNs. */
2470 compcode &= ~COMPCODE_UNORD;
2471 if (compcode == COMPCODE_LTGT)
2472 compcode = COMPCODE_NE;
2473 else if (compcode == COMPCODE_ORD)
2474 compcode = COMPCODE_TRUE;
2476 else if (flag_trapping_math)
2478 /* Check that the original operation and the optimized ones will trap
2479 under the same condition. */
2480 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2481 && (lcompcode != COMPCODE_EQ)
2482 && (lcompcode != COMPCODE_ORD);
2483 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2484 && (rcompcode != COMPCODE_EQ)
2485 && (rcompcode != COMPCODE_ORD);
2486 bool trap = (compcode & COMPCODE_UNORD) == 0
2487 && (compcode != COMPCODE_EQ)
2488 && (compcode != COMPCODE_ORD);
2490 /* In a short-circuited boolean expression the LHS might be
2491 such that the RHS, if evaluated, will never trap. For
2492 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2493 if neither x nor y is NaN. (This is a mixed blessing: for
2494 example, the expression above will never trap, hence
2495 optimizing it to x < y would be invalid). */
2496 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2497 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2500 /* If the comparison was short-circuited, and only the RHS
2501 trapped, we may now generate a spurious trap. */
2503 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2506 /* If we changed the conditions that cause a trap, we lose. */
2507 if ((ltrap || rtrap) != trap)
2511 if (compcode == COMPCODE_TRUE)
2512 return constant_boolean_node (true, truth_type);
2513 else if (compcode == COMPCODE_FALSE)
2514 return constant_boolean_node (false, truth_type);
2516 return fold_build2 (compcode_to_comparison (compcode),
2517 truth_type, ll_arg, lr_arg);
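/* Two worked examples of the combination above for integer operands, where
   NaNs are not an issue: "a < b && a == b" combines COMPCODE_LT with
   COMPCODE_EQ under TRUTH_AND_EXPR, giving COMPCODE_FALSE, so the whole
   expression folds to constant false; "a < b || a == b" gives COMPCODE_LE
   and folds to "a <= b".  */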
2520 /* Return nonzero if CODE is a tree code that represents a truth value. */
2523 truth_value_p (enum tree_code code)
2525 return (TREE_CODE_CLASS (code) == tcc_comparison
2526 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2527 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2528 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2531 /* Return nonzero if two operands (typically of the same tree node)
2532 are necessarily equal. If either argument has side-effects this
2533 function returns zero. FLAGS modifies behavior as follows:
2535 If OEP_ONLY_CONST is set, only return nonzero for constants.
2536 This function tests whether the operands are indistinguishable;
2537 it does not test whether they are equal using C's == operation.
2538 The distinction is important for IEEE floating point, because
2539 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2540 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2542 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2543 even though it may hold multiple values during a function.
2544 This is because a GCC tree node guarantees that nothing else is
2545 executed between the evaluation of its "operands" (which may often
2546 be evaluated in arbitrary order). Hence if the operands themselves
2547 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2548 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2549 unset means assuming isochronic (or instantaneous) tree equivalence.
2550 Unless comparing arbitrary expression trees, such as from different
2551 statements, this flag can usually be left unset.
2553 If OEP_PURE_SAME is set, then pure functions with identical arguments
2554 are considered the same. It is used when the caller has other ways
2555 to ensure that global memory is unchanged in between. */
2558 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2560 /* If either is ERROR_MARK, they aren't equal. */
2561 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2564 /* If the two types don't have the same signedness, then we can't consider
2565 them equal. We must check this before the STRIP_NOPS calls
2566 because they may change the signedness of the arguments. */
2567 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2570 /* If the two types don't have the same precision, then it is not safe
2572 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2578 /* In case both args are comparisons but with different comparison
2579 code, try to swap the comparison operands of one arg to produce
2580 a match and compare that variant. */
2581 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2582 && COMPARISON_CLASS_P (arg0)
2583 && COMPARISON_CLASS_P (arg1))
2585 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2587 if (TREE_CODE (arg0) == swap_code)
2588 return operand_equal_p (TREE_OPERAND (arg0, 0),
2589 TREE_OPERAND (arg1, 1), flags)
2590 && operand_equal_p (TREE_OPERAND (arg0, 1),
2591 TREE_OPERAND (arg1, 0), flags);
2594 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2595 /* This is needed for conversions and for COMPONENT_REF.
2596 Might as well play it safe and always test this. */
2597 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2598 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2599 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2602 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2603 We don't care about side effects in that case because the SAVE_EXPR
2604 takes care of that for us. In all other cases, two expressions are
2605 equal if they have no side effects. If we have two identical
2606 expressions with side effects that should be treated the same due
2607 to the only side effects being identical SAVE_EXPR's, that will
2608 be detected in the recursive calls below. */
2609 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2610 && (TREE_CODE (arg0) == SAVE_EXPR
2611 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2614 /* Next handle constant cases, those for which we can return 1 even
2615 if ONLY_CONST is set. */
2616 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2617 switch (TREE_CODE (arg0))
2620 return tree_int_cst_equal (arg0, arg1);
2623 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2624 TREE_REAL_CST (arg1)))
2628 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2630 /* If we do not distinguish between signed and unsigned zero,
2631 consider them equal. */
2632 if (real_zerop (arg0) && real_zerop (arg1))
2641 v1 = TREE_VECTOR_CST_ELTS (arg0);
2642 v2 = TREE_VECTOR_CST_ELTS (arg1);
2645 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2648 v1 = TREE_CHAIN (v1);
2649 v2 = TREE_CHAIN (v2);
2656 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2658 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2662 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2663 && ! memcmp (TREE_STRING_POINTER (arg0),
2664 TREE_STRING_POINTER (arg1),
2665 TREE_STRING_LENGTH (arg0)));
2668 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2674 if (flags & OEP_ONLY_CONST)
2677 /* Define macros to test an operand from arg0 and arg1 for equality and a
2678 variant that allows null and views null as being different from any
2679 non-null value. In the latter case, if either is null, then both
2680 must be; otherwise, do the normal comparison. */
2681 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2682 TREE_OPERAND (arg1, N), flags)
2684 #define OP_SAME_WITH_NULL(N) \
2685 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2686 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2688 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2691 /* Two conversions are equal only if signedness and modes match. */
2692 switch (TREE_CODE (arg0))
2696 case FIX_TRUNC_EXPR:
2697 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2698 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2708 case tcc_comparison:
2710 if (OP_SAME (0) && OP_SAME (1))
2713 /* For commutative ops, allow the other order. */
2714 return (commutative_tree_code (TREE_CODE (arg0))
2715 && operand_equal_p (TREE_OPERAND (arg0, 0),
2716 TREE_OPERAND (arg1, 1), flags)
2717 && operand_equal_p (TREE_OPERAND (arg0, 1),
2718 TREE_OPERAND (arg1, 0), flags));
2721 /* If either of the pointer (or reference) expressions we are
2722 dereferencing contain a side effect, these cannot be equal. */
2723 if (TREE_SIDE_EFFECTS (arg0)
2724 || TREE_SIDE_EFFECTS (arg1))
2727 switch (TREE_CODE (arg0))
2730 case ALIGN_INDIRECT_REF:
2731 case MISALIGNED_INDIRECT_REF:
2737 case ARRAY_RANGE_REF:
2738 /* Operands 2 and 3 may be null. */
2741 && OP_SAME_WITH_NULL (2)
2742 && OP_SAME_WITH_NULL (3));
2745 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2746 may be NULL when we're called to compare MEM_EXPRs. */
2747 return OP_SAME_WITH_NULL (0)
2749 && OP_SAME_WITH_NULL (2);
2752 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2758 case tcc_expression:
2759 switch (TREE_CODE (arg0))
2762 case TRUTH_NOT_EXPR:
2765 case TRUTH_ANDIF_EXPR:
2766 case TRUTH_ORIF_EXPR:
2767 return OP_SAME (0) && OP_SAME (1);
2769 case TRUTH_AND_EXPR:
2771 case TRUTH_XOR_EXPR:
2772 if (OP_SAME (0) && OP_SAME (1))
2775 /* Otherwise take into account this is a commutative operation. */
2776 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2777 TREE_OPERAND (arg1, 1), flags)
2778 && operand_equal_p (TREE_OPERAND (arg0, 1),
2779 TREE_OPERAND (arg1, 0), flags));
2782 /* If the CALL_EXPRs call different functions, then they
2783 clearly cannot be equal. */
2788 unsigned int cef = call_expr_flags (arg0);
2789 if (flags & OEP_PURE_SAME)
2790 cef &= ECF_CONST | ECF_PURE;
2797 /* Now see if all the arguments are the same. operand_equal_p
2798 does not handle TREE_LIST, so we walk the operands here
2799 feeding them to operand_equal_p. */
2800 arg0 = TREE_OPERAND (arg0, 1);
2801 arg1 = TREE_OPERAND (arg1, 1);
2802 while (arg0 && arg1)
2804 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2808 arg0 = TREE_CHAIN (arg0);
2809 arg1 = TREE_CHAIN (arg1);
2812 /* If we get here and both argument lists are exhausted
2813 then the CALL_EXPRs are equal. */
2814 return ! (arg0 || arg1);
2820 case tcc_declaration:
2821 /* Consider __builtin_sqrt equal to sqrt. */
2822 return (TREE_CODE (arg0) == FUNCTION_DECL
2823 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2824 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2825 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2832 #undef OP_SAME_WITH_NULL
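/* For instance, "a + b" and "b + a" compare equal through the commutative
   case above, while two calls to the same pure function with identical
   argument lists are considered equal only when OEP_PURE_SAME is set in
   FLAGS.  */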
2835 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2836 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2838 When in doubt, return 0. */
2841 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2843 int unsignedp1, unsignedpo;
2844 tree primarg0, primarg1, primother;
2845 unsigned int correct_width;
2847 if (operand_equal_p (arg0, arg1, 0))
2850 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2851 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2854 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2855 and see if the inner values are the same. This removes any
2856 signedness comparison, which doesn't matter here. */
2857 primarg0 = arg0, primarg1 = arg1;
2858 STRIP_NOPS (primarg0);
2859 STRIP_NOPS (primarg1);
2860 if (operand_equal_p (primarg0, primarg1, 0))
2863 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2864 actual comparison operand, ARG0.
2866 First throw away any conversions to wider types
2867 already present in the operands. */
2869 primarg1 = get_narrower (arg1, &unsignedp1);
2870 primother = get_narrower (other, &unsignedpo);
2872 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2873 if (unsignedp1 == unsignedpo
2874 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2875 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2877 tree type = TREE_TYPE (arg0);
2879 /* Make sure the shorter operand is extended the right way
2880 to match the longer operand. */
2881 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2882 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2884 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2891 /* See if ARG is an expression that is either a comparison or is performing
2892 arithmetic on comparisons. The comparisons must only be comparing
2893 two different values, which will be stored in *CVAL1 and *CVAL2; if
2894 they are nonzero it means that some operands have already been found.
2895 No variables may be used anywhere else in the expression except in the
2896 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2897 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2899 If this is true, return 1. Otherwise, return zero. */
2902 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2904 enum tree_code code = TREE_CODE (arg);
2905 enum tree_code_class class = TREE_CODE_CLASS (code);
2907 /* We can handle some of the tcc_expression cases here. */
2908 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2910 else if (class == tcc_expression
2911 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2912 || code == COMPOUND_EXPR))
2915 else if (class == tcc_expression && code == SAVE_EXPR
2916 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2918 /* If we've already found a CVAL1 or CVAL2, this expression is
2919 too complex to handle. */
2920 if (*cval1 || *cval2)
2930 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2933 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2934 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2935 cval1, cval2, save_p));
2940 case tcc_expression:
2941 if (code == COND_EXPR)
2942 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2943 cval1, cval2, save_p)
2944 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2945 cval1, cval2, save_p)
2946 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2947 cval1, cval2, save_p));
2950 case tcc_comparison:
2951 /* First see if we can handle the first operand, then the second. For
2952 the second operand, we know *CVAL1 can't be zero. It must be that
2953 one side of the comparison is each of the values; test for the
2954 case where this isn't true by failing if the two operands
2957 if (operand_equal_p (TREE_OPERAND (arg, 0),
2958 TREE_OPERAND (arg, 1), 0))
2962 *cval1 = TREE_OPERAND (arg, 0);
2963 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2965 else if (*cval2 == 0)
2966 *cval2 = TREE_OPERAND (arg, 0);
2967 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2972 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2974 else if (*cval2 == 0)
2975 *cval2 = TREE_OPERAND (arg, 1);
2976 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2988 /* ARG is a tree that is known to contain just arithmetic operations and
2989 comparisons. Evaluate the operations in the tree substituting NEW0 for
2990 any occurrence of OLD0 as an operand of a comparison and likewise for
2994 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2996 tree type = TREE_TYPE (arg);
2997 enum tree_code code = TREE_CODE (arg);
2998 enum tree_code_class class = TREE_CODE_CLASS (code);
3000 /* We can handle some of the tcc_expression cases here. */
3001 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3003 else if (class == tcc_expression
3004 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3010 return fold_build1 (code, type,
3011 eval_subst (TREE_OPERAND (arg, 0),
3012 old0, new0, old1, new1));
3015 return fold_build2 (code, type,
3016 eval_subst (TREE_OPERAND (arg, 0),
3017 old0, new0, old1, new1),
3018 eval_subst (TREE_OPERAND (arg, 1),
3019 old0, new0, old1, new1));
3021 case tcc_expression:
3025 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3028 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3031 return fold_build3 (code, type,
3032 eval_subst (TREE_OPERAND (arg, 0),
3033 old0, new0, old1, new1),
3034 eval_subst (TREE_OPERAND (arg, 1),
3035 old0, new0, old1, new1),
3036 eval_subst (TREE_OPERAND (arg, 2),
3037 old0, new0, old1, new1));
3041 /* Fall through - ??? */
3043 case tcc_comparison:
3045 tree arg0 = TREE_OPERAND (arg, 0);
3046 tree arg1 = TREE_OPERAND (arg, 1);
3048 /* We need to check both for exact equality and tree equality. The
3049 former will be true if the operand has a side-effect. In that
3050 case, we know the operand occurred exactly once. */
3052 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3054 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3057 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3059 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3062 return fold_build2 (code, type, arg0, arg1);
3070 /* Return a tree for the case when the result of an expression is RESULT
3071 converted to TYPE and OMITTED was previously an operand of the expression
3072 but is now not needed (e.g., we folded OMITTED * 0).
3074 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3075 the conversion of RESULT to TYPE. */
3078 omit_one_operand (tree type, tree result, tree omitted)
3080 tree t = fold_convert (type, result);
3082 if (TREE_SIDE_EFFECTS (omitted))
3083 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3085 return non_lvalue (t);
3088 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3091 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3093 tree t = fold_convert (type, result);
3095 if (TREE_SIDE_EFFECTS (omitted))
3096 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3098 return pedantic_non_lvalue (t);
3101 /* Return a tree for the case when the result of an expression is RESULT
3102 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3103 of the expression but are now not needed.
3105 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3106 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3107 evaluated before OMITTED2. Otherwise, if neither has side effects,
3108 just do the conversion of RESULT to TYPE. */
3111 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3113 tree t = fold_convert (type, result);
3115 if (TREE_SIDE_EFFECTS (omitted2))
3116 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3117 if (TREE_SIDE_EFFECTS (omitted1))
3118 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3120 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
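/* Example of the helpers above: folding "foo () * 0" can use
   omit_one_operand (type, integer_zero_node, <call to foo>), which yields
   the COMPOUND_EXPR "(foo (), 0)" because the call has side effects; a
   side-effect-free omitted operand is dropped entirely and the result is
   wrapped in NON_LVALUE_EXPR instead.  */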
3124 /* Return a simplified tree node for the truth-negation of ARG. This
3125 never alters ARG itself. We assume that ARG is an operation that
3126 returns a truth value (0 or 1).
3128 FIXME: one would think we would fold the result, but it causes
3129 problems with the dominator optimizer. */
3132 fold_truth_not_expr (tree arg)
3134 tree type = TREE_TYPE (arg);
3135 enum tree_code code = TREE_CODE (arg);
3137 /* If this is a comparison, we can simply invert it, except for
3138 floating-point non-equality comparisons, in which case we just
3139 enclose a TRUTH_NOT_EXPR around what we have. */
3141 if (TREE_CODE_CLASS (code) == tcc_comparison)
3143 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3144 if (FLOAT_TYPE_P (op_type)
3145 && flag_trapping_math
3146 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3147 && code != NE_EXPR && code != EQ_EXPR)
3151 code = invert_tree_comparison (code,
3152 HONOR_NANS (TYPE_MODE (op_type)));
3153 if (code == ERROR_MARK)
3156 return build2 (code, type,
3157 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3164 return constant_boolean_node (integer_zerop (arg), type);
3166 case TRUTH_AND_EXPR:
3167 return build2 (TRUTH_OR_EXPR, type,
3168 invert_truthvalue (TREE_OPERAND (arg, 0)),
3169 invert_truthvalue (TREE_OPERAND (arg, 1)));
3172 return build2 (TRUTH_AND_EXPR, type,
3173 invert_truthvalue (TREE_OPERAND (arg, 0)),
3174 invert_truthvalue (TREE_OPERAND (arg, 1)));
3176 case TRUTH_XOR_EXPR:
3177 /* Here we can invert either operand. We invert the first operand
3178 unless the second operand is a TRUTH_NOT_EXPR in which case our
3179 result is the XOR of the first operand with the inside of the
3180 negation of the second operand. */
3182 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3183 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3184 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3186 return build2 (TRUTH_XOR_EXPR, type,
3187 invert_truthvalue (TREE_OPERAND (arg, 0)),
3188 TREE_OPERAND (arg, 1));
3190 case TRUTH_ANDIF_EXPR:
3191 return build2 (TRUTH_ORIF_EXPR, type,
3192 invert_truthvalue (TREE_OPERAND (arg, 0)),
3193 invert_truthvalue (TREE_OPERAND (arg, 1)));
3195 case TRUTH_ORIF_EXPR:
3196 return build2 (TRUTH_ANDIF_EXPR, type,
3197 invert_truthvalue (TREE_OPERAND (arg, 0)),
3198 invert_truthvalue (TREE_OPERAND (arg, 1)));
3200 case TRUTH_NOT_EXPR:
3201 return TREE_OPERAND (arg, 0);
3205 tree arg1 = TREE_OPERAND (arg, 1);
3206 tree arg2 = TREE_OPERAND (arg, 2);
3207 /* A COND_EXPR may have a throw as one operand, which
3208 then has void type. Just leave void operands
3210 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3211 VOID_TYPE_P (TREE_TYPE (arg1))
3212 ? arg1 : invert_truthvalue (arg1),
3213 VOID_TYPE_P (TREE_TYPE (arg2))
3214 ? arg2 : invert_truthvalue (arg2));
3218 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3219 invert_truthvalue (TREE_OPERAND (arg, 1)));
3221 case NON_LVALUE_EXPR:
3222 return invert_truthvalue (TREE_OPERAND (arg, 0));
3225 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3226 return build1 (TRUTH_NOT_EXPR, type, arg);
3230 return build1 (TREE_CODE (arg), type,
3231 invert_truthvalue (TREE_OPERAND (arg, 0)));
3234 if (!integer_onep (TREE_OPERAND (arg, 1)))
3236 return build2 (EQ_EXPR, type, arg,
3237 build_int_cst (type, 0));
3240 return build1 (TRUTH_NOT_EXPR, type, arg);
3242 case CLEANUP_POINT_EXPR:
3243 return build1 (CLEANUP_POINT_EXPR, type,
3244 invert_truthvalue (TREE_OPERAND (arg, 0)));
3253 /* Return a simplified tree node for the truth-negation of ARG. This
3254 never alters ARG itself. We assume that ARG is an operation that
3255 returns a truth value (0 or 1).
3257 FIXME: one would think we would fold the result, but it causes
3258 problems with the dominator optimizer. */
3261 invert_truthvalue (tree arg)
3265 if (TREE_CODE (arg) == ERROR_MARK)
3268 tem = fold_truth_not_expr (arg);
3270 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
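/* Some representative negations produced above: an integer "a < b" becomes
   "a >= b"; "a && b" becomes "!a || !b" and "a || b" becomes "!a && !b"
   (De Morgan); a floating-point "a < b" under -ftrapping-math is left as
   "!(a < b)" so that the unordered (NaN) case still traps as before.  */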
3275 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3276 operands are another bit-wise operation with a common input. If so,
3277 distribute the bit operations to save an operation and possibly two if
3278 constants are involved. For example, convert
3279 (A | B) & (A | C) into A | (B & C)
3280 Further simplification will occur if B and C are constants.
3282 If this optimization cannot be done, 0 will be returned. */
3285 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3290 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3291 || TREE_CODE (arg0) == code
3292 || (TREE_CODE (arg0) != BIT_AND_EXPR
3293 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3296 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3298 common = TREE_OPERAND (arg0, 0);
3299 left = TREE_OPERAND (arg0, 1);
3300 right = TREE_OPERAND (arg1, 1);
3302 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3304 common = TREE_OPERAND (arg0, 0);
3305 left = TREE_OPERAND (arg0, 1);
3306 right = TREE_OPERAND (arg1, 0);
3308 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3310 common = TREE_OPERAND (arg0, 1);
3311 left = TREE_OPERAND (arg0, 0);
3312 right = TREE_OPERAND (arg1, 1);
3314 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3316 common = TREE_OPERAND (arg0, 1);
3317 left = TREE_OPERAND (arg0, 0);
3318 right = TREE_OPERAND (arg1, 0);
3323 return fold_build2 (TREE_CODE (arg0), type, common,
3324 fold_build2 (code, type, left, right));
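/* Worked example of the distribution above: "(x & 0x0f) | (x & 0xf0)" has
   the common operand x, so it is rewritten as "x & (0x0f | 0xf0)", which a
   further fold of the inner constants reduces to "x & 0xff".  */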
3327 /* Knowing that ARG0 and ARG1 are each either an RDIV_EXPR or a MULT_EXPR,
3328 simplify a binary operation with code CODE. This optimization is unsafe. */
3330 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3332 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3333 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3335 /* (A / C) +- (B / C) -> (A +- B) / C. */
3337 && operand_equal_p (TREE_OPERAND (arg0, 1),
3338 TREE_OPERAND (arg1, 1), 0))
3339 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3340 fold_build2 (code, type,
3341 TREE_OPERAND (arg0, 0),
3342 TREE_OPERAND (arg1, 0)),
3343 TREE_OPERAND (arg0, 1));
3345 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3346 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3347 TREE_OPERAND (arg1, 0), 0)
3348 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3349 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3351 REAL_VALUE_TYPE r0, r1;
3352 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3353 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3355 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3357 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3358 real_arithmetic (&r0, code, &r0, &r1);
3359 return fold_build2 (MULT_EXPR, type,
3360 TREE_OPERAND (arg0, 0),
3361 build_real (type, r0));
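/* Two sketches of the unsafe distribution above: "(a / c) - (b / c)"
   becomes "(a - b) / c", and "(a / 2.0) + (a / 4.0)" becomes
   "a * (1.0/2.0 + 1.0/4.0)", i.e. "a * 0.75".  Both can change rounding
   and overflow behavior, hence "unsafe".  */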
3367 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3368 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3371 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3378 tree size = TYPE_SIZE (TREE_TYPE (inner));
3379 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3380 || POINTER_TYPE_P (TREE_TYPE (inner)))
3381 && host_integerp (size, 0)
3382 && tree_low_cst (size, 0) == bitsize)
3383 return fold_convert (type, inner);
3386 result = build3 (BIT_FIELD_REF, type, inner,
3387 size_int (bitsize), bitsize_int (bitpos));
3389 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3394 /* Optimize a bit-field compare.
3396 There are two cases: First is a compare against a constant and the
3397 second is a comparison of two items where the fields are at the same
3398 bit position relative to the start of a chunk (byte, halfword, word)
3399 large enough to contain it. In these cases we can avoid the shift
3400 implicit in bitfield extractions.
3402 For constants, we emit a compare of the shifted constant with the
3403 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3404 compared. For two fields at the same position, we do the ANDs with the
3405 similar mask and compare the result of the ANDs.
3407 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3408 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3409 are the left and right operands of the comparison, respectively.
3411 If the optimization described above can be done, we return the resulting
3412 tree. Otherwise we return zero. */
3415 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3418 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3419 tree type = TREE_TYPE (lhs);
3420 tree signed_type, unsigned_type;
3421 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3422 enum machine_mode lmode, rmode, nmode;
3423 int lunsignedp, runsignedp;
3424 int lvolatilep = 0, rvolatilep = 0;
3425 tree linner, rinner = NULL_TREE;
3429 /* Get all the information about the extractions being done. If the bit size
3430 is the same as the size of the underlying object, we aren't doing an
3431 extraction at all and so can do nothing. We also don't want to
3432 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3433 then will no longer be able to replace it. */
3434 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3435 &lunsignedp, &lvolatilep, false);
3436 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3437 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3442 /* If this is not a constant, we can only do something if bit positions,
3443 sizes, and signedness are the same. */
3444 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3445 &runsignedp, &rvolatilep, false);
3447 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3448 || lunsignedp != runsignedp || offset != 0
3449 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3453 /* See if we can find a mode to refer to this field. We should be able to,
3454 but fail if we can't. */
3455 nmode = get_best_mode (lbitsize, lbitpos,
3456 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3457 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3458 TYPE_ALIGN (TREE_TYPE (rinner))),
3459 word_mode, lvolatilep || rvolatilep);
3460 if (nmode == VOIDmode)
3463 /* Set signed and unsigned types of the precision of this mode for the
3465 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3466 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3468 /* Compute the bit position and size for the new reference and our offset
3469 within it. If the new reference is the same size as the original, we
3470 won't optimize anything, so return zero. */
3471 nbitsize = GET_MODE_BITSIZE (nmode);
3472 nbitpos = lbitpos & ~ (nbitsize - 1);
3474 if (nbitsize == lbitsize)
3477 if (BYTES_BIG_ENDIAN)
3478 lbitpos = nbitsize - lbitsize - lbitpos;
3480 /* Make the mask to be used against the extracted field. */
3481 mask = build_int_cst_type (unsigned_type, -1);
3482 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3483 mask = const_binop (RSHIFT_EXPR, mask,
3484 size_int (nbitsize - lbitsize - lbitpos), 0);
3487 /* If not comparing with constant, just rework the comparison
3489 return fold_build2 (code, compare_type,
3490 fold_build2 (BIT_AND_EXPR, unsigned_type,
3491 make_bit_field_ref (linner,
3496 fold_build2 (BIT_AND_EXPR, unsigned_type,
3497 make_bit_field_ref (rinner,
3503 /* Otherwise, we are handling the constant case. See if the constant is too
3504 big for the field. Warn and return a tree for 0 (false) if so. We do
3505 this not only for its own sake, but to avoid having to test for this
3506 error case below. If we didn't, we might generate wrong code.
3508 For unsigned fields, the constant shifted right by the field length should
3509 be all zero. For signed fields, the high-order bits should agree with
3514 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3515 fold_convert (unsigned_type, rhs),
3516 size_int (lbitsize), 0)))
3518 warning (0, "comparison is always %d due to width of bit-field",
3520 return constant_boolean_node (code == NE_EXPR, compare_type);
3525 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3526 size_int (lbitsize - 1), 0);
3527 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3529 warning (0, "comparison is always %d due to width of bit-field",
3531 return constant_boolean_node (code == NE_EXPR, compare_type);
3535 /* Single-bit compares should always be against zero. */
3536 if (lbitsize == 1 && ! integer_zerop (rhs))
3538 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3539 rhs = build_int_cst (type, 0);
3542 /* Make a new bitfield reference, shift the constant over the
3543 appropriate number of bits and mask it with the computed mask
3544 (in case this was a signed field). If we changed it, make a new one. */
3545 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3548 TREE_SIDE_EFFECTS (lhs) = 1;
3549 TREE_THIS_VOLATILE (lhs) = 1;
3552 rhs = const_binop (BIT_AND_EXPR,
3553 const_binop (LSHIFT_EXPR,
3554 fold_convert (unsigned_type, rhs),
3555 size_int (lbitpos), 0),
3558 return build2 (code, compare_type,
3559 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
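/* A sketch of the transformation above: given "struct { unsigned f : 3; } s",
   the test "s.f == 5" can be rewritten to load a chunk containing the field,
   AND it with a mask covering those three bits, and compare the result
   against the constant 5 shifted into the field's position, avoiding the
   shift that a plain bit-field extraction would need.  */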
3563 /* Subroutine for fold_truthop: decode a field reference.
3565 If EXP is a comparison reference, we return the innermost reference.
3567 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3568 set to the starting bit number.
3570 If the innermost field can be completely contained in a mode-sized
3571 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3573 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3574 otherwise it is not changed.
3576 *PUNSIGNEDP is set to the signedness of the field.
3578 *PMASK is set to the mask used. This is either contained in a
3579 BIT_AND_EXPR or derived from the width of the field.
3581 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3583 Return 0 if this is not a component reference or is one that we can't
3584 do anything with. */
3587 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3588 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3589 int *punsignedp, int *pvolatilep,
3590 tree *pmask, tree *pand_mask)
3592 tree outer_type = 0;
3594 tree mask, inner, offset;
3596 unsigned int precision;
3598 /* All the optimizations using this function assume integer fields.
3599 There are problems with FP fields since the type_for_size call
3600 below can fail for, e.g., XFmode. */
3601 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3604 /* We are interested in the bare arrangement of bits, so strip everything
3605 that doesn't affect the machine mode. However, record the type of the
3606 outermost expression if it may matter below. */
3607 if (TREE_CODE (exp) == NOP_EXPR
3608 || TREE_CODE (exp) == CONVERT_EXPR
3609 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3610 outer_type = TREE_TYPE (exp);
3613 if (TREE_CODE (exp) == BIT_AND_EXPR)
3615 and_mask = TREE_OPERAND (exp, 1);
3616 exp = TREE_OPERAND (exp, 0);
3617 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3618 if (TREE_CODE (and_mask) != INTEGER_CST)
3622 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3623 punsignedp, pvolatilep, false);
3624 if ((inner == exp && and_mask == 0)
3625 || *pbitsize < 0 || offset != 0
3626 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3629 /* If the number of bits in the reference is the same as the bitsize of
3630 the outer type, then the outer type gives the signedness. Otherwise
3631 (in case of a small bitfield) the signedness is unchanged. */
3632 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3633 *punsignedp = TYPE_UNSIGNED (outer_type);
3635 /* Compute the mask to access the bitfield. */
3636 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3637 precision = TYPE_PRECISION (unsigned_type);
3639 mask = build_int_cst_type (unsigned_type, -1);
3641 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3642 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3644 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3646 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3647 fold_convert (unsigned_type, and_mask), mask);
3650 *pand_mask = and_mask;
3654 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3658 all_ones_mask_p (tree mask, int size)
3660 tree type = TREE_TYPE (mask);
3661 unsigned int precision = TYPE_PRECISION (type);
3664 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3667 tree_int_cst_equal (mask,
3668 const_binop (RSHIFT_EXPR,
3669 const_binop (LSHIFT_EXPR, tmask,
3670 size_int (precision - size),
3672 size_int (precision - size), 0));
3675 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3676 represents the sign bit of EXP's type. If EXP represents a sign
3677 or zero extension, also test VAL against the unextended type.
3678 The return value is the (sub)expression whose sign bit is VAL,
3679 or NULL_TREE otherwise. */
3682 sign_bit_p (tree exp, tree val)
3684 unsigned HOST_WIDE_INT mask_lo, lo;
3685 HOST_WIDE_INT mask_hi, hi;
3689 /* Tree EXP must have an integral type. */
3690 t = TREE_TYPE (exp);
3691 if (! INTEGRAL_TYPE_P (t))
3694 /* Tree VAL must be an integer constant. */
3695 if (TREE_CODE (val) != INTEGER_CST
3696 || TREE_CONSTANT_OVERFLOW (val))
3699 width = TYPE_PRECISION (t);
3700 if (width > HOST_BITS_PER_WIDE_INT)
3702 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3705 mask_hi = ((unsigned HOST_WIDE_INT) -1
3706 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3712 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3715 mask_lo = ((unsigned HOST_WIDE_INT) -1
3716 >> (HOST_BITS_PER_WIDE_INT - width));
3719 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3720 treat VAL as if it were unsigned. */
3721 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3722 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3725 /* Handle extension from a narrower type. */
3726 if (TREE_CODE (exp) == NOP_EXPR
3727 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3728 return sign_bit_p (TREE_OPERAND (exp, 0), val);
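/* For example, if EXP has an 8-bit signed type, sign_bit_p returns EXP when
   VAL is the constant with only bit 7 set (-128 viewed through the mask
   above) and NULL_TREE for any other constant; the double-word masks only
   come into play once the precision exceeds HOST_BITS_PER_WIDE_INT.  */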
3733 /* Subroutine for fold_truthop: determine if an operand is simple enough
3734 to be evaluated unconditionally. */
3737 simple_operand_p (tree exp)
3739 /* Strip any conversions that don't change the machine mode. */
3742 return (CONSTANT_CLASS_P (exp)
3743 || TREE_CODE (exp) == SSA_NAME
3745 && ! TREE_ADDRESSABLE (exp)
3746 && ! TREE_THIS_VOLATILE (exp)
3747 && ! DECL_NONLOCAL (exp)
3748 /* Don't regard global variables as simple. They may be
3749 allocated in ways unknown to the compiler (shared memory,
3750 #pragma weak, etc). */
3751 && ! TREE_PUBLIC (exp)
3752 && ! DECL_EXTERNAL (exp)
3753 /* Loading a static variable is unduly expensive, but global
3754 registers aren't expensive. */
3755 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3758 /* The following functions are subroutines to fold_range_test and allow it to
3759 try to change a logical combination of comparisons into a range test.
3762 X == 2 || X == 3 || X == 4 || X == 5
3766 (unsigned) (X - 2) <= 3
3768 We describe each set of comparisons as being either inside or outside
3769 a range, using a variable named like IN_P, and then describe the
3770 range with a lower and upper bound. If one of the bounds is omitted,
3771 it represents either the highest or lowest value of the type.
3773 In the comments below, we represent a range by two numbers in brackets
3774 preceded by a "+" to designate being inside that range, or a "-" to
3775 designate being outside that range, so the condition can be inverted by
3776 flipping the prefix. An omitted bound is represented by a "-". For
3777 example, "- [-, 10]" means being outside the range starting at the lowest
3778 possible value and ending at 10, in other words, being greater than 10.
3779 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
3782 We set up things so that the missing bounds are handled in a consistent
3783 manner so neither a missing bound nor "true" and "false" need to be
3784 handled using a special case. */
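/* As a worked instance of the notation: "X == 2 || X == 3 || X == 4 || X == 5"
   is the range "+ [2, 5]".  Subtracting the low bound turns it into the range
   "+ [0, 3]" on "X - 2", and because an unsigned value is always >= 0 the
   membership test collapses to the single comparison "(unsigned) (X - 2) <= 3".
   Negating the original condition merely flips the prefix, giving "- [2, 5]".  */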
3786 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3787 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3788 and UPPER1_P are nonzero if the respective argument is an upper bound
3789 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3790 must be specified for a comparison. ARG1 will be converted to ARG0's
3791 type if both are specified. */
3794 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3795 tree arg1, int upper1_p)
3801 /* If neither arg represents infinity, do the normal operation.
3802 Else, if not a comparison, return infinity. Else handle the special
3803 comparison rules. Note that most of the cases below won't occur, but
3804 are handled for consistency. */
3806 if (arg0 != 0 && arg1 != 0)
3808 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3809 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3811 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3814 if (TREE_CODE_CLASS (code) != tcc_comparison)
3817 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3818 for neither. In real maths, we cannot assume open ended ranges are
3819 the same. But, this is computer arithmetic, where numbers are finite.
3820 We can therefore make the transformation of any unbounded range with
3821 the value Z, Z being greater than any representable number. This permits
3822 us to treat unbounded ranges as equal. */
3823 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3824 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3828 result = sgn0 == sgn1;
3831 result = sgn0 != sgn1;
3834 result = sgn0 < sgn1;
3837 result = sgn0 <= sgn1;
3840 result = sgn0 > sgn1;
3843 result = sgn0 >= sgn1;
3849 return constant_boolean_node (result, type);
3852 /* Given EXP, a logical expression, set the range it is testing into
3853 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3854 actually being tested. *PLOW and *PHIGH will be made of the same type
3855 as the returned expression. If EXP is not a comparison, we will most
3856 likely not be returning a useful value and range. */
3859 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3861 enum tree_code code;
3862 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3863 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3865 tree low, high, n_low, n_high;
3867 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3868 and see if we can refine the range. Some of the cases below may not
3869 happen, but it doesn't seem worth worrying about this. We "continue"
3870 the outer loop when we've changed something; otherwise we "break"
3871 the switch, which will "break" the while. */
3874 low = high = build_int_cst (TREE_TYPE (exp), 0);
3878 code = TREE_CODE (exp);
3879 exp_type = TREE_TYPE (exp);
3881 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3883 if (TREE_CODE_LENGTH (code) > 0)
3884 arg0 = TREE_OPERAND (exp, 0);
3885 if (TREE_CODE_CLASS (code) == tcc_comparison
3886 || TREE_CODE_CLASS (code) == tcc_unary
3887 || TREE_CODE_CLASS (code) == tcc_binary)
3888 arg0_type = TREE_TYPE (arg0);
3889 if (TREE_CODE_CLASS (code) == tcc_binary
3890 || TREE_CODE_CLASS (code) == tcc_comparison
3891 || (TREE_CODE_CLASS (code) == tcc_expression
3892 && TREE_CODE_LENGTH (code) > 1))
3893 arg1 = TREE_OPERAND (exp, 1);
3898 case TRUTH_NOT_EXPR:
3899 in_p = ! in_p, exp = arg0;
3902 case EQ_EXPR: case NE_EXPR:
3903 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3904 /* We can only do something if the range is testing for zero
3905 and if the second operand is an integer constant. Note that
3906 saying something is "in" the range we make is done by
3907 complementing IN_P since it will set in the initial case of
3908 being not equal to zero; "out" is leaving it alone. */
3909 if (low == 0 || high == 0
3910 || ! integer_zerop (low) || ! integer_zerop (high)
3911 || TREE_CODE (arg1) != INTEGER_CST)
3916 case NE_EXPR: /* - [c, c] */
3919 case EQ_EXPR: /* + [c, c] */
3920 in_p = ! in_p, low = high = arg1;
3922 case GT_EXPR: /* - [-, c] */
3923 low = 0, high = arg1;
3925 case GE_EXPR: /* + [c, -] */
3926 in_p = ! in_p, low = arg1, high = 0;
3928 case LT_EXPR: /* - [c, -] */
3929 low = arg1, high = 0;
3931 case LE_EXPR: /* + [-, c] */
3932 in_p = ! in_p, low = 0, high = arg1;
3938 /* If this is an unsigned comparison, we also know that EXP is
3939 greater than or equal to zero. We base the range tests we make
3940 on that fact, so we record it here so we can parse existing
3941 range tests. We test arg0_type since often the return type
3942 of, e.g. EQ_EXPR, is boolean. */
3943 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3945 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3947 build_int_cst (arg0_type, 0),
3951 in_p = n_in_p, low = n_low, high = n_high;
3953 /* If the high bound is missing, but we have a nonzero low
3954 bound, reverse the range so it goes from zero to the low bound
3956 if (high == 0 && low && ! integer_zerop (low))
3959 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3960 integer_one_node, 0);
3961 low = build_int_cst (arg0_type, 0);
3969 /* (-x) IN [a,b] -> x in [-b, -a] */
3970 n_low = range_binop (MINUS_EXPR, exp_type,
3971 build_int_cst (exp_type, 0),
3973 n_high = range_binop (MINUS_EXPR, exp_type,
3974 build_int_cst (exp_type, 0),
3976 low = n_low, high = n_high;
3982 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3983 build_int_cst (exp_type, 1));
3986 case PLUS_EXPR: case MINUS_EXPR:
3987 if (TREE_CODE (arg1) != INTEGER_CST)
3990 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3991 move a constant to the other side. */
3992 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3995 /* If EXP is signed, any overflow in the computation is undefined,
3996 so we don't worry about it so long as our computations on
3997 the bounds don't overflow. For unsigned, overflow is defined
3998 and this is exactly the right thing. */
3999 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4000 arg0_type, low, 0, arg1, 0);
4001 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4002 arg0_type, high, 1, arg1, 0);
4003 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4004 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4007 /* Check for an unsigned range which has wrapped around the maximum
4008 value thus making n_high < n_low, and normalize it. */
4009 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4011 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4012 integer_one_node, 0);
4013 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4014 integer_one_node, 0);
4016 /* If the range is of the form +/- [ x+1, x ], we won't
4017 be able to normalize it. But then, it represents the
4018 whole range or the empty set, so make it
4020 if (tree_int_cst_equal (n_low, low)
4021 && tree_int_cst_equal (n_high, high))
4027 low = n_low, high = n_high;
4032 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4033 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4036 if (! INTEGRAL_TYPE_P (arg0_type)
4037 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4038 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4041 n_low = low, n_high = high;
4044 n_low = fold_convert (arg0_type, n_low);
4047 n_high = fold_convert (arg0_type, n_high);
4050 /* If we're converting arg0 from an unsigned type, to exp,
4051 a signed type, we will be doing the comparison as unsigned.
4052 The tests above have already verified that LOW and HIGH
4055 So we have to ensure that we will handle large unsigned
4056 values the same way that the current signed bounds treat
4059 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4062 tree equiv_type = lang_hooks.types.type_for_mode
4063 (TYPE_MODE (arg0_type), 1);
4065 /* A range without an upper bound is, naturally, unbounded.
4066 Since convert would have cropped a very large value, use
4067 the max value for the destination type. */
4069 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4070 : TYPE_MAX_VALUE (arg0_type);
4072 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4073 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4074 fold_convert (arg0_type,
4076 build_int_cst (arg0_type, 1));
4078 /* If the low bound is specified, "and" the range with the
4079 range for which the original unsigned value will be
4083 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4084 1, n_low, n_high, 1,
4085 fold_convert (arg0_type,
4090 in_p = (n_in_p == in_p);
4094 /* Otherwise, "or" the range with the range of the input
4095 that will be interpreted as negative. */
4096 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4097 0, n_low, n_high, 1,
4098 fold_convert (arg0_type,
4103 in_p = (in_p != n_in_p);
4108 low = n_low, high = n_high;
4118 /* If EXP is a constant, we can evaluate whether this is true or false. */
4119 if (TREE_CODE (exp) == INTEGER_CST)
4121 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4123 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4129 *pin_p = in_p, *plow = low, *phigh = high;
4133 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4134 type, TYPE, return an expression to test if EXP is in (or out of, depending
4135 on IN_P) the range. Return 0 if the test couldn't be created. */
4138 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4140 tree etype = TREE_TYPE (exp);
4143 #ifdef HAVE_canonicalize_funcptr_for_compare
4144 /* Disable this optimization for function pointer expressions
4145 on targets that require function pointer canonicalization. */
4146 if (HAVE_canonicalize_funcptr_for_compare
4147 && TREE_CODE (etype) == POINTER_TYPE
4148 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4154 value = build_range_check (type, exp, 1, low, high);
4156 return invert_truthvalue (value);
4161 if (low == 0 && high == 0)
4162 return build_int_cst (type, 1);
4165 return fold_build2 (LE_EXPR, type, exp,
4166 fold_convert (etype, high));
4169 return fold_build2 (GE_EXPR, type, exp,
4170 fold_convert (etype, low));
4172 if (operand_equal_p (low, high, 0))
4173 return fold_build2 (EQ_EXPR, type, exp,
4174 fold_convert (etype, low));
4176 if (integer_zerop (low))
4178 if (! TYPE_UNSIGNED (etype))
4180 etype = lang_hooks.types.unsigned_type (etype);
4181 high = fold_convert (etype, high);
4182 exp = fold_convert (etype, exp);
4184 return build_range_check (type, exp, 1, 0, high);
4187 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4188 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4190 unsigned HOST_WIDE_INT lo;
4194 prec = TYPE_PRECISION (etype);
4195 if (prec <= HOST_BITS_PER_WIDE_INT)
4198 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4202 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4203 lo = (unsigned HOST_WIDE_INT) -1;
4206 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4208 if (TYPE_UNSIGNED (etype))
4210 etype = lang_hooks.types.signed_type (etype);
4211 exp = fold_convert (etype, exp);
4213 return fold_build2 (GT_EXPR, type, exp,
4214 build_int_cst (etype, 0));
4218 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4219 This requires wrap-around arithmetic for the type of the expression. */
4220 switch (TREE_CODE (etype))
4223 /* There is no requirement that LOW be within the range of ETYPE
4224 if the latter is a subtype. It must, however, be within the base
4225 type of ETYPE. So be sure we do the subtraction in that type. */
4226 if (TREE_TYPE (etype))
4227 etype = TREE_TYPE (etype);
4232 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4233 TYPE_UNSIGNED (etype));
4240 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4241 if (TREE_CODE (etype) == INTEGER_TYPE
4242 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4244 tree utype, minv, maxv;
4246 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4247 for the type in question, as we rely on this here. */
4248 utype = lang_hooks.types.unsigned_type (etype);
4249 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4250 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4251 integer_one_node, 1);
4252 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4254 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4261 high = fold_convert (etype, high);
4262 low = fold_convert (etype, low);
4263 exp = fold_convert (etype, exp);
4265 value = const_binop (MINUS_EXPR, high, low, 0);
4267 if (value != 0 && !TREE_OVERFLOW (value))
4268 return build_range_check (type,
4269 fold_build2 (MINUS_EXPR, etype, exp, low),
4270 1, build_int_cst (etype, 0), value);
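/* Example of the general case above: a check that EXP lies in "+ [3, 10]"
   is rewritten as a range check of "EXP - 3" against "+ [0, 10 - 3]", and
   the zero-low-bound path above then reduces that to the single unsigned
   comparison "(unsigned) (EXP - 3) <= 7".  */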
4275 /* Return the predecessor of VAL in its type, handling the infinite case. */
4278 range_predecessor (tree val)
4280 tree type = TREE_TYPE (val);
4282 if (INTEGRAL_TYPE_P (type)
4283 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4286 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4289 /* Return the successor of VAL in its type, handling the infinite case. */
4292 range_successor (tree val)
4294 tree type = TREE_TYPE (val);
4296 if (INTEGRAL_TYPE_P (type)
4297 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4300 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4303 /* Given two ranges, see if we can merge them into one. Return 1 if we
4304 can, 0 if we can't. Set the output range into the specified parameters. */
4307 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4308 tree high0, int in1_p, tree low1, tree high1)
4316 int lowequal = ((low0 == 0 && low1 == 0)
4317 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4318 low0, 0, low1, 0)));
4319 int highequal = ((high0 == 0 && high1 == 0)
4320 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4321 high0, 1, high1, 1)));
4323 /* Make range 0 be the range that starts first, or ends last if they
4324 start at the same value. Swap them if it isn't. */
4325 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4328 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4329 high1, 1, high0, 1))))
4331 temp = in0_p, in0_p = in1_p, in1_p = temp;
4332 tem = low0, low0 = low1, low1 = tem;
4333 tem = high0, high0 = high1, high1 = tem;
4336 /* Now flag two cases, whether the ranges are disjoint or whether the
4337 second range is totally subsumed in the first. Note that the tests
4338 below are simplified by the ones above. */
4339 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4340 high0, 1, low1, 0));
4341 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4342 high1, 1, high0, 1));
4344 /* We now have four cases, depending on whether we are including or
4345 excluding the two ranges. */
4348 /* If they don't overlap, the result is false. If the second range
4349 is a subset it is the result. Otherwise, the range is from the start
4350 of the second to the end of the first. */
4352 in_p = 0, low = high = 0;
4354 in_p = 1, low = low1, high = high1;
4356 in_p = 1, low = low1, high = high0;
4359 else if (in0_p && ! in1_p)
4361 /* If they don't overlap, the result is the first range. If they are
4362 equal, the result is false. If the second range is a subset of the
4363 first, and the ranges begin at the same place, we go from just after
4364 the end of the second range to the end of the first. If the second
4365 range is not a subset of the first, or if it is a subset and both
4366 ranges end at the same place, the range starts at the start of the
4367 first range and ends just before the second range.
4368 Otherwise, we can't describe this as a single range. */
4370 in_p = 1, low = low0, high = high0;
4371 else if (lowequal && highequal)
4372 in_p = 0, low = high = 0;
4373 else if (subset && lowequal)
4375 low = range_successor (high1);
4379 else if (! subset || highequal)
4382 high = range_predecessor (low1);
4389 else if (! in0_p && in1_p)
4391 /* If they don't overlap, the result is the second range. If the second
4392 is a subset of the first, the result is false. Otherwise,
4393 the range starts just after the first range and ends at the
4394 end of the second. */
4396 in_p = 1, low = low1, high = high1;
4397 else if (subset || highequal)
4398 in_p = 0, low = high = 0;
4401 low = range_successor (high0);
4409 /* The case where we are excluding both ranges. Here the complex case
4410 is if they don't overlap. In that case, the only time we have a
4411 range is if they are adjacent. If the second is a subset of the
4412 first, the result is the first. Otherwise, the range to exclude
4413 starts at the beginning of the first range and ends at the end of the
4417 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4418 range_successor (high0),
4420 in_p = 0, low = low0, high = high1;
4423 /* Canonicalize - [min, x] into - [-, x]. */
4424 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4425 switch (TREE_CODE (TREE_TYPE (low0)))
4428 if (TYPE_PRECISION (TREE_TYPE (low0))
4429 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4433 if (tree_int_cst_equal (low0,
4434 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4438 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4439 && integer_zerop (low0))
4446 /* Canonicalize - [x, max] into - [x, -]. */
4447 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4448 switch (TREE_CODE (TREE_TYPE (high1)))
4451 if (TYPE_PRECISION (TREE_TYPE (high1))
4452 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4456 if (tree_int_cst_equal (high1,
4457 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4461 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4462 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4464 integer_one_node, 1)))
4471 /* The ranges might be also adjacent between the maximum and
4472 minimum values of the given type. For
4473 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4474 return + [x + 1, y - 1]. */
4475 if (low0 == 0 && high1 == 0)
4477 low = range_successor (high0);
4478 high = range_predecessor (low1);
4479 if (low == 0 || high == 0)
4489 in_p = 0, low = low0, high = high0;
4491 in_p = 0, low = low0, high = high1;
4494 *pin_p = in_p, *plow = low, *phigh = high;
4499 /* Subroutine of fold, looking inside expressions of the form
4500 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4501 of the COND_EXPR. This function is being used also to optimize
4502 A op B ? C : A, by reversing the comparison first.
4504 Return a folded expression whose code is not a COND_EXPR
4505 anymore, or NULL_TREE if no folding opportunity is found. */
4508 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4510 enum tree_code comp_code = TREE_CODE (arg0);
4511 tree arg00 = TREE_OPERAND (arg0, 0);
4512 tree arg01 = TREE_OPERAND (arg0, 1);
4513 tree arg1_type = TREE_TYPE (arg1);
4519 /* If we have A op 0 ? A : -A, consider applying the following
4522 A == 0? A : -A same as -A
4523 A != 0? A : -A same as A
4524 A >= 0? A : -A same as abs (A)
4525 A > 0? A : -A same as abs (A)
4526 A <= 0? A : -A same as -abs (A)
4527 A < 0? A : -A same as -abs (A)
4529 None of these transformations work for modes with signed
4530 zeros. If A is +/-0, the first two transformations will
4531 change the sign of the result (from +0 to -0, or vice
4532 versa). The last four will fix the sign of the result,
4533 even though the original expressions could be positive or
4534 negative, depending on the sign of A.
4536 Note that all these transformations are correct if A is
4537 NaN, since the two alternatives (A and -A) are also NaNs. */
4538 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4539 ? real_zerop (arg01)
4540 : integer_zerop (arg01))
4541 && ((TREE_CODE (arg2) == NEGATE_EXPR
4542 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4543 /* In the case that A is of the form X-Y, '-A' (arg2) may
4544 have already been folded to Y-X, check for that. */
4545 || (TREE_CODE (arg1) == MINUS_EXPR
4546 && TREE_CODE (arg2) == MINUS_EXPR
4547 && operand_equal_p (TREE_OPERAND (arg1, 0),
4548 TREE_OPERAND (arg2, 1), 0)
4549 && operand_equal_p (TREE_OPERAND (arg1, 1),
4550 TREE_OPERAND (arg2, 0), 0))))
4555 tem = fold_convert (arg1_type, arg1);
4556 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4559 return pedantic_non_lvalue (fold_convert (type, arg1));
4562 if (flag_trapping_math)
4567 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4568 arg1 = fold_convert (lang_hooks.types.signed_type
4569 (TREE_TYPE (arg1)), arg1);
4570 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4571 return pedantic_non_lvalue (fold_convert (type, tem));
4574 if (flag_trapping_math)
4578 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4579 arg1 = fold_convert (lang_hooks.types.signed_type
4580 (TREE_TYPE (arg1)), arg1);
4581 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4582 return negate_expr (fold_convert (type, tem));
4584 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4588 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4589 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4590 both transformations are correct when A is NaN: A != 0
4591 is then true, and A == 0 is false. */
4593 if (integer_zerop (arg01) && integer_zerop (arg2))
4595 if (comp_code == NE_EXPR)
4596 return pedantic_non_lvalue (fold_convert (type, arg1));
4597 else if (comp_code == EQ_EXPR)
4598 return build_int_cst (type, 0);
4601 /* Try some transformations of A op B ? A : B.
4603 A == B? A : B same as B
4604 A != B? A : B same as A
4605 A >= B? A : B same as max (A, B)
4606 A > B? A : B same as max (B, A)
4607 A <= B? A : B same as min (A, B)
4608 A < B? A : B same as min (B, A)
4610 As above, these transformations don't work in the presence
4611 of signed zeros. For example, if A and B are zeros of
4612 opposite sign, the first two transformations will change
4613 the sign of the result. In the last four, the original
4614 expressions give different results for (A=+0, B=-0) and
4615 (A=-0, B=+0), but the transformed expressions do not.
4617 The first two transformations are correct if either A or B
4618 is a NaN. In the first transformation, the condition will
4619 be false, and B will indeed be chosen. In the case of the
4620 second transformation, the condition A != B will be true,
4621 and A will be chosen.
4623 The conversions to max() and min() are not correct if B is
4624 a number and A is not. The conditions in the original
4625 expressions will be false, so all four give B. The min()
4626 and max() versions would give a NaN instead. */
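/* For example, with integer operands "x <= y ? x : y" can become
MIN_EXPR <x, y> and "x >= y ? x : y" can become MAX_EXPR <x, y>;
the floating-point cases are additionally guarded by the NaN checks below. */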
4627 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4628 /* Avoid these transformations if the COND_EXPR may be used
4629 as an lvalue in the C++ front-end. PR c++/19199. */
4631 || (strcmp (lang_hooks.name, "GNU C++") != 0
4632 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4633 || ! maybe_lvalue_p (arg1)
4634 || ! maybe_lvalue_p (arg2)))
4636 tree comp_op0 = arg00;
4637 tree comp_op1 = arg01;
4638 tree comp_type = TREE_TYPE (comp_op0);
4640 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4641 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4651 return pedantic_non_lvalue (fold_convert (type, arg2));
4653 return pedantic_non_lvalue (fold_convert (type, arg1));
4658 /* In C++ a ?: expression can be an lvalue, so put the
4659 operand which will be used if they are equal first
4660 so that we can convert this back to the
4661 corresponding COND_EXPR. */
4662 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4664 comp_op0 = fold_convert (comp_type, comp_op0);
4665 comp_op1 = fold_convert (comp_type, comp_op1);
4666 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4667 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4668 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4669 return pedantic_non_lvalue (fold_convert (type, tem));
4676 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4678 comp_op0 = fold_convert (comp_type, comp_op0);
4679 comp_op1 = fold_convert (comp_type, comp_op1);
4680 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4681 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4682 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4683 return pedantic_non_lvalue (fold_convert (type, tem));
4687 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4688 return pedantic_non_lvalue (fold_convert (type, arg2));
4691 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4692 return pedantic_non_lvalue (fold_convert (type, arg1));
4695 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4700 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4701 we might still be able to simplify this. For example,
4702 if C1 is one less or one more than C2, this might have started
4703 out as a MIN or MAX and been transformed by this function.
4704 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
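/* For example, "x < 5 ? x : 4" can become MIN_EXPR <x, 4>, and
"x > 4 ? x : 5" can become MAX_EXPR <x, 5>. */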
4706 if (INTEGRAL_TYPE_P (type)
4707 && TREE_CODE (arg01) == INTEGER_CST
4708 && TREE_CODE (arg2) == INTEGER_CST)
4712 /* We can replace A with C1 in this case. */
4713 arg1 = fold_convert (type, arg01);
4714 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4717 /* If C1 is C2 + 1, this is min(A, C2). */
4718 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4720 && operand_equal_p (arg01,
4721 const_binop (PLUS_EXPR, arg2,
4722 build_int_cst (type, 1), 0),
4724 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4729 /* If C1 is C2 - 1, this is min(A, C2). */
4730 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4732 && operand_equal_p (arg01,
4733 const_binop (MINUS_EXPR, arg2,
4734 build_int_cst (type, 1), 0),
4736 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4741 /* If C1 is C2 - 1, this is max(A, C2). */
4742 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4744 && operand_equal_p (arg01,
4745 const_binop (MINUS_EXPR, arg2,
4746 build_int_cst (type, 1), 0),
4748 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4753 /* If C1 is C2 + 1, this is max(A, C2). */
4754 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4756 && operand_equal_p (arg01,
4757 const_binop (PLUS_EXPR, arg2,
4758 build_int_cst (type, 1), 0),
4760 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4774 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4775 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4778 /* EXP is some logical combination of boolean tests. See if we can
4779 merge it into some range test. Return the new tree if so. */
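/* For example, "ch >= '0' && ch <= '9'" can be merged into one range
check, roughly ((unsigned) ch - '0') <= 9, since build_range_check
subtracts the low bound and does a single unsigned comparison. */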
4782 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4784 int or_op = (code == TRUTH_ORIF_EXPR
4785 || code == TRUTH_OR_EXPR);
4786 int in0_p, in1_p, in_p;
4787 tree low0, low1, low, high0, high1, high;
4788 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4789 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4792 /* If this is an OR operation, invert both sides; we will invert
4793 again at the end. */
4795 in0_p = ! in0_p, in1_p = ! in1_p;
4797 /* If both expressions are the same, if we can merge the ranges, and we
4798 can build the range test, return it or it inverted. If one of the
4799 ranges is always true or always false, consider it to be the same
4800 expression as the other. */
4801 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4802 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4804 && 0 != (tem = (build_range_check (type,
4806 : rhs != 0 ? rhs : integer_zero_node,
4808 return or_op ? invert_truthvalue (tem) : tem;
4810 /* On machines where the branch cost is expensive, if this is a
4811 short-circuited branch and the underlying object on both sides
4812 is the same, make a non-short-circuit operation. */
4813 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4814 && lhs != 0 && rhs != 0
4815 && (code == TRUTH_ANDIF_EXPR
4816 || code == TRUTH_ORIF_EXPR)
4817 && operand_equal_p (lhs, rhs, 0))
4819 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4820 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4821 which cases we can't do this. */
4822 if (simple_operand_p (lhs))
4823 return build2 (code == TRUTH_ANDIF_EXPR
4824 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4827 else if (lang_hooks.decls.global_bindings_p () == 0
4828 && ! CONTAINS_PLACEHOLDER_P (lhs))
4830 tree common = save_expr (lhs);
4832 if (0 != (lhs = build_range_check (type, common,
4833 or_op ? ! in0_p : in0_p,
4835 && (0 != (rhs = build_range_check (type, common,
4836 or_op ? ! in1_p : in1_p,
4838 return build2 (code == TRUTH_ANDIF_EXPR
4839 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4847 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4848 bit value. Arrange things so the extra bits will be set to zero if and
4849 only if C is sign-extended to its full width. If MASK is nonzero,
4850 it is an INTEGER_CST that should be AND'ed with the extra bits. */
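/* Illustrative trace for a 32-bit mode and an 8-bit signed field (P == 8):
the sign-extended constant 0xffffffff is mapped to 0x000000ff, whose extra
bits are zero, whereas 0x000000ff is mapped to 0xffffffff. */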
4853 unextend (tree c, int p, int unsignedp, tree mask)
4855 tree type = TREE_TYPE (c);
4856 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4859 if (p == modesize || unsignedp)
4862 /* We work by getting just the sign bit into the low-order bit, then
4863 into the high-order bit, then sign-extend. We then XOR that value with C. */
4865 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4866 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4868 /* We must use a signed type in order to get an arithmetic right shift.
4869 However, we must also avoid introducing accidental overflows, so that
4870 a subsequent call to integer_zerop will work. Hence we must
4871 do the type conversion here. At this point, the constant is either
4872 zero or one, and the conversion to a signed type can never overflow.
4873 We could get an overflow if this conversion is done anywhere else. */
4874 if (TYPE_UNSIGNED (type))
4875 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4877 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4878 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4880 temp = const_binop (BIT_AND_EXPR, temp,
4881 fold_convert (TREE_TYPE (c), mask), 0);
4882 /* If necessary, convert the type back to match the type of C. */
4883 if (TYPE_UNSIGNED (type))
4884 temp = fold_convert (type, temp);
4886 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4889 /* Find ways of folding logical expressions of LHS and RHS:
4890 Try to merge two comparisons to the same innermost item.
4891 Look for range tests like "ch >= '0' && ch <= '9'".
4892 Look for combinations of simple terms on machines with expensive branches
4893 and evaluate the RHS unconditionally.
4895 For example, if we have p->a == 2 && p->b == 4 and we can make an
4896 object large enough to span both A and B, we can do this with a comparison
4897 against the object ANDed with a mask.
4899 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4900 operations to do this with one comparison.
4902 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4903 function and the one above.
4905 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4906 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4908 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
4911 We return the simplified tree or 0 if no optimization is possible. */
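/* Roughly, for "struct { unsigned a : 4; unsigned b : 4; } *p" the test
"p->a == 2 && p->b == 4" can become a single byte load of both fields
compared against one constant (0x42 or 0x24, depending on bit order),
and "(a != 0) || (b != 0)" can become "(a | b) != 0". */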
4914 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4916 /* If this is the "or" of two comparisons, we can do something if
4917 the comparisons are NE_EXPR. If this is the "and", we can do something
4918 if the comparisons are EQ_EXPR. I.e.,
4919 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4921 WANTED_CODE is this operation code. For single bit fields, we can
4922 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4923 comparison for one-bit fields. */
4925 enum tree_code wanted_code;
4926 enum tree_code lcode, rcode;
4927 tree ll_arg, lr_arg, rl_arg, rr_arg;
4928 tree ll_inner, lr_inner, rl_inner, rr_inner;
4929 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4930 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4931 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4932 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4933 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4934 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4935 enum machine_mode lnmode, rnmode;
4936 tree ll_mask, lr_mask, rl_mask, rr_mask;
4937 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4938 tree l_const, r_const;
4939 tree lntype, rntype, result;
4940 int first_bit, end_bit;
4942 tree orig_lhs = lhs, orig_rhs = rhs;
4943 enum tree_code orig_code = code;
4945 /* Start by getting the comparison codes. Fail if anything is volatile.
4946 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4947 it were surrounded with a NE_EXPR. */
4949 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4952 lcode = TREE_CODE (lhs);
4953 rcode = TREE_CODE (rhs);
4955 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4957 lhs = build2 (NE_EXPR, truth_type, lhs,
4958 build_int_cst (TREE_TYPE (lhs), 0));
4962 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4964 rhs = build2 (NE_EXPR, truth_type, rhs,
4965 build_int_cst (TREE_TYPE (rhs), 0));
4969 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4970 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4973 ll_arg = TREE_OPERAND (lhs, 0);
4974 lr_arg = TREE_OPERAND (lhs, 1);
4975 rl_arg = TREE_OPERAND (rhs, 0);
4976 rr_arg = TREE_OPERAND (rhs, 1);
4978 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4979 if (simple_operand_p (ll_arg)
4980 && simple_operand_p (lr_arg))
4983 if (operand_equal_p (ll_arg, rl_arg, 0)
4984 && operand_equal_p (lr_arg, rr_arg, 0))
4986 result = combine_comparisons (code, lcode, rcode,
4987 truth_type, ll_arg, lr_arg);
4991 else if (operand_equal_p (ll_arg, rr_arg, 0)
4992 && operand_equal_p (lr_arg, rl_arg, 0))
4994 result = combine_comparisons (code, lcode,
4995 swap_tree_comparison (rcode),
4996 truth_type, ll_arg, lr_arg);
5002 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5003 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5005 /* If the RHS can be evaluated unconditionally and its operands are
5006 simple, it wins to evaluate the RHS unconditionally on machines
5007 with expensive branches. In this case, this isn't a comparison
5008 that can be merged. Avoid doing this if the RHS is a floating-point
5009 comparison since those can trap. */
5011 if (BRANCH_COST >= 2
5012 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5013 && simple_operand_p (rl_arg)
5014 && simple_operand_p (rr_arg))
5016 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5017 if (code == TRUTH_OR_EXPR
5018 && lcode == NE_EXPR && integer_zerop (lr_arg)
5019 && rcode == NE_EXPR && integer_zerop (rr_arg)
5020 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5021 return build2 (NE_EXPR, truth_type,
5022 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5024 build_int_cst (TREE_TYPE (ll_arg), 0));
5026 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5027 if (code == TRUTH_AND_EXPR
5028 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5029 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5030 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5031 return build2 (EQ_EXPR, truth_type,
5032 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5034 build_int_cst (TREE_TYPE (ll_arg), 0));
5036 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5038 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5039 return build2 (code, truth_type, lhs, rhs);
5044 /* See if the comparisons can be merged. Then get all the parameters for each side. */
5047 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5048 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5052 ll_inner = decode_field_reference (ll_arg,
5053 &ll_bitsize, &ll_bitpos, &ll_mode,
5054 &ll_unsignedp, &volatilep, &ll_mask,
5056 lr_inner = decode_field_reference (lr_arg,
5057 &lr_bitsize, &lr_bitpos, &lr_mode,
5058 &lr_unsignedp, &volatilep, &lr_mask,
5060 rl_inner = decode_field_reference (rl_arg,
5061 &rl_bitsize, &rl_bitpos, &rl_mode,
5062 &rl_unsignedp, &volatilep, &rl_mask,
5064 rr_inner = decode_field_reference (rr_arg,
5065 &rr_bitsize, &rr_bitpos, &rr_mode,
5066 &rr_unsignedp, &volatilep, &rr_mask,
5069 /* The inner operation on the lhs of each comparison must be the same
5070 if we are to be able to do anything. Then see if we have constants.
5071 If not, the same must be true for the rhs. */
5073 if (volatilep || ll_inner == 0 || rl_inner == 0
5074 || ! operand_equal_p (ll_inner, rl_inner, 0))
5077 if (TREE_CODE (lr_arg) == INTEGER_CST
5078 && TREE_CODE (rr_arg) == INTEGER_CST)
5079 l_const = lr_arg, r_const = rr_arg;
5080 else if (lr_inner == 0 || rr_inner == 0
5081 || ! operand_equal_p (lr_inner, rr_inner, 0))
5084 l_const = r_const = 0;
5086 /* If either comparison code is not correct for our logical operation,
5087 fail. However, we can convert a one-bit comparison against zero into
5088 the opposite comparison against that bit being set in the field. */
5090 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5091 if (lcode != wanted_code)
5093 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5095 /* Make the left operand unsigned, since we are only interested
5096 in the value of one bit. Otherwise we are doing the wrong
5105 /* This is analogous to the code for l_const above. */
5106 if (rcode != wanted_code)
5108 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5117 /* After this point all optimizations will generate bit-field
5118 references, which we might not want. */
5119 if (! lang_hooks.can_use_bit_fields_p ())
5122 /* See if we can find a mode that contains both fields being compared on
5123 the left. If we can't, fail. Otherwise, update all constants and masks
5124 to be relative to a field of that size. */
5125 first_bit = MIN (ll_bitpos, rl_bitpos);
5126 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5127 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5128 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5130 if (lnmode == VOIDmode)
5133 lnbitsize = GET_MODE_BITSIZE (lnmode);
5134 lnbitpos = first_bit & ~ (lnbitsize - 1);
5135 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5136 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5138 if (BYTES_BIG_ENDIAN)
5140 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5141 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5144 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5145 size_int (xll_bitpos), 0);
5146 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5147 size_int (xrl_bitpos), 0);
5151 l_const = fold_convert (lntype, l_const);
5152 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5153 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5154 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5155 fold_build1 (BIT_NOT_EXPR,
5159 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5161 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5166 r_const = fold_convert (lntype, r_const);
5167 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5168 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5169 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5170 fold_build1 (BIT_NOT_EXPR,
5174 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5176 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5180 /* If the right sides are not constant, do the same for them. Also,
5181 disallow this optimization if a size or signedness mismatch occurs
5182 between the left and right sides. */
5185 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5186 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5187 /* Make sure the two fields on the right
5188 correspond to the left without being swapped. */
5189 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5192 first_bit = MIN (lr_bitpos, rr_bitpos);
5193 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5194 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5195 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5197 if (rnmode == VOIDmode)
5200 rnbitsize = GET_MODE_BITSIZE (rnmode);
5201 rnbitpos = first_bit & ~ (rnbitsize - 1);
5202 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5203 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5205 if (BYTES_BIG_ENDIAN)
5207 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5208 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5211 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5212 size_int (xlr_bitpos), 0);
5213 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5214 size_int (xrr_bitpos), 0);
5216 /* Make a mask that corresponds to both fields being compared.
5217 Do this for both items being compared. If the operands are the
5218 same size and the bits being compared are in the same position
5219 then we can do this by masking both and comparing the masked results. */
5221 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5222 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5223 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5225 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5226 ll_unsignedp || rl_unsignedp);
5227 if (! all_ones_mask_p (ll_mask, lnbitsize))
5228 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5230 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5231 lr_unsignedp || rr_unsignedp);
5232 if (! all_ones_mask_p (lr_mask, rnbitsize))
5233 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5235 return build2 (wanted_code, truth_type, lhs, rhs);
5238 /* There is still another way we can do something: If both pairs of
5239 fields being compared are adjacent, we may be able to make a wider
5240 field containing them both.
5242 Note that we still must mask the lhs/rhs expressions. Furthermore,
5243 the mask must be shifted to account for the shift done by
5244 make_bit_field_ref. */
5245 if ((ll_bitsize + ll_bitpos == rl_bitpos
5246 && lr_bitsize + lr_bitpos == rr_bitpos)
5247 || (ll_bitpos == rl_bitpos + rl_bitsize
5248 && lr_bitpos == rr_bitpos + rr_bitsize))
5252 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5253 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5254 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5255 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5257 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5258 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5259 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5260 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5262 /* Convert to the smaller type before masking out unwanted bits. */
5264 if (lntype != rntype)
5266 if (lnbitsize > rnbitsize)
5268 lhs = fold_convert (rntype, lhs);
5269 ll_mask = fold_convert (rntype, ll_mask);
5272 else if (lnbitsize < rnbitsize)
5274 rhs = fold_convert (lntype, rhs);
5275 lr_mask = fold_convert (lntype, lr_mask);
5280 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5281 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5283 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5284 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5286 return build2 (wanted_code, truth_type, lhs, rhs);
5292 /* Handle the case of comparisons with constants. If there is something in
5293 common between the masks, those bits of the constants must be the same.
5294 If not, the condition is always false. Test for this to avoid generating
5295 incorrect code below. */
5296 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5297 if (! integer_zerop (result)
5298 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5299 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5301 if (wanted_code == NE_EXPR)
5303 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5304 return constant_boolean_node (true, truth_type);
5308 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5309 return constant_boolean_node (false, truth_type);
5313 /* Construct the expression we will return. First get the component
5314 reference we will make. Unless the mask is all ones the width of
5315 that field, perform the mask operation. Then compare with the merged constant. */
5317 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5318 ll_unsignedp || rl_unsignedp);
5320 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5321 if (! all_ones_mask_p (ll_mask, lnbitsize))
5322 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5324 return build2 (wanted_code, truth_type, result,
5325 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5328 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant. */
5332 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5335 enum tree_code op_code;
5336 tree comp_const = op1;
5338 int consts_equal, consts_lt;
5341 STRIP_SIGN_NOPS (arg0);
5343 op_code = TREE_CODE (arg0);
5344 minmax_const = TREE_OPERAND (arg0, 1);
5345 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5346 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5347 inner = TREE_OPERAND (arg0, 0);
5349 /* If something does not permit us to optimize, return the original tree. */
5350 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5351 || TREE_CODE (comp_const) != INTEGER_CST
5352 || TREE_CONSTANT_OVERFLOW (comp_const)
5353 || TREE_CODE (minmax_const) != INTEGER_CST
5354 || TREE_CONSTANT_OVERFLOW (minmax_const))
5357 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5358 and GT_EXPR, doing the rest with recursive calls using logical simplifications. */
5362 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5364 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5367 return invert_truthvalue (tem);
5373 fold_build2 (TRUTH_ORIF_EXPR, type,
5374 optimize_minmax_comparison
5375 (EQ_EXPR, type, arg0, comp_const),
5376 optimize_minmax_comparison
5377 (GT_EXPR, type, arg0, comp_const));
5380 if (op_code == MAX_EXPR && consts_equal)
5381 /* MAX (X, 0) == 0 -> X <= 0 */
5382 return fold_build2 (LE_EXPR, type, inner, comp_const);
5384 else if (op_code == MAX_EXPR && consts_lt)
5385 /* MAX (X, 0) == 5 -> X == 5 */
5386 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5388 else if (op_code == MAX_EXPR)
5389 /* MAX (X, 0) == -1 -> false */
5390 return omit_one_operand (type, integer_zero_node, inner);
5392 else if (consts_equal)
5393 /* MIN (X, 0) == 0 -> X >= 0 */
5394 return fold_build2 (GE_EXPR, type, inner, comp_const);
5397 /* MIN (X, 0) == 5 -> false */
5398 return omit_one_operand (type, integer_zero_node, inner);
5401 /* MIN (X, 0) == -1 -> X == -1 */
5402 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5405 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5406 /* MAX (X, 0) > 0 -> X > 0
5407 MAX (X, 0) > 5 -> X > 5 */
5408 return fold_build2 (GT_EXPR, type, inner, comp_const);
5410 else if (op_code == MAX_EXPR)
5411 /* MAX (X, 0) > -1 -> true */
5412 return omit_one_operand (type, integer_one_node, inner);
5414 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5415 /* MIN (X, 0) > 0 -> false
5416 MIN (X, 0) > 5 -> false */
5417 return omit_one_operand (type, integer_zero_node, inner);
5420 /* MIN (X, 0) > -1 -> X > -1 */
5421 return fold_build2 (GT_EXPR, type, inner, comp_const);
5428 /* T is an integer expression that is being multiplied, divided, or taken a
5429 modulus (CODE says which and what kind of divide or modulus) by a
5430 constant C. See if we can eliminate that operation by folding it with
5431 other operations already in T. WIDE_TYPE, if non-null, is a type that
5432 should be used for the computation if wider than our type.
5434 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5435 (X * 2) + (Y * 4). We must, however, be assured that either the original
5436 expression would not overflow or that overflow is undefined for the type
5437 in the language in question.
5439 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5440 the machine has a multiply-accumulate insn or that this is part of an
5441 addressing calculation.
5443 If we return a non-null expression, it is an equivalent form of the
5444 original computation, but need not be in the original type. */
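/* As a further example, with signed X the expression (X * 6 + 3) / 3 can
be rewritten here as X * 2 + 1, relying on signed overflow being undefined
in the original expression. */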
5447 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5449 /* To avoid exponential search depth, refuse to allow recursion past
5450 three levels. Beyond that (1) it's highly unlikely that we'll find
5451 something interesting and (2) we've probably processed it before
5452 when we built the inner expression. */
5461 ret = extract_muldiv_1 (t, c, code, wide_type);
5468 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5470 tree type = TREE_TYPE (t);
5471 enum tree_code tcode = TREE_CODE (t);
5472 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5473 > GET_MODE_SIZE (TYPE_MODE (type)))
5474 ? wide_type : type);
5476 int same_p = tcode == code;
5477 tree op0 = NULL_TREE, op1 = NULL_TREE;
5479 /* Don't deal with constants of zero here; they confuse the code below. */
5480 if (integer_zerop (c))
5483 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5484 op0 = TREE_OPERAND (t, 0);
5486 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5487 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5489 /* Note that we need not handle conditional operations here since fold
5490 already handles those cases. So just do arithmetic here. */
5494 /* For a constant, we can always simplify if we are a multiply
5495 or (for divide and modulus) if it is a multiple of our constant. */
5496 if (code == MULT_EXPR
5497 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5498 return const_binop (code, fold_convert (ctype, t),
5499 fold_convert (ctype, c), 0);
5502 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5503 /* If op0 is an expression ... */
5504 if ((COMPARISON_CLASS_P (op0)
5505 || UNARY_CLASS_P (op0)
5506 || BINARY_CLASS_P (op0)
5507 || EXPRESSION_CLASS_P (op0))
5508 /* ... and is unsigned, and its type is smaller than ctype,
5509 then we cannot pass through as widening. */
5510 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5511 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5512 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5513 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5514 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5515 /* ... or this is a truncation (t is narrower than op0),
5516 then we cannot pass through this narrowing. */
5517 || (GET_MODE_SIZE (TYPE_MODE (type))
5518 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5519 /* ... or signedness changes for division or modulus,
5520 then we cannot pass through this conversion. */
5521 || (code != MULT_EXPR
5522 && (TYPE_UNSIGNED (ctype)
5523 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5526 /* Pass the constant down and see if we can make a simplification. If
5527 we can, replace this expression with the inner simplification for
5528 possible later conversion to our or some other type. */
5529 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5530 && TREE_CODE (t2) == INTEGER_CST
5531 && ! TREE_CONSTANT_OVERFLOW (t2)
5532 && (0 != (t1 = extract_muldiv (op0, t2, code,
5534 ? ctype : NULL_TREE))))
5539 /* If widening the type changes it from signed to unsigned, then we
5540 must avoid building ABS_EXPR itself as unsigned. */
5541 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5543 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5544 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5546 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5547 return fold_convert (ctype, t1);
5553 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5554 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5557 case MIN_EXPR: case MAX_EXPR:
5558 /* If widening the type changes the signedness, then we can't perform
5559 this optimization as that changes the result. */
5560 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5563 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5564 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5565 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5567 if (tree_int_cst_sgn (c) < 0)
5568 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5570 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5571 fold_convert (ctype, t2));
5575 case LSHIFT_EXPR: case RSHIFT_EXPR:
5576 /* If the second operand is constant, this is a multiplication
5577 or floor division, by a power of two, so we can treat it that
5578 way unless the multiplier or divisor overflows. Signed
5579 left-shift overflow is implementation-defined rather than
5580 undefined in C90, so do not convert signed left shift into multiplication. */
5582 if (TREE_CODE (op1) == INTEGER_CST
5583 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5584 /* const_binop may not detect overflow correctly,
5585 so check for it explicitly here. */
5586 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5587 && TREE_INT_CST_HIGH (op1) == 0
5588 && 0 != (t1 = fold_convert (ctype,
5589 const_binop (LSHIFT_EXPR,
5592 && ! TREE_OVERFLOW (t1))
5593 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5594 ? MULT_EXPR : FLOOR_DIV_EXPR,
5595 ctype, fold_convert (ctype, op0), t1),
5596 c, code, wide_type);
5599 case PLUS_EXPR: case MINUS_EXPR:
5600 /* See if we can eliminate the operation on both sides. If we can, we
5601 can return a new PLUS or MINUS. If we can't, the only remaining
5602 cases where we can do anything are if the second operand is a constant. */
5604 t1 = extract_muldiv (op0, c, code, wide_type);
5605 t2 = extract_muldiv (op1, c, code, wide_type);
5606 if (t1 != 0 && t2 != 0
5607 && (code == MULT_EXPR
5608 /* If not multiplication, we can only do this if both operands
5609 are divisible by c. */
5610 || (multiple_of_p (ctype, op0, c)
5611 && multiple_of_p (ctype, op1, c))))
5612 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5613 fold_convert (ctype, t2));
5615 /* If this was a subtraction, negate OP1 and set it to be an addition.
5616 This simplifies the logic below. */
5617 if (tcode == MINUS_EXPR)
5618 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5620 if (TREE_CODE (op1) != INTEGER_CST)
5623 /* If either OP1 or C are negative, this optimization is not safe for
5624 some of the division and remainder types while for others we need
5625 to change the code. */
5626 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5628 if (code == CEIL_DIV_EXPR)
5629 code = FLOOR_DIV_EXPR;
5630 else if (code == FLOOR_DIV_EXPR)
5631 code = CEIL_DIV_EXPR;
5632 else if (code != MULT_EXPR
5633 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5637 /* If it's a multiply or a division/modulus operation of a multiple
5638 of our constant, do the operation and verify it doesn't overflow. */
5639 if (code == MULT_EXPR
5640 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5642 op1 = const_binop (code, fold_convert (ctype, op1),
5643 fold_convert (ctype, c), 0);
5644 /* We allow the constant to overflow with wrapping semantics. */
5646 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5652 /* If we have an unsigned type that is not a sizetype, we cannot widen
5653 the operation since it will change the result if the original
5654 computation overflowed. */
5655 if (TYPE_UNSIGNED (ctype)
5656 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5660 /* If we were able to eliminate our operation from the first side,
5661 apply our operation to the second side and reform the PLUS. */
5662 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5663 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5665 /* The last case is if we are a multiply. In that case, we can
5666 apply the distributive law to commute the multiply and addition
5667 if the multiplication of the constants doesn't overflow. */
5668 if (code == MULT_EXPR)
5669 return fold_build2 (tcode, ctype,
5670 fold_build2 (code, ctype,
5671 fold_convert (ctype, op0),
5672 fold_convert (ctype, c)),
5678 /* We have a special case here if we are doing something like
5679 (C * 8) % 4 since we know that's zero. */
5680 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5681 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5682 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5683 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5684 return omit_one_operand (type, integer_zero_node, op0);
5686 /* ... fall through ... */
5688 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5689 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5690 /* If we can extract our operation from the LHS, do so and return a
5691 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5692 do something only if the second operand is a constant. */
5694 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5695 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5696 fold_convert (ctype, op1));
5697 else if (tcode == MULT_EXPR && code == MULT_EXPR
5698 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5699 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5700 fold_convert (ctype, t1));
5701 else if (TREE_CODE (op1) != INTEGER_CST)
5704 /* If these are the same operation types, we can associate them
5705 assuming no overflow. */
5707 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5708 fold_convert (ctype, c), 0))
5709 && ! TREE_OVERFLOW (t1))
5710 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5712 /* If these operations "cancel" each other, we have the main
5713 optimizations of this pass, which occur when either constant is a
5714 multiple of the other, in which case we replace this with either an
5715 operation of CODE or TCODE.
5717 If we have an unsigned type that is not a sizetype, we cannot do
5718 this since it will change the result if the original computation overflowed. */
5720 if ((! TYPE_UNSIGNED (ctype)
5721 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5723 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5724 || (tcode == MULT_EXPR
5725 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5726 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5728 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5729 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5730 fold_convert (ctype,
5731 const_binop (TRUNC_DIV_EXPR,
5733 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5734 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5735 fold_convert (ctype,
5736 const_binop (TRUNC_DIV_EXPR,
5748 /* Return a node which has the indicated constant VALUE (either 0 or
5749 1), and is of the indicated TYPE. */
5752 constant_boolean_node (int value, tree type)
5754 if (type == integer_type_node)
5755 return value ? integer_one_node : integer_zero_node;
5756 else if (type == boolean_type_node)
5757 return value ? boolean_true_node : boolean_false_node;
5759 return build_int_cst (type, value);
5763 /* Return true if expr looks like an ARRAY_REF and set base and
5764 offset to the appropriate trees. If there is no offset,
5765 offset is set to NULL_TREE. Base will be canonicalized to
5766 something you can get the element type from using
5767 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5768 in bytes to the base. */
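/* For example, "&a[i]" yields base "a" and offset "i * sizeof (a[0])",
while a plain pointer variable "p" yields base "p" and a NULL offset. */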
5771 extract_array_ref (tree expr, tree *base, tree *offset)
5773 /* One canonical form is a PLUS_EXPR with the first
5774 argument being an ADDR_EXPR with a possible NOP_EXPR
5776 if (TREE_CODE (expr) == PLUS_EXPR)
5778 tree op0 = TREE_OPERAND (expr, 0);
5779 tree inner_base, dummy1;
5780 /* Strip NOP_EXPRs here because the C frontends and/or
5781 folders may present us with (int *)&x.a + 4B. */
5783 if (extract_array_ref (op0, &inner_base, &dummy1))
5786 if (dummy1 == NULL_TREE)
5787 *offset = TREE_OPERAND (expr, 1);
5789 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5790 dummy1, TREE_OPERAND (expr, 1));
5794 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5795 which we transform into an ADDR_EXPR with appropriate
5796 offset. For other arguments to the ADDR_EXPR we assume
5797 zero offset and as such do not care about the ADDR_EXPR
5798 type and strip possible nops from it. */
5799 else if (TREE_CODE (expr) == ADDR_EXPR)
5801 tree op0 = TREE_OPERAND (expr, 0);
5802 if (TREE_CODE (op0) == ARRAY_REF)
5804 tree idx = TREE_OPERAND (op0, 1);
5805 *base = TREE_OPERAND (op0, 0);
5806 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5807 array_ref_element_size (op0));
5811 /* Handle array-to-pointer decay as &a. */
5812 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5813 *base = TREE_OPERAND (expr, 0);
5816 *offset = NULL_TREE;
5820 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5821 else if (SSA_VAR_P (expr)
5822 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5825 *offset = NULL_TREE;
5833 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5834 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5835 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5836 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5837 COND is the first argument to CODE; otherwise (as in the example
5838 given here), it is the second argument. TYPE is the type of the
5839 original expression. Return NULL_TREE if no simplification is possible. */
5843 fold_binary_op_with_conditional_arg (enum tree_code code,
5844 tree type, tree op0, tree op1,
5845 tree cond, tree arg, int cond_first_p)
5847 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5848 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5849 tree test, true_value, false_value;
5850 tree lhs = NULL_TREE;
5851 tree rhs = NULL_TREE;
5853 /* This transformation is only worthwhile if we don't have to wrap
5854 arg in a SAVE_EXPR, and the operation can be simplified on at least
5855 one of the branches once it is pushed inside the COND_EXPR. */
5856 if (!TREE_CONSTANT (arg))
5859 if (TREE_CODE (cond) == COND_EXPR)
5861 test = TREE_OPERAND (cond, 0);
5862 true_value = TREE_OPERAND (cond, 1);
5863 false_value = TREE_OPERAND (cond, 2);
5864 /* If this operand throws an exception, then it does not make
5865 sense to try to perform a logical or arithmetic operation involving it. */
5867 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5869 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5874 tree testtype = TREE_TYPE (cond);
5876 true_value = constant_boolean_node (true, testtype);
5877 false_value = constant_boolean_node (false, testtype);
5880 arg = fold_convert (arg_type, arg);
5883 true_value = fold_convert (cond_type, true_value);
5885 lhs = fold_build2 (code, type, true_value, arg);
5887 lhs = fold_build2 (code, type, arg, true_value);
5891 false_value = fold_convert (cond_type, false_value);
5893 rhs = fold_build2 (code, type, false_value, arg);
5895 rhs = fold_build2 (code, type, arg, false_value);
5898 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5899 return fold_convert (type, test);
5903 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5905 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5906 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5907 ADDEND is the same as X.
5909 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5910 and finite. The problematic cases are when X is zero, and its mode
5911 has signed zeros. In the case of rounding towards -infinity,
5912 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5913 modes, X + 0 is not the same as X because -0 + 0 is 0. */
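/* Concretely: (-0.0) + 0.0 evaluates to +0.0 under the default rounding
mode, so "x + 0.0" must not be folded to "x" when x may be -0.0 and signed
zeros are honored, whereas "x - 0.0" is safe unless rounding towards
-infinity is in effect. */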
5916 fold_real_zero_addition_p (tree type, tree addend, int negate)
5918 if (!real_zerop (addend))
5921 /* Don't allow the fold with -fsignaling-nans. */
5922 if (HONOR_SNANS (TYPE_MODE (type)))
5925 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5926 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5929 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5930 if (TREE_CODE (addend) == REAL_CST
5931 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5934 /* The mode has signed zeros, and we have to honor their sign.
5935 In this situation, there is only one case we can return true for.
5936 X - 0 is the same as X unless rounding towards -infinity is
5938 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5941 /* Subroutine of fold() that checks comparisons of built-in math
5942 functions against real constants.
5944 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5945 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5946 is the type of the result and ARG0 and ARG1 are the operands of the
5947 comparison. ARG1 must be a TREE_REAL_CST.
5949 The function returns the constant folded tree if a simplification
5950 can be made, and NULL_TREE otherwise. */
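/* For example, "sqrt (x) > 2.0" can be folded to "x > 4.0", and
"sqrt (x) < -1.0" folds to constant false. */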
5953 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5954 tree type, tree arg0, tree arg1)
5958 if (BUILTIN_SQRT_P (fcode))
5960 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5961 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5963 c = TREE_REAL_CST (arg1);
5964 if (REAL_VALUE_NEGATIVE (c))
5966 /* sqrt(x) < y is always false, if y is negative. */
5967 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5968 return omit_one_operand (type, integer_zero_node, arg);
5970 /* sqrt(x) > y is always true, if y is negative and we
5971 don't care about NaNs, i.e. negative values of x. */
5972 if (code == NE_EXPR || !HONOR_NANS (mode))
5973 return omit_one_operand (type, integer_one_node, arg);
5975 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5976 return fold_build2 (GE_EXPR, type, arg,
5977 build_real (TREE_TYPE (arg), dconst0));
5979 else if (code == GT_EXPR || code == GE_EXPR)
5983 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5984 real_convert (&c2, mode, &c2);
5986 if (REAL_VALUE_ISINF (c2))
5988 /* sqrt(x) > y is x == +Inf, when y is very large. */
5989 if (HONOR_INFINITIES (mode))
5990 return fold_build2 (EQ_EXPR, type, arg,
5991 build_real (TREE_TYPE (arg), c2));
5993 /* sqrt(x) > y is always false, when y is very large
5994 and we don't care about infinities. */
5995 return omit_one_operand (type, integer_zero_node, arg);
5998 /* sqrt(x) > c is the same as x > c*c. */
5999 return fold_build2 (code, type, arg,
6000 build_real (TREE_TYPE (arg), c2));
6002 else if (code == LT_EXPR || code == LE_EXPR)
6006 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6007 real_convert (&c2, mode, &c2);
6009 if (REAL_VALUE_ISINF (c2))
6011 /* sqrt(x) < y is always true, when y is a very large
6012 value and we don't care about NaNs or Infinities. */
6013 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6014 return omit_one_operand (type, integer_one_node, arg);
6016 /* sqrt(x) < y is x != +Inf when y is very large and we
6017 don't care about NaNs. */
6018 if (! HONOR_NANS (mode))
6019 return fold_build2 (NE_EXPR, type, arg,
6020 build_real (TREE_TYPE (arg), c2));
6022 /* sqrt(x) < y is x >= 0 when y is very large and we
6023 don't care about Infinities. */
6024 if (! HONOR_INFINITIES (mode))
6025 return fold_build2 (GE_EXPR, type, arg,
6026 build_real (TREE_TYPE (arg), dconst0));
6028 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6029 if (lang_hooks.decls.global_bindings_p () != 0
6030 || CONTAINS_PLACEHOLDER_P (arg))
6033 arg = save_expr (arg);
6034 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6035 fold_build2 (GE_EXPR, type, arg,
6036 build_real (TREE_TYPE (arg),
6038 fold_build2 (NE_EXPR, type, arg,
6039 build_real (TREE_TYPE (arg),
6043 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6044 if (! HONOR_NANS (mode))
6045 return fold_build2 (code, type, arg,
6046 build_real (TREE_TYPE (arg), c2));
6048 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6049 if (lang_hooks.decls.global_bindings_p () == 0
6050 && ! CONTAINS_PLACEHOLDER_P (arg))
6052 arg = save_expr (arg);
6053 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6054 fold_build2 (GE_EXPR, type, arg,
6055 build_real (TREE_TYPE (arg),
6057 fold_build2 (code, type, arg,
6058 build_real (TREE_TYPE (arg),
6067 /* Subroutine of fold() that optimizes comparisons against Infinities,
6068 either +Inf or -Inf.
6070 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6071 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6072 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6074 The function returns the constant folded tree if a simplification
6075 can be made, and NULL_TREE otherwise. */
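/* For example, for a double x, "x >= +Inf" can be folded to "x > DBL_MAX"
and "x < +Inf" to "x <= DBL_MAX". */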
6078 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6080 enum machine_mode mode;
6081 REAL_VALUE_TYPE max;
6085 mode = TYPE_MODE (TREE_TYPE (arg0));
6087 /* For negative infinity swap the sense of the comparison. */
6088 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6090 code = swap_tree_comparison (code);
6095 /* x > +Inf is always false, if we ignore sNaNs. */
6096 if (HONOR_SNANS (mode))
6098 return omit_one_operand (type, integer_zero_node, arg0);
6101 /* x <= +Inf is always true, if we don't care about NaNs. */
6102 if (! HONOR_NANS (mode))
6103 return omit_one_operand (type, integer_one_node, arg0);
6105 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6106 if (lang_hooks.decls.global_bindings_p () == 0
6107 && ! CONTAINS_PLACEHOLDER_P (arg0))
6109 arg0 = save_expr (arg0);
6110 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6116 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6117 real_maxval (&max, neg, mode);
6118 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6119 arg0, build_real (TREE_TYPE (arg0), max));
6122 /* x < +Inf is always equal to x <= DBL_MAX. */
6123 real_maxval (&max, neg, mode);
6124 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6125 arg0, build_real (TREE_TYPE (arg0), max));
6128 /* x != +Inf is always equal to !(x > DBL_MAX). */
6129 real_maxval (&max, neg, mode);
6130 if (! HONOR_NANS (mode))
6131 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6132 arg0, build_real (TREE_TYPE (arg0), max));
6134 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6135 arg0, build_real (TREE_TYPE (arg0), max));
6136 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6145 /* Subroutine of fold() that optimizes comparisons of a division by
6146 a nonzero integer constant against an integer constant, i.e. X / C1 op C2.
6149 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6150 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6151 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6153 The function returns the constant folded tree if a simplification
6154 can be made, and NULL_TREE otherwise. */
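/* For example, with signed x, "x / 4 == 5" becomes the range check
"x >= 20 && x <= 23", and "x / 4 > 5" becomes "x > 23". */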
6157 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6159 tree prod, tmp, hi, lo;
6160 tree arg00 = TREE_OPERAND (arg0, 0);
6161 tree arg01 = TREE_OPERAND (arg0, 1);
6162 unsigned HOST_WIDE_INT lpart;
6163 HOST_WIDE_INT hpart;
6164 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6168 /* We have to do this the hard way to detect unsigned overflow.
6169 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6170 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6171 TREE_INT_CST_HIGH (arg01),
6172 TREE_INT_CST_LOW (arg1),
6173 TREE_INT_CST_HIGH (arg1),
6174 &lpart, &hpart, unsigned_p);
6175 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6176 prod = force_fit_type (prod, -1, overflow, false);
6177 neg_overflow = false;
6181 tmp = int_const_binop (MINUS_EXPR, arg01,
6182 build_int_cst (TREE_TYPE (arg01), 1), 0);
6185 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6186 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6187 TREE_INT_CST_HIGH (prod),
6188 TREE_INT_CST_LOW (tmp),
6189 TREE_INT_CST_HIGH (tmp),
6190 &lpart, &hpart, unsigned_p);
6191 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6192 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6193 TREE_CONSTANT_OVERFLOW (prod));
6195 else if (tree_int_cst_sgn (arg01) >= 0)
6197 tmp = int_const_binop (MINUS_EXPR, arg01,
6198 build_int_cst (TREE_TYPE (arg01), 1), 0);
6199 switch (tree_int_cst_sgn (arg1))
6202 neg_overflow = true;
6203 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6208 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6213 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6223 /* A negative divisor reverses the relational operators. */
6224 code = swap_tree_comparison (code);
6226 tmp = int_const_binop (PLUS_EXPR, arg01,
6227 build_int_cst (TREE_TYPE (arg01), 1), 0);
6228 switch (tree_int_cst_sgn (arg1))
6231 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6236 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6241 neg_overflow = true;
6242 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6254 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6255 return omit_one_operand (type, integer_zero_node, arg00);
6256 if (TREE_OVERFLOW (hi))
6257 return fold_build2 (GE_EXPR, type, arg00, lo);
6258 if (TREE_OVERFLOW (lo))
6259 return fold_build2 (LE_EXPR, type, arg00, hi);
6260 return build_range_check (type, arg00, 1, lo, hi);
6263 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6264 return omit_one_operand (type, integer_one_node, arg00);
6265 if (TREE_OVERFLOW (hi))
6266 return fold_build2 (LT_EXPR, type, arg00, lo);
6267 if (TREE_OVERFLOW (lo))
6268 return fold_build2 (GT_EXPR, type, arg00, hi);
6269 return build_range_check (type, arg00, 0, lo, hi);
6272 if (TREE_OVERFLOW (lo))
6274 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6275 return omit_one_operand (type, tmp, arg00);
6277 return fold_build2 (LT_EXPR, type, arg00, lo);
6280 if (TREE_OVERFLOW (hi))
6282 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6283 return omit_one_operand (type, tmp, arg00);
6285 return fold_build2 (LE_EXPR, type, arg00, hi);
6288 if (TREE_OVERFLOW (hi))
6290 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6291 return omit_one_operand (type, tmp, arg00);
6293 return fold_build2 (GT_EXPR, type, arg00, hi);
6296 if (TREE_OVERFLOW (lo))
6298 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6299 return omit_one_operand (type, tmp, arg00);
6301 return fold_build2 (GE_EXPR, type, arg00, lo);
6311 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6312 equality/inequality test, then return a simplified form of the test
6313 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
6317 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6320 /* If this is testing a single bit, we can optimize the test. */
6321 if ((code == NE_EXPR || code == EQ_EXPR)
6322 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6323 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6325 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6326 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
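/* For example, assuming A has a 32-bit int type, (A & 0x80000000) != 0
   becomes A < 0 and (A & 0x80000000) == 0 becomes A >= 0.  */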
6327 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6329 if (arg00 != NULL_TREE
6330 /* This is only a win if casting to a signed type is cheap,
6331 i.e. when arg00's type is not a partial mode. */
6332 && TYPE_PRECISION (TREE_TYPE (arg00))
6333 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6335 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6336 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6337 result_type, fold_convert (stype, arg00),
6338 build_int_cst (stype, 0));
6345 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6346 equality/inequality test, then return a simplified form of
6347 the test using shifts and logical operations. Otherwise return
6348 NULL. TYPE is the desired result type. */
6351 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6354 /* If this is testing a single bit, we can optimize the test. */
6355 if ((code == NE_EXPR || code == EQ_EXPR)
6356 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6357 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6359 tree inner = TREE_OPERAND (arg0, 0);
6360 tree type = TREE_TYPE (arg0);
6361 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6362 enum machine_mode operand_mode = TYPE_MODE (type);
6364 tree signed_type, unsigned_type, intermediate_type;
6367 /* First, see if we can fold the single bit test into a sign-bit
6369 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6374 /* Otherwise we have (A & C) != 0 where C is a single bit,
6375 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6376 Similarly for (A & C) == 0. */
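/* For example, (A & 8) != 0 becomes ((A >> 3) & 1), possibly computed in
   an unsigned intermediate type, since 8 == 1 << 3.  */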
6378 /* If INNER is a right shift of a constant and it plus BITNUM does
6379 not overflow, adjust BITNUM and INNER. */
6380 if (TREE_CODE (inner) == RSHIFT_EXPR
6381 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6382 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6383 && bitnum < TYPE_PRECISION (type)
6384 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6385 bitnum - TYPE_PRECISION (type)))
6387 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6388 inner = TREE_OPERAND (inner, 0);
6391 /* If we are going to be able to omit the AND below, we must do our
6392 operations as unsigned. If we must use the AND, we have a choice.
6393 Normally unsigned is faster, but for some machines signed is. */
6394 #ifdef LOAD_EXTEND_OP
6395 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6396 && !flag_syntax_only) ? 0 : 1;
6401 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6402 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6403 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6404 inner = fold_convert (intermediate_type, inner);
6407 inner = build2 (RSHIFT_EXPR, intermediate_type,
6408 inner, size_int (bitnum));
6410 one = build_int_cst (intermediate_type, 1);
6412 if (code == EQ_EXPR)
6413 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6415 /* Put the AND last so it can combine with more things. */
6416 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6418 /* Make sure to return the proper type. */
6419 inner = fold_convert (result_type, inner);
6426 /* Check whether we are allowed to reorder operands arg0 and arg1,
6427 such that the evaluation of arg1 occurs before arg0. */
6430 reorder_operands_p (tree arg0, tree arg1)
6432 if (! flag_evaluation_order)
6434 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6436 return ! TREE_SIDE_EFFECTS (arg0)
6437 && ! TREE_SIDE_EFFECTS (arg1);
6440 /* Test whether it is preferable to swap two operands, ARG0 and
6441 ARG1, for example because ARG0 is an integer constant and ARG1
6442 isn't. If REORDER is true, only recommend swapping if we can
6443 evaluate the operands in reverse order. */
6446 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6448 STRIP_SIGN_NOPS (arg0);
6449 STRIP_SIGN_NOPS (arg1);
6451 if (TREE_CODE (arg1) == INTEGER_CST)
6453 if (TREE_CODE (arg0) == INTEGER_CST)
6456 if (TREE_CODE (arg1) == REAL_CST)
6458 if (TREE_CODE (arg0) == REAL_CST)
6461 if (TREE_CODE (arg1) == COMPLEX_CST)
6463 if (TREE_CODE (arg0) == COMPLEX_CST)
6466 if (TREE_CONSTANT (arg1))
6468 if (TREE_CONSTANT (arg0))
6474 if (reorder && flag_evaluation_order
6475 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6483 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6484 for commutative and comparison operators. Ensuring a canonical
6485 form allows the optimizers to find additional redundancies without
6486 having to explicitly check for both orderings. */
6487 if (TREE_CODE (arg0) == SSA_NAME
6488 && TREE_CODE (arg1) == SSA_NAME
6489 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6495 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6496 ARG0 is extended to a wider type. */
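/* For example, if UC is an unsigned char widened to int, (int) UC == 300
   folds to 0 and (int) UC < 300 folds to 1, because 300 lies outside the
   range of the narrower type.  */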
6499 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6501 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6503 tree shorter_type, outer_type;
6507 if (arg0_unw == arg0)
6509 shorter_type = TREE_TYPE (arg0_unw);
6511 #ifdef HAVE_canonicalize_funcptr_for_compare
6512 /* Disable this optimization if we're casting a function pointer
6513 type on targets that require function pointer canonicalization. */
6514 if (HAVE_canonicalize_funcptr_for_compare
6515 && TREE_CODE (shorter_type) == POINTER_TYPE
6516 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6520 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6523 arg1_unw = get_unwidened (arg1, shorter_type);
6525 /* If possible, express the comparison in the shorter mode. */
6526 if ((code == EQ_EXPR || code == NE_EXPR
6527 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6528 && (TREE_TYPE (arg1_unw) == shorter_type
6529 || (TREE_CODE (arg1_unw) == INTEGER_CST
6530 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6531 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6532 && int_fits_type_p (arg1_unw, shorter_type))))
6533 return fold_build2 (code, type, arg0_unw,
6534 fold_convert (shorter_type, arg1_unw));
6536 if (TREE_CODE (arg1_unw) != INTEGER_CST
6537 || TREE_CODE (shorter_type) != INTEGER_TYPE
6538 || !int_fits_type_p (arg1_unw, shorter_type))
6541 /* If we are comparing with an integer that does not fit into the range
6542 of the shorter type, the result is known. */
6543 outer_type = TREE_TYPE (arg1_unw);
6544 min = lower_bound_in_type (outer_type, shorter_type);
6545 max = upper_bound_in_type (outer_type, shorter_type);
6547 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6549 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6556 return omit_one_operand (type, integer_zero_node, arg0);
6561 return omit_one_operand (type, integer_one_node, arg0);
6567 return omit_one_operand (type, integer_one_node, arg0);
6569 return omit_one_operand (type, integer_zero_node, arg0);
6574 return omit_one_operand (type, integer_zero_node, arg0);
6576 return omit_one_operand (type, integer_one_node, arg0);
6585 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6586 ARG0 just the signedness is changed. */
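/* For example, if X is an int, (unsigned int) X == 5 can be rewritten as
   X == 5; changing only the signedness of the operand cannot affect an
   equality test.  */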
6589 fold_sign_changed_comparison (enum tree_code code, tree type,
6590 tree arg0, tree arg1)
6592 tree arg0_inner, tmp;
6593 tree inner_type, outer_type;
6595 if (TREE_CODE (arg0) != NOP_EXPR
6596 && TREE_CODE (arg0) != CONVERT_EXPR)
6599 outer_type = TREE_TYPE (arg0);
6600 arg0_inner = TREE_OPERAND (arg0, 0);
6601 inner_type = TREE_TYPE (arg0_inner);
6603 #ifdef HAVE_canonicalize_funcptr_for_compare
6604 /* Disable this optimization if we're casting a function pointer
6605 type on targets that require function pointer canonicalization. */
6606 if (HAVE_canonicalize_funcptr_for_compare
6607 && TREE_CODE (inner_type) == POINTER_TYPE
6608 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6612 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6615 if (TREE_CODE (arg1) != INTEGER_CST
6616 && !((TREE_CODE (arg1) == NOP_EXPR
6617 || TREE_CODE (arg1) == CONVERT_EXPR)
6618 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6621 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6626 if (TREE_CODE (arg1) == INTEGER_CST)
6628 tmp = build_int_cst_wide (inner_type,
6629 TREE_INT_CST_LOW (arg1),
6630 TREE_INT_CST_HIGH (arg1));
6631 arg1 = force_fit_type (tmp, 0,
6632 TREE_OVERFLOW (arg1),
6633 TREE_CONSTANT_OVERFLOW (arg1));
6636 arg1 = fold_convert (inner_type, arg1);
6638 return fold_build2 (code, type, arg0_inner, arg1);
6641 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6642 the step of the array. Reconstructs s and delta in the case of s * delta
6643 being an integer constant (and thus already folded).
6644 ADDR is the address. OP1 is the multiplicative expression.
6645 If the function succeeds, the new address expression is returned. Otherwise
6646 NULL_TREE is returned. */
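/* For example, assuming a is an array of 4-byte elements, &a[i] + j * 4
   can become &a[i + j], because the multiplier matches the array step
   (the element size).  */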
6649 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6651 tree s, delta, step;
6652 tree ref = TREE_OPERAND (addr, 0), pref;
6656 /* Canonicalize op1 into a possibly non-constant delta
6657 and an INTEGER_CST s. */
6658 if (TREE_CODE (op1) == MULT_EXPR)
6660 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6665 if (TREE_CODE (arg0) == INTEGER_CST)
6670 else if (TREE_CODE (arg1) == INTEGER_CST)
6678 else if (TREE_CODE (op1) == INTEGER_CST)
6685 /* Treat op1 as delta * 1. */
6687 s = integer_one_node;
6690 for (;; ref = TREE_OPERAND (ref, 0))
6692 if (TREE_CODE (ref) == ARRAY_REF)
6694 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6698 step = array_ref_element_size (ref);
6699 if (TREE_CODE (step) != INTEGER_CST)
6704 if (! tree_int_cst_equal (step, s))
6709 /* Check whether delta is a multiple of step. */
6710 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6719 if (!handled_component_p (ref))
6723 /* We found a suitable array reference. So copy everything up to it,
6724 and replace the index. */
6726 pref = TREE_OPERAND (addr, 0);
6727 ret = copy_node (pref);
6732 pref = TREE_OPERAND (pref, 0);
6733 TREE_OPERAND (pos, 0) = copy_node (pref);
6734 pos = TREE_OPERAND (pos, 0);
6737 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6738 fold_convert (itype,
6739 TREE_OPERAND (pos, 1)),
6740 fold_convert (itype, delta));
6742 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6746 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6747 means A >= Y && A != MAX, but in this case we know that
6748 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
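/* The rewrite is safe because BOUND guarantees A < X <= MAX, so A + 1
   cannot wrap around and A + 1 > Y is therefore equivalent to A >= Y.  */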
6751 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6753 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6755 if (TREE_CODE (bound) == LT_EXPR)
6756 a = TREE_OPERAND (bound, 0);
6757 else if (TREE_CODE (bound) == GT_EXPR)
6758 a = TREE_OPERAND (bound, 1);
6762 typea = TREE_TYPE (a);
6763 if (!INTEGRAL_TYPE_P (typea)
6764 && !POINTER_TYPE_P (typea))
6767 if (TREE_CODE (ineq) == LT_EXPR)
6769 a1 = TREE_OPERAND (ineq, 1);
6770 y = TREE_OPERAND (ineq, 0);
6772 else if (TREE_CODE (ineq) == GT_EXPR)
6774 a1 = TREE_OPERAND (ineq, 0);
6775 y = TREE_OPERAND (ineq, 1);
6780 if (TREE_TYPE (a1) != typea)
6783 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6784 if (!integer_onep (diff))
6787 return fold_build2 (GE_EXPR, type, a, y);
6790 /* Fold a sum or difference in which at least one operand is a multiplication.
6791 Returns the folded tree or NULL if no simplification could be made. */
6794 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6796 tree arg00, arg01, arg10, arg11;
6797 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6799 /* (A * C) +- (B * C) -> (A+-B) * C.
6800 (A * C) +- A -> A * (C+-1).
6801 We are most concerned about the case where C is a constant,
6802 but other combinations show up during loop reduction. Since
6803 it is not difficult, try all four possibilities. */
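/* For example, a * 4 + b * 4 becomes (a + b) * 4, and x * 3 + x becomes
   x * 4 via the (A * C) +- A -> A * (C+-1) case.  */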
6805 if (TREE_CODE (arg0) == MULT_EXPR)
6807 arg00 = TREE_OPERAND (arg0, 0);
6808 arg01 = TREE_OPERAND (arg0, 1);
6813 arg01 = build_one_cst (type);
6815 if (TREE_CODE (arg1) == MULT_EXPR)
6817 arg10 = TREE_OPERAND (arg1, 0);
6818 arg11 = TREE_OPERAND (arg1, 1);
6823 arg11 = build_one_cst (type);
6827 if (operand_equal_p (arg01, arg11, 0))
6828 same = arg01, alt0 = arg00, alt1 = arg10;
6829 else if (operand_equal_p (arg00, arg10, 0))
6830 same = arg00, alt0 = arg01, alt1 = arg11;
6831 else if (operand_equal_p (arg00, arg11, 0))
6832 same = arg00, alt0 = arg01, alt1 = arg10;
6833 else if (operand_equal_p (arg01, arg10, 0))
6834 same = arg01, alt0 = arg00, alt1 = arg11;
6836 /* No identical multiplicands; see if we can find a common
6837 power-of-two factor in non-power-of-two multiplies. This
6838 can help in multi-dimensional array access. */
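/* For example, in i * 12 + j * 4 the multipliers share the power-of-two
   factor 4, so the sum can be rewritten as (i * 3 + j) * 4.  */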
6839 else if (host_integerp (arg01, 0)
6840 && host_integerp (arg11, 0))
6842 HOST_WIDE_INT int01, int11, tmp;
6845 int01 = TREE_INT_CST_LOW (arg01);
6846 int11 = TREE_INT_CST_LOW (arg11);
6848 /* Move min of absolute values to int11. */
6849 if ((int01 >= 0 ? int01 : -int01)
6850 < (int11 >= 0 ? int11 : -int11))
6852 tmp = int01, int01 = int11, int11 = tmp;
6853 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6860 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6862 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6863 build_int_cst (TREE_TYPE (arg00),
6868 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6873 return fold_build2 (MULT_EXPR, type,
6874 fold_build2 (code, type,
6875 fold_convert (type, alt0),
6876 fold_convert (type, alt1)),
6877 fold_convert (type, same));
6882 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6883 specified by EXPR into the buffer PTR of length LEN bytes.
6884 Return the number of bytes placed in the buffer, or zero upon failure. */
6888 native_encode_int (tree expr, unsigned char *ptr, int len)
6890 tree type = TREE_TYPE (expr);
6891 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6892 int byte, offset, word, words;
6893 unsigned char value;
6895 if (total_bytes > len)
6897 words = total_bytes / UNITS_PER_WORD;
6899 for (byte = 0; byte < total_bytes; byte++)
6901 int bitpos = byte * BITS_PER_UNIT;
6902 if (bitpos < HOST_BITS_PER_WIDE_INT)
6903 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6905 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6906 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6908 if (total_bytes > UNITS_PER_WORD)
6910 word = byte / UNITS_PER_WORD;
6911 if (WORDS_BIG_ENDIAN)
6912 word = (words - 1) - word;
6913 offset = word * UNITS_PER_WORD;
6914 if (BYTES_BIG_ENDIAN)
6915 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6917 offset += byte % UNITS_PER_WORD;
6920 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6921 ptr[offset] = value;
6927 /* Subroutine of native_encode_expr. Encode the REAL_CST
6928 specified by EXPR into the buffer PTR of length LEN bytes.
6929 Return the number of bytes placed in the buffer, or zero upon failure. */
6933 native_encode_real (tree expr, unsigned char *ptr, int len)
6935 tree type = TREE_TYPE (expr);
6936 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6937 int byte, offset, word, words;
6938 unsigned char value;
6940 /* There are always 32 bits in each long, no matter the size of
6941 the host's long. We handle floating point representations with up to 192 bits. */
6945 if (total_bytes > len)
6947 words = total_bytes / UNITS_PER_WORD;
6949 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6951 for (byte = 0; byte < total_bytes; byte++)
6953 int bitpos = byte * BITS_PER_UNIT;
6954 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6956 if (total_bytes > UNITS_PER_WORD)
6958 word = byte / UNITS_PER_WORD;
6959 if (FLOAT_WORDS_BIG_ENDIAN)
6960 word = (words - 1) - word;
6961 offset = word * UNITS_PER_WORD;
6962 if (BYTES_BIG_ENDIAN)
6963 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6965 offset += byte % UNITS_PER_WORD;
6968 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6969 ptr[offset] = value;
6974 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6975 specified by EXPR into the buffer PTR of length LEN bytes.
6976 Return the number of bytes placed in the buffer, or zero upon failure. */
6980 native_encode_complex (tree expr, unsigned char *ptr, int len)
6985 part = TREE_REALPART (expr);
6986 rsize = native_encode_expr (part, ptr, len);
6989 part = TREE_IMAGPART (expr);
6990 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6993 return rsize + isize;
6997 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6998 specified by EXPR into the buffer PTR of length LEN bytes.
6999 Return the number of bytes placed in the buffer, or zero upon failure. */
7003 native_encode_vector (tree expr, unsigned char *ptr, int len)
7005 int i, size, offset, count;
7006 tree itype, elem, elements;
7009 elements = TREE_VECTOR_CST_ELTS (expr);
7010 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7011 itype = TREE_TYPE (TREE_TYPE (expr));
7012 size = GET_MODE_SIZE (TYPE_MODE (itype));
7013 for (i = 0; i < count; i++)
7017 elem = TREE_VALUE (elements);
7018 elements = TREE_CHAIN (elements);
7025 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7030 if (offset + size > len)
7032 memset (ptr+offset, 0, size);
7040 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7041 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7042 buffer PTR of length LEN bytes. Return the number of bytes
7043 placed in the buffer, or zero upon failure. */
7046 native_encode_expr (tree expr, unsigned char *ptr, int len)
7048 switch (TREE_CODE (expr))
7051 return native_encode_int (expr, ptr, len);
7054 return native_encode_real (expr, ptr, len);
7057 return native_encode_complex (expr, ptr, len);
7060 return native_encode_vector (expr, ptr, len);
7068 /* Subroutine of native_interpret_expr. Interpret the contents of
7069 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7070 If the buffer cannot be interpreted, return NULL_TREE. */
7073 native_interpret_int (tree type, unsigned char *ptr, int len)
7075 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7076 int byte, offset, word, words;
7077 unsigned char value;
7078 unsigned HOST_WIDE_INT lo = 0;
7079 HOST_WIDE_INT hi = 0;
7081 if (total_bytes > len)
7083 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7085 words = total_bytes / UNITS_PER_WORD;
7087 for (byte = 0; byte < total_bytes; byte++)
7089 int bitpos = byte * BITS_PER_UNIT;
7090 if (total_bytes > UNITS_PER_WORD)
7092 word = byte / UNITS_PER_WORD;
7093 if (WORDS_BIG_ENDIAN)
7094 word = (words - 1) - word;
7095 offset = word * UNITS_PER_WORD;
7096 if (BYTES_BIG_ENDIAN)
7097 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7099 offset += byte % UNITS_PER_WORD;
7102 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7103 value = ptr[offset];
7105 if (bitpos < HOST_BITS_PER_WIDE_INT)
7106 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7108 hi |= (unsigned HOST_WIDE_INT) value
7109 << (bitpos - HOST_BITS_PER_WIDE_INT);
7112 return build_int_cst_wide_type (type, lo, hi);
7116 /* Subroutine of native_interpret_expr. Interpret the contents of
7117 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7118 If the buffer cannot be interpreted, return NULL_TREE. */
7121 native_interpret_real (tree type, unsigned char *ptr, int len)
7123 enum machine_mode mode = TYPE_MODE (type);
7124 int total_bytes = GET_MODE_SIZE (mode);
7125 int byte, offset, word, words;
7126 unsigned char value;
7127 /* There are always 32 bits in each long, no matter the size of
7128 the host's long. We handle floating point representations with up to 192 bits. */
7133 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7134 if (total_bytes > len || total_bytes > 24)
7136 words = total_bytes / UNITS_PER_WORD;
7138 memset (tmp, 0, sizeof (tmp));
7139 for (byte = 0; byte < total_bytes; byte++)
7141 int bitpos = byte * BITS_PER_UNIT;
7142 if (total_bytes > UNITS_PER_WORD)
7144 word = byte / UNITS_PER_WORD;
7145 if (FLOAT_WORDS_BIG_ENDIAN)
7146 word = (words - 1) - word;
7147 offset = word * UNITS_PER_WORD;
7148 if (BYTES_BIG_ENDIAN)
7149 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7151 offset += byte % UNITS_PER_WORD;
7154 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7155 value = ptr[offset];
7157 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7160 real_from_target (&r, tmp, mode);
7161 return build_real (type, r);
7165 /* Subroutine of native_interpret_expr. Interpret the contents of
7166 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7167 If the buffer cannot be interpreted, return NULL_TREE. */
7170 native_interpret_complex (tree type, unsigned char *ptr, int len)
7172 tree etype, rpart, ipart;
7175 etype = TREE_TYPE (type);
7176 size = GET_MODE_SIZE (TYPE_MODE (etype));
7179 rpart = native_interpret_expr (etype, ptr, size);
7182 ipart = native_interpret_expr (etype, ptr+size, size);
7185 return build_complex (type, rpart, ipart);
7189 /* Subroutine of native_interpret_expr. Interpret the contents of
7190 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7191 If the buffer cannot be interpreted, return NULL_TREE. */
7194 native_interpret_vector (tree type, unsigned char *ptr, int len)
7196 tree etype, elem, elements;
7199 etype = TREE_TYPE (type);
7200 size = GET_MODE_SIZE (TYPE_MODE (etype));
7201 count = TYPE_VECTOR_SUBPARTS (type);
7202 if (size * count > len)
7205 elements = NULL_TREE;
7206 for (i = count - 1; i >= 0; i--)
7208 elem = native_interpret_expr (etype, ptr+(i*size), size);
7211 elements = tree_cons (NULL_TREE, elem, elements);
7213 return build_vector (type, elements);
7217 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7218 the buffer PTR of length LEN as a constant of type TYPE. For
7219 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7220 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7221 return NULL_TREE. */
7224 native_interpret_expr (tree type, unsigned char *ptr, int len)
7226 switch (TREE_CODE (type))
7231 return native_interpret_int (type, ptr, len);
7234 return native_interpret_real (type, ptr, len);
7237 return native_interpret_complex (type, ptr, len);
7240 return native_interpret_vector (type, ptr, len);
7248 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7249 TYPE at compile-time. If we're unable to perform the conversion
7250 return NULL_TREE. */
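/* For example, on a target where float is an IEEE single-precision type,
   a VIEW_CONVERT_EXPR of the 32-bit INTEGER_CST 0x3f800000 to float folds
   to 1.0: the integer is encoded into target bytes and then reinterpreted
   as a REAL_CST.  */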
7253 fold_view_convert_expr (tree type, tree expr)
7255 /* We support up to 512-bit values (for V8DFmode). */
7256 unsigned char buffer[64];
7259 /* Check that the host and target are sane. */
7260 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7263 len = native_encode_expr (expr, buffer, sizeof (buffer));
7267 return native_interpret_expr (type, buffer, len);
7271 /* Fold a unary expression of code CODE and type TYPE with operand
7272 OP0. Return the folded expression if folding is successful.
7273 Otherwise, return NULL_TREE. */
7276 fold_unary (enum tree_code code, tree type, tree op0)
7280 enum tree_code_class kind = TREE_CODE_CLASS (code);
7282 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7283 && TREE_CODE_LENGTH (code) == 1);
7288 if (code == NOP_EXPR || code == CONVERT_EXPR
7289 || code == FLOAT_EXPR || code == ABS_EXPR)
7291 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
7293 STRIP_SIGN_NOPS (arg0);
7297 /* Strip any conversions that don't change the mode. This
7298 is safe for every expression, except for a comparison
7299 expression because its signedness is derived from its operands.
7302 Note that this is done as an internal manipulation within
7303 the constant folder, in order to find the simplest
7304 representation of the arguments so that their form can be
7305 studied. In any case, the appropriate type conversions
7306 should be put back in the tree that will get out of the
7312 if (TREE_CODE_CLASS (code) == tcc_unary)
7314 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7315 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7316 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7317 else if (TREE_CODE (arg0) == COND_EXPR)
7319 tree arg01 = TREE_OPERAND (arg0, 1);
7320 tree arg02 = TREE_OPERAND (arg0, 2);
7321 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7322 arg01 = fold_build1 (code, type, arg01);
7323 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7324 arg02 = fold_build1 (code, type, arg02);
7325 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7328 /* If this was a conversion, and all we did was to move it
7329 inside the COND_EXPR, bring it back out. But leave it if
7330 it is a conversion from integer to integer and the
7331 result precision is no wider than a word since such a
7332 conversion is cheap and may be optimized away by combine,
7333 while it couldn't if it were outside the COND_EXPR. Then return
7334 so we don't get into an infinite recursion loop taking the
7335 conversion out and then back in. */
7337 if ((code == NOP_EXPR || code == CONVERT_EXPR
7338 || code == NON_LVALUE_EXPR)
7339 && TREE_CODE (tem) == COND_EXPR
7340 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7341 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7342 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7343 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7344 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7345 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7346 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7348 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7349 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7350 || flag_syntax_only))
7351 tem = build1 (code, type,
7353 TREE_TYPE (TREE_OPERAND
7354 (TREE_OPERAND (tem, 1), 0)),
7355 TREE_OPERAND (tem, 0),
7356 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7357 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7360 else if (COMPARISON_CLASS_P (arg0))
7362 if (TREE_CODE (type) == BOOLEAN_TYPE)
7364 arg0 = copy_node (arg0);
7365 TREE_TYPE (arg0) = type;
7368 else if (TREE_CODE (type) != INTEGER_TYPE)
7369 return fold_build3 (COND_EXPR, type, arg0,
7370 fold_build1 (code, type,
7372 fold_build1 (code, type,
7373 integer_zero_node));
7382 case FIX_TRUNC_EXPR:
7383 if (TREE_TYPE (op0) == type)
7386 /* If we have (type) (a CMP b) and type is an integral type, return
7387 a new expression involving the new type. */
7388 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7389 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7390 TREE_OPERAND (op0, 1));
7392 /* Handle cases of two conversions in a row. */
7393 if (TREE_CODE (op0) == NOP_EXPR
7394 || TREE_CODE (op0) == CONVERT_EXPR)
7396 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7397 tree inter_type = TREE_TYPE (op0);
7398 int inside_int = INTEGRAL_TYPE_P (inside_type);
7399 int inside_ptr = POINTER_TYPE_P (inside_type);
7400 int inside_float = FLOAT_TYPE_P (inside_type);
7401 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7402 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7403 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7404 int inter_int = INTEGRAL_TYPE_P (inter_type);
7405 int inter_ptr = POINTER_TYPE_P (inter_type);
7406 int inter_float = FLOAT_TYPE_P (inter_type);
7407 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7408 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7409 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7410 int final_int = INTEGRAL_TYPE_P (type);
7411 int final_ptr = POINTER_TYPE_P (type);
7412 int final_float = FLOAT_TYPE_P (type);
7413 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7414 unsigned int final_prec = TYPE_PRECISION (type);
7415 int final_unsignedp = TYPE_UNSIGNED (type);
7417 /* In addition to the cases of two conversions in a row
7418 handled below, if we are converting something to its own
7419 type via an object of identical or wider precision, neither
7420 conversion is needed. */
7421 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7422 && (((inter_int || inter_ptr) && final_int)
7423 || (inter_float && final_float))
7424 && inter_prec >= final_prec)
7425 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7427 /* Likewise, if the intermediate and final types are either both
7428 float or both integer, we don't need the middle conversion if
7429 it is wider than the final type and doesn't change the signedness
7430 (for integers). Avoid this if the final type is a pointer
7431 since then we sometimes need the inner conversion. Likewise if
7432 the outer has a precision not equal to the size of its mode. */
7433 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7434 || (inter_float && inside_float)
7435 || (inter_vec && inside_vec))
7436 && inter_prec >= inside_prec
7437 && (inter_float || inter_vec
7438 || inter_unsignedp == inside_unsignedp)
7439 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7440 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7442 && (! final_vec || inter_prec == inside_prec))
7443 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7445 /* If we have a sign-extension of a zero-extended value, we can
7446 replace that by a single zero-extension. */
7447 if (inside_int && inter_int && final_int
7448 && inside_prec < inter_prec && inter_prec < final_prec
7449 && inside_unsignedp && !inter_unsignedp)
7450 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7452 /* Two conversions in a row are not needed unless:
7453 - some conversion is floating-point (overstrict for now), or
7454 - some conversion is a vector (overstrict for now), or
7455 - the intermediate type is narrower than both initial and
7457 - the intermediate type and innermost type differ in signedness,
7458 and the outermost type is wider than the intermediate, or
7459 - the initial type is a pointer type and the precisions of the
7460 intermediate and final types differ, or
7461 - the final type is a pointer type and the precisions of the
7462 initial and intermediate types differ.
7463 - the final type is a pointer type and the initial type is not, or
7464 - the initial type is a pointer to an array and the final type is not. */
7466 if (! inside_float && ! inter_float && ! final_float
7467 && ! inside_vec && ! inter_vec && ! final_vec
7468 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7469 && ! (inside_int && inter_int
7470 && inter_unsignedp != inside_unsignedp
7471 && inter_prec < final_prec)
7472 && ((inter_unsignedp && inter_prec > inside_prec)
7473 == (final_unsignedp && final_prec > inter_prec))
7474 && ! (inside_ptr && inter_prec != final_prec)
7475 && ! (final_ptr && inside_prec != inter_prec)
7476 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7477 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7478 && final_ptr == inside_ptr
7480 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7481 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7482 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7485 /* Handle (T *)&A.B.C for A being of type T and B and C
7486 living at offset zero. This occurs frequently in
7487 C++ upcasting and then accessing the base. */
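/* For example, given struct B { int i; }; struct D { struct B b; }; and a
   variable d of type struct D, (struct D *) &d.b folds to &d: the member
   lives at offset zero and the base object has the pointed-to type of the
   result.  */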
7488 if (TREE_CODE (op0) == ADDR_EXPR
7489 && POINTER_TYPE_P (type)
7490 && handled_component_p (TREE_OPERAND (op0, 0)))
7492 HOST_WIDE_INT bitsize, bitpos;
7494 enum machine_mode mode;
7495 int unsignedp, volatilep;
7496 tree base = TREE_OPERAND (op0, 0);
7497 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7498 &mode, &unsignedp, &volatilep, false);
7499 /* If the reference was to a (constant) zero offset, we can use
7500 the address of the base if it has the same base type
7501 as the result type. */
7502 if (! offset && bitpos == 0
7503 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7504 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7505 return fold_convert (type, build_fold_addr_expr (base));
7508 if ((TREE_CODE (op0) == MODIFY_EXPR
7509 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7510 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7511 /* Detect assigning a bitfield. */
7512 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7514 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7516 /* Don't leave an assignment inside a conversion
7517 unless assigning a bitfield. */
7518 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7519 /* First do the assignment, then return converted constant. */
7520 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7521 TREE_NO_WARNING (tem) = 1;
7522 TREE_USED (tem) = 1;
7526 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7527 constant (if x has signed type, the sign bit cannot be set
7528 in c). This folds extension into the BIT_AND_EXPR. */
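/* For example, if the bitwise AND is computed in an unsigned char type,
   (int) (X & 0x7f) can become (int) X & 0x7f, applying the widening
   conversion to X alone.  */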
7529 if (INTEGRAL_TYPE_P (type)
7530 && TREE_CODE (type) != BOOLEAN_TYPE
7531 && TREE_CODE (op0) == BIT_AND_EXPR
7532 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7535 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7538 if (TYPE_UNSIGNED (TREE_TYPE (and))
7539 || (TYPE_PRECISION (type)
7540 <= TYPE_PRECISION (TREE_TYPE (and))))
7542 else if (TYPE_PRECISION (TREE_TYPE (and1))
7543 <= HOST_BITS_PER_WIDE_INT
7544 && host_integerp (and1, 1))
7546 unsigned HOST_WIDE_INT cst;
7548 cst = tree_low_cst (and1, 1);
7549 cst &= (HOST_WIDE_INT) -1
7550 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7551 change = (cst == 0);
7552 #ifdef LOAD_EXTEND_OP
7554 && !flag_syntax_only
7555 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7558 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7559 and0 = fold_convert (uns, and0);
7560 and1 = fold_convert (uns, and1);
7566 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7567 TREE_INT_CST_HIGH (and1));
7568 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7569 TREE_CONSTANT_OVERFLOW (and1));
7570 return fold_build2 (BIT_AND_EXPR, type,
7571 fold_convert (type, and0), tem);
7575 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7576 T2 being pointers to types of the same size. */
7577 if (POINTER_TYPE_P (type)
7578 && BINARY_CLASS_P (arg0)
7579 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7580 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7582 tree arg00 = TREE_OPERAND (arg0, 0);
7584 tree t1 = TREE_TYPE (arg00);
7585 tree tt0 = TREE_TYPE (t0);
7586 tree tt1 = TREE_TYPE (t1);
7587 tree s0 = TYPE_SIZE (tt0);
7588 tree s1 = TYPE_SIZE (tt1);
7590 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7591 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7592 TREE_OPERAND (arg0, 1));
7595 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7596 of the same precision, and X is an integer type not narrower than
7597 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7598 if (INTEGRAL_TYPE_P (type)
7599 && TREE_CODE (op0) == BIT_NOT_EXPR
7600 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7601 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7602 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7603 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7605 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7606 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7607 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7608 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7611 tem = fold_convert_const (code, type, arg0);
7612 return tem ? tem : NULL_TREE;
7614 case VIEW_CONVERT_EXPR:
7615 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7616 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7617 return fold_view_convert_expr (type, op0);
7620 tem = fold_negate_expr (arg0);
7622 return fold_convert (type, tem);
7626 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7627 return fold_abs_const (arg0, type);
7628 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7629 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7630 /* Convert fabs((double)float) into (double)fabsf(float). */
7631 else if (TREE_CODE (arg0) == NOP_EXPR
7632 && TREE_CODE (type) == REAL_TYPE)
7634 tree targ0 = strip_float_extensions (arg0);
7636 return fold_convert (type, fold_build1 (ABS_EXPR,
7640 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7641 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7644 /* Strip sign ops from argument. */
7645 if (TREE_CODE (type) == REAL_TYPE)
7647 tem = fold_strip_sign_ops (arg0);
7649 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7654 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7655 return fold_convert (type, arg0);
7656 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7658 tree itype = TREE_TYPE (type);
7659 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7660 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7661 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7663 if (TREE_CODE (arg0) == COMPLEX_CST)
7665 tree itype = TREE_TYPE (type);
7666 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7667 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7668 return build_complex (type, rpart, negate_expr (ipart));
7670 if (TREE_CODE (arg0) == CONJ_EXPR)
7671 return fold_convert (type, TREE_OPERAND (arg0, 0));
7675 if (TREE_CODE (arg0) == INTEGER_CST)
7676 return fold_not_const (arg0, type);
7677 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7678 return TREE_OPERAND (arg0, 0);
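/* The following two rewrites rely on the two's-complement identity
   ~A == -A - 1.  */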
7679 /* Convert ~ (-A) to A - 1. */
7680 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7681 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7682 build_int_cst (type, 1));
7683 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7684 else if (INTEGRAL_TYPE_P (type)
7685 && ((TREE_CODE (arg0) == MINUS_EXPR
7686 && integer_onep (TREE_OPERAND (arg0, 1)))
7687 || (TREE_CODE (arg0) == PLUS_EXPR
7688 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7689 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7690 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7691 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7692 && (tem = fold_unary (BIT_NOT_EXPR, type,
7694 TREE_OPERAND (arg0, 0)))))
7695 return fold_build2 (BIT_XOR_EXPR, type, tem,
7696 fold_convert (type, TREE_OPERAND (arg0, 1)));
7697 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7698 && (tem = fold_unary (BIT_NOT_EXPR, type,
7700 TREE_OPERAND (arg0, 1)))))
7701 return fold_build2 (BIT_XOR_EXPR, type,
7702 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7706 case TRUTH_NOT_EXPR:
7707 /* The argument to invert_truthvalue must have Boolean type. */
7708 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7709 arg0 = fold_convert (boolean_type_node, arg0);
7711 /* Note that the operand of this must be an int
7712 and its values must be 0 or 1.
7713 ("true" is a fixed value perhaps depending on the language,
7714 but we don't handle values other than 1 correctly yet.) */
7715 tem = fold_truth_not_expr (arg0);
7718 return fold_convert (type, tem);
7721 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7722 return fold_convert (type, arg0);
7723 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7724 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7725 TREE_OPERAND (arg0, 1));
7726 if (TREE_CODE (arg0) == COMPLEX_CST)
7727 return fold_convert (type, TREE_REALPART (arg0));
7728 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7730 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7731 tem = fold_build2 (TREE_CODE (arg0), itype,
7732 fold_build1 (REALPART_EXPR, itype,
7733 TREE_OPERAND (arg0, 0)),
7734 fold_build1 (REALPART_EXPR, itype,
7735 TREE_OPERAND (arg0, 1)));
7736 return fold_convert (type, tem);
7738 if (TREE_CODE (arg0) == CONJ_EXPR)
7740 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7741 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7742 return fold_convert (type, tem);
7744 if (TREE_CODE (arg0) == CALL_EXPR)
7746 tree fn = get_callee_fndecl (arg0);
7747 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7748 switch (DECL_FUNCTION_CODE (fn))
7750 CASE_FLT_FN (BUILT_IN_CEXPI):
7751 fn = mathfn_built_in (type, BUILT_IN_COS);
7752 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7760 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7761 return fold_convert (type, integer_zero_node);
7762 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7763 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7764 TREE_OPERAND (arg0, 0));
7765 if (TREE_CODE (arg0) == COMPLEX_CST)
7766 return fold_convert (type, TREE_IMAGPART (arg0));
7767 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7769 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7770 tem = fold_build2 (TREE_CODE (arg0), itype,
7771 fold_build1 (IMAGPART_EXPR, itype,
7772 TREE_OPERAND (arg0, 0)),
7773 fold_build1 (IMAGPART_EXPR, itype,
7774 TREE_OPERAND (arg0, 1)));
7775 return fold_convert (type, tem);
7777 if (TREE_CODE (arg0) == CONJ_EXPR)
7779 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7780 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7781 return fold_convert (type, negate_expr (tem));
7783 if (TREE_CODE (arg0) == CALL_EXPR)
7785 tree fn = get_callee_fndecl (arg0);
7786 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7787 switch (DECL_FUNCTION_CODE (fn))
7789 CASE_FLT_FN (BUILT_IN_CEXPI):
7790 fn = mathfn_built_in (type, BUILT_IN_SIN);
7791 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7800 } /* switch (code) */
7803 /* Fold a binary expression of code CODE and type TYPE with operands
7804 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7805 Return the folded expression if folding is successful. Otherwise,
7806 return NULL_TREE. */
7809 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7811 enum tree_code compl_code;
7813 if (code == MIN_EXPR)
7814 compl_code = MAX_EXPR;
7815 else if (code == MAX_EXPR)
7816 compl_code = MIN_EXPR;
7820 /* MIN (MAX (a, b), b) == b. */
7821 if (TREE_CODE (op0) == compl_code
7822 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7823 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7825 /* MIN (MAX (b, a), b) == b. */
7826 if (TREE_CODE (op0) == compl_code
7827 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7828 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7829 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7831 /* MIN (a, MAX (a, b)) == a. */
7832 if (TREE_CODE (op1) == compl_code
7833 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7834 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7835 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7837 /* MIN (a, MAX (b, a)) == a. */
7838 if (TREE_CODE (op1) == compl_code
7839 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7840 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7841 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7846 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7847 by changing CODE to reduce the magnitude of constants involved in
7848 ARG0 of the comparison.
7849 Returns a canonicalized comparison tree if a simplification was
7850 possible, otherwise returns NULL_TREE. */
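/* For example, X + 2 > Y is rewritten as X + 1 >= Y, which uses a
   constant of smaller magnitude and is valid when signed overflow is
   undefined.  */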
7853 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7854 tree arg0, tree arg1)
7856 enum tree_code code0 = TREE_CODE (arg0);
7857 tree t, cst0 = NULL_TREE;
7861 /* Match A +- CST code arg1 and CST code arg1. */
7862 if (!(((code0 == MINUS_EXPR
7863 || code0 == PLUS_EXPR)
7864 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7865 || code0 == INTEGER_CST))
7868 /* Identify the constant in arg0 and its sign. */
7869 if (code0 == INTEGER_CST)
7872 cst0 = TREE_OPERAND (arg0, 1);
7873 sgn0 = tree_int_cst_sgn (cst0);
7875 /* Overflowed constants and zero will cause problems. */
7876 if (integer_zerop (cst0)
7877 || TREE_OVERFLOW (cst0))
7880 /* See if we can reduce the magnitude of the constant in
7881 arg0 by changing the comparison code. */
7882 if (code0 == INTEGER_CST)
7884 /* CST <= arg1 -> CST-1 < arg1. */
7885 if (code == LE_EXPR && sgn0 == 1)
7887 /* -CST < arg1 -> -CST-1 <= arg1. */
7888 else if (code == LT_EXPR && sgn0 == -1)
7890 /* CST > arg1 -> CST-1 >= arg1. */
7891 else if (code == GT_EXPR && sgn0 == 1)
7893 /* -CST >= arg1 -> -CST-1 > arg1. */
7894 else if (code == GE_EXPR && sgn0 == -1)
7898 /* arg1 code' CST' might be more canonical. */
7903 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7905 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7907 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7908 else if (code == GT_EXPR
7909 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7911 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7912 else if (code == LE_EXPR
7913 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7915 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7916 else if (code == GE_EXPR
7917 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7923 /* Now build the constant reduced in magnitude. */
7924 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7925 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
7926 if (code0 != INTEGER_CST)
7927 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7929 /* If swapping might yield a more canonical form, do so. */
7931 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7933 return fold_build2 (code, type, t, arg1);
7936 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7937 overflow further. Try to decrease the magnitude of constants involved
7938 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7939 and put sole constants at the second argument position.
7940 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7943 maybe_canonicalize_comparison (enum tree_code code, tree type,
7944 tree arg0, tree arg1)
7948 /* In principle pointers also have undefined overflow behavior,
7949 but that causes problems elsewhere. */
7950 if ((flag_wrapv || flag_trapv)
7951 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
7952 || POINTER_TYPE_P (TREE_TYPE (arg0))))
7955 /* Try canonicalization by simplifying arg0. */
7956 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7960 /* Try canonicalization by simplifying arg1 using the swapped
7962 code = swap_tree_comparison (code);
7963 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7966 /* Subroutine of fold_binary. This routine performs all of the
7967 transformations that are common to the equality/inequality
7968 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7969 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7970 fold_binary should call fold_binary rather than this function directly. Fold a comparison with
7971 tree code CODE and type TYPE with operands OP0 and OP1. Return
7972 the folded comparison or NULL_TREE. */
7975 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7977 tree arg0, arg1, tem;
7982 STRIP_SIGN_NOPS (arg0);
7983 STRIP_SIGN_NOPS (arg1);
7985 tem = fold_relational_const (code, type, arg0, arg1);
7986 if (tem != NULL_TREE)
7989 /* If one arg is a real or integer constant, put it last. */
7990 if (tree_swap_operands_p (arg0, arg1, true))
7991 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7993 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
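/* For example, X + 4 < 10 becomes X < 6; this is safe because X is signed
   and overflow is assumed not to occur.  */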
7994 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7995 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7996 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7997 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7998 && !(flag_wrapv || flag_trapv))
7999 && (TREE_CODE (arg1) == INTEGER_CST
8000 && !TREE_OVERFLOW (arg1)))
8002 tree const1 = TREE_OPERAND (arg0, 1);
8004 tree variable = TREE_OPERAND (arg0, 0);
8007 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8009 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8010 TREE_TYPE (arg1), const2, const1);
8011 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8012 && (TREE_CODE (lhs) != INTEGER_CST
8013 || !TREE_OVERFLOW (lhs)))
8014 return fold_build2 (code, type, variable, lhs);
8017 /* For comparisons of pointers we can decompose them into a compile-time
8018 comparison of the base objects and the offsets into the object.
8019 This requires at least one operand being an ADDR_EXPR to do more
8020 than the operand_equal_p test below. */
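/* For example, &s.a != &s.b for two distinct fields of the same object s
   folds to 1, because the decomposed bases match while the bit positions
   differ.  */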
8021 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8022 && (TREE_CODE (arg0) == ADDR_EXPR
8023 || TREE_CODE (arg1) == ADDR_EXPR))
8025 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8026 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8027 enum machine_mode mode;
8028 int volatilep, unsignedp;
8029 bool indirect_base0 = false;
8031 /* Get base and offset for the access. Strip ADDR_EXPR for
8032 get_inner_reference, but put it back by stripping INDIRECT_REF
8033 off the base object if possible. */
8035 if (TREE_CODE (arg0) == ADDR_EXPR)
8037 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8038 &bitsize, &bitpos0, &offset0, &mode,
8039 &unsignedp, &volatilep, false);
8040 if (TREE_CODE (base0) == INDIRECT_REF)
8041 base0 = TREE_OPERAND (base0, 0);
8043 indirect_base0 = true;
8047 if (TREE_CODE (arg1) == ADDR_EXPR)
8049 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8050 &bitsize, &bitpos1, &offset1, &mode,
8051 &unsignedp, &volatilep, false);
8052 /* We have to make sure to have an indirect/non-indirect base1
8053 just the same as we did for base0. */
8054 if (TREE_CODE (base1) == INDIRECT_REF
8056 base1 = TREE_OPERAND (base1, 0);
8057 else if (!indirect_base0)
8060 else if (indirect_base0)
8063 /* If we have equivalent bases we might be able to simplify. */
8065 && operand_equal_p (base0, base1, 0))
8067 /* We can fold this expression to a constant if the non-constant
8068 offset parts are equal. */
8069 if (offset0 == offset1
8070 || (offset0 && offset1
8071 && operand_equal_p (offset0, offset1, 0)))
8076 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8078 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8080 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8082 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8084 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8086 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8090 /* We can simplify the comparison to a comparison of the variable
8091 offset parts if the constant offset parts are equal.
8092 Be careful to use signed size type here because otherwise we
8093 mess with array offsets in the wrong way. This is possible
8094 because pointer arithmetic is restricted to remain within an
8095 object and overflow on pointer differences is undefined as of
8096 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8097 else if (bitpos0 == bitpos1)
8099 tree signed_size_type_node;
8100 signed_size_type_node = signed_type_for (size_type_node);
8102 /* By converting to signed size type we cover middle-end pointer
8103 arithmetic which operates on unsigned pointer types of size
8104 type size and ARRAY_REF offsets which are properly sign or
8105 zero extended from their type in case it is narrower than
8107 if (offset0 == NULL_TREE)
8108 offset0 = build_int_cst (signed_size_type_node, 0);
8110 offset0 = fold_convert (signed_size_type_node, offset0);
8111 if (offset1 == NULL_TREE)
8112 offset1 = build_int_cst (signed_size_type_node, 0);
8114 offset1 = fold_convert (signed_size_type_node, offset1);
8116 return fold_build2 (code, type, offset0, offset1);
8121 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8122 same object, then we can fold this to a comparison of the two offsets in
8123 signed size type. This is possible because pointer arithmetic is
8124 restricted to remain within an object and overflow on pointer differences
8125 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8126 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8127 && !flag_wrapv && !flag_trapv)
8129 tree base0, offset0, base1, offset1;
8131 if (extract_array_ref (arg0, &base0, &offset0)
8132 && extract_array_ref (arg1, &base1, &offset1)
8133 && operand_equal_p (base0, base1, 0))
8135 tree signed_size_type_node;
8136 signed_size_type_node = signed_type_for (size_type_node);
8138 /* By converting to signed size type we cover middle-end pointer
8139 arithmetic which operates on unsigned pointer types of size
8140 type size and ARRAY_REF offsets which are properly sign or
8141 zero extended from their type in case it is narrower than
8143 if (offset0 == NULL_TREE)
8144 offset0 = build_int_cst (signed_size_type_node, 0);
8146 offset0 = fold_convert (signed_size_type_node, offset0);
8147 if (offset1 == NULL_TREE)
8148 offset1 = build_int_cst (signed_size_type_node, 0);
8150 offset1 = fold_convert (signed_size_type_node, offset1);
8152 return fold_build2 (code, type, offset0, offset1);
8156 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8157 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8158 the resulting offset is smaller in absolute value than the
8160 if (!(flag_wrapv || flag_trapv)
8161 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8162 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8163 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8164 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8165 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8166 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8167 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8169 tree const1 = TREE_OPERAND (arg0, 1);
8170 tree const2 = TREE_OPERAND (arg1, 1);
8171 tree variable1 = TREE_OPERAND (arg0, 0);
8172 tree variable2 = TREE_OPERAND (arg1, 0);
8175 /* Put the constant on the side where it doesn't overflow and is
8176 of lower absolute value than before. */
8177 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8178 ? MINUS_EXPR : PLUS_EXPR,
8180 if (!TREE_OVERFLOW (cst)
8181 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8182 return fold_build2 (code, type,
8184 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8187 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8188 ? MINUS_EXPR : PLUS_EXPR,
8190 if (!TREE_OVERFLOW (cst)
8191 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8192 return fold_build2 (code, type,
8193 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8198 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8202 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8204 tree targ0 = strip_float_extensions (arg0);
8205 tree targ1 = strip_float_extensions (arg1);
8206 tree newtype = TREE_TYPE (targ0);
8208 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8209 newtype = TREE_TYPE (targ1);
8211 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8212 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8213 return fold_build2 (code, type, fold_convert (newtype, targ0),
8214 fold_convert (newtype, targ1));
8216 /* (-a) CMP (-b) -> b CMP a */
8217 if (TREE_CODE (arg0) == NEGATE_EXPR
8218 && TREE_CODE (arg1) == NEGATE_EXPR)
8219 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8220 TREE_OPERAND (arg0, 0));
8222 if (TREE_CODE (arg1) == REAL_CST)
8224 REAL_VALUE_TYPE cst;
8225 cst = TREE_REAL_CST (arg1);
8227 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8228 if (TREE_CODE (arg0) == NEGATE_EXPR)
8229 return fold_build2 (swap_tree_comparison (code), type,
8230 TREE_OPERAND (arg0, 0),
8231 build_real (TREE_TYPE (arg1),
8232 REAL_VALUE_NEGATE (cst)));
8234 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8235 /* a CMP (-0) -> a CMP 0 */
8236 if (REAL_VALUE_MINUS_ZERO (cst))
8237 return fold_build2 (code, type, arg0,
8238 build_real (TREE_TYPE (arg1), dconst0));
8240 /* x != NaN is always true, other ops are always false. */
8241 if (REAL_VALUE_ISNAN (cst)
8242 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8244 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8245 return omit_one_operand (type, tem, arg0);
8248 /* Fold comparisons against infinity. */
8249 if (REAL_VALUE_ISINF (cst))
8251 tem = fold_inf_compare (code, type, arg0, arg1);
8252 if (tem != NULL_TREE)
8257 /* If this is a comparison of a real constant with a PLUS_EXPR
8258 or a MINUS_EXPR of a real constant, we can convert it into a
8259 comparison with a revised real constant as long as no overflow
8260 occurs when unsafe_math_optimizations are enabled. */
8261 if (flag_unsafe_math_optimizations
8262 && TREE_CODE (arg1) == REAL_CST
8263 && (TREE_CODE (arg0) == PLUS_EXPR
8264 || TREE_CODE (arg0) == MINUS_EXPR)
8265 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8266 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8267 ? MINUS_EXPR : PLUS_EXPR,
8268 arg1, TREE_OPERAND (arg0, 1), 0))
8269 && ! TREE_CONSTANT_OVERFLOW (tem))
8270 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8272 /* Likewise, we can simplify a comparison of a real constant with
8273 a MINUS_EXPR whose first operand is also a real constant, i.e.
8274 (c1 - x) < c2 becomes x > c1-c2. */
8275 if (flag_unsafe_math_optimizations
8276 && TREE_CODE (arg1) == REAL_CST
8277 && TREE_CODE (arg0) == MINUS_EXPR
8278 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8279 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8281 && ! TREE_CONSTANT_OVERFLOW (tem))
8282 return fold_build2 (swap_tree_comparison (code), type,
8283 TREE_OPERAND (arg0, 1), tem);
8285 /* Fold comparisons against built-in math functions. */
8286 if (TREE_CODE (arg1) == REAL_CST
8287 && flag_unsafe_math_optimizations
8288 && ! flag_errno_math)
8290 enum built_in_function fcode = builtin_mathfn_code (arg0);
8292 if (fcode != END_BUILTINS)
8294 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8295 if (tem != NULL_TREE)
8301 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8302 if (TREE_CONSTANT (arg1)
8303 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8304 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8305 /* This optimization is invalid for ordered comparisons
8306 if CONST+INCR overflows or if foo+incr might overflow.
8307 This optimization is invalid for floating point due to rounding.
8308 For pointer types we assume overflow doesn't happen. */
8309 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8310 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8311 && (code == EQ_EXPR || code == NE_EXPR))))
8313 tree varop, newconst;
8315 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8317 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8318 arg1, TREE_OPERAND (arg0, 1));
8319 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8320 TREE_OPERAND (arg0, 0),
8321 TREE_OPERAND (arg0, 1));
8325 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8326 arg1, TREE_OPERAND (arg0, 1));
8327 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8328 TREE_OPERAND (arg0, 0),
8329 TREE_OPERAND (arg0, 1));
8333 /* If VAROP is a reference to a bitfield, we must mask
8334 the constant by the width of the field. */
8335 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8336 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8337 && host_integerp (DECL_SIZE (TREE_OPERAND
8338 (TREE_OPERAND (varop, 0), 1)), 1))
8340 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8341 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8342 tree folded_compare, shift;
8344 /* First check whether the comparison would come out
8345 always the same. If we don't do that we would
8346 change the meaning with the masking. */
8347 folded_compare = fold_build2 (code, type,
8348 TREE_OPERAND (varop, 0), arg1);
8349 if (TREE_CODE (folded_compare) == INTEGER_CST)
8350 return omit_one_operand (type, folded_compare, varop);
8352 shift = build_int_cst (NULL_TREE,
8353 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8354 shift = fold_convert (TREE_TYPE (varop), shift);
8355 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8357 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8361 return fold_build2 (code, type, varop, newconst);
8364 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8365 && (TREE_CODE (arg0) == NOP_EXPR
8366 || TREE_CODE (arg0) == CONVERT_EXPR))
8368 /* If we are widening one operand of an integer comparison,
8369 see if the other operand is similarly being widened. Perhaps we
8370 can do the comparison in the narrower type. */
8371 tem = fold_widened_comparison (code, type, arg0, arg1);
8375 /* Or if we are changing signedness. */
8376 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8381 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8382 constant, we can simplify it. */
8383 if (TREE_CODE (arg1) == INTEGER_CST
8384 && (TREE_CODE (arg0) == MIN_EXPR
8385 || TREE_CODE (arg0) == MAX_EXPR)
8386 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8388 tem = optimize_minmax_comparison (code, type, op0, op1);
8393 /* Simplify comparison of something with itself. (For IEEE
8394 floating-point, we can only do some of these simplifications.) */
8395 if (operand_equal_p (arg0, arg1, 0))
8400 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8401 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8402 return constant_boolean_node (1, type);
8407 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8408 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8409 return constant_boolean_node (1, type);
8410 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8413 /* For NE, we can only do this simplification for integer types
8414 or when we don't honor IEEE floating point NaNs. */
8415 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8416 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8418 /* ... fall through ... */
8421 return constant_boolean_node (0, type);
8427 /* If we are comparing an expression that just has comparisons
8428 of two integer values, arithmetic expressions of those comparisons,
8429 and constants, we can simplify it. There are only three cases
8430 to check: the two values can either be equal, the first can be
8431 greater, or the second can be greater. Fold the expression for
8432 those three values. Since each value must be 0 or 1, we have
8433 eight possibilities, each of which corresponds to the constant 0
8434 or 1 or one of the six possible comparisons.
8436 This handles common cases like (a > b) == 0 but also handles
8437 expressions like ((x > y) - (y > x)) > 0, which supposedly
8438 occur in macroized code. */
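/* For example, for (a > b) == 1 the three evaluations (a greater,
   a equal, a smaller) give 1, 0, 0; that pattern corresponds to
   GT_EXPR, so the whole expression folds back to a > b. */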
8440 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8442 tree cval1 = 0, cval2 = 0;
8445 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8446 /* Don't handle degenerate cases here; they should already
8447 have been handled anyway. */
8448 && cval1 != 0 && cval2 != 0
8449 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8450 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8451 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8452 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8453 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8454 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8455 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8457 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8458 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8460 /* We can't just pass T to eval_subst in case cval1 or cval2
8461 was the same as ARG1. */
8464 = fold_build2 (code, type,
8465 eval_subst (arg0, cval1, maxval,
8469 = fold_build2 (code, type,
8470 eval_subst (arg0, cval1, maxval,
8474 = fold_build2 (code, type,
8475 eval_subst (arg0, cval1, minval,
8479 /* All three of these results should be 0 or 1. Confirm they are.
8480 Then use those values to select the proper code to use. */
8482 if (TREE_CODE (high_result) == INTEGER_CST
8483 && TREE_CODE (equal_result) == INTEGER_CST
8484 && TREE_CODE (low_result) == INTEGER_CST)
8486 /* Make a 3-bit mask with the high-order bit being the
8487 value for `>', the next for '=', and the low for '<'. */
8488 switch ((integer_onep (high_result) * 4)
8489 + (integer_onep (equal_result) * 2)
8490 + integer_onep (low_result))
8494 return omit_one_operand (type, integer_zero_node, arg0);
8515 return omit_one_operand (type, integer_one_node, arg0);
8519 return save_expr (build2 (code, type, cval1, cval2));
8520 return fold_build2 (code, type, cval1, cval2);
8525 /* Fold a comparison of the address of COMPONENT_REFs with the same
8526 type and component to a comparison of the address of the base
8527 object. In short, &x->a OP &y->a to x OP y and
8528 &x->a OP &y.a to x OP &y */
8529 if (TREE_CODE (arg0) == ADDR_EXPR
8530 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8531 && TREE_CODE (arg1) == ADDR_EXPR
8532 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8534 tree cref0 = TREE_OPERAND (arg0, 0);
8535 tree cref1 = TREE_OPERAND (arg1, 0);
8536 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8538 tree op0 = TREE_OPERAND (cref0, 0);
8539 tree op1 = TREE_OPERAND (cref1, 0);
8540 return fold_build2 (code, type,
8541 build_fold_addr_expr (op0),
8542 build_fold_addr_expr (op1));
8546 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8547 into a single range test. */
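/* For example, x / 4 == 2 is equivalent to the range test
   8 <= x && x <= 11 under truncating division. */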
8548 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8549 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8550 && TREE_CODE (arg1) == INTEGER_CST
8551 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8552 && !integer_zerop (TREE_OPERAND (arg0, 1))
8553 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8554 && !TREE_OVERFLOW (arg1))
8556 tem = fold_div_compare (code, type, arg0, arg1);
8557 if (tem != NULL_TREE)
8561 /* Fold ~X op ~Y as Y op X. */
8562 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8563 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8564 return fold_build2 (code, type,
8565 TREE_OPERAND (arg1, 0),
8566 TREE_OPERAND (arg0, 0));
8568 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8569 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8570 && TREE_CODE (arg1) == INTEGER_CST)
8571 return fold_build2 (swap_tree_comparison (code), type,
8572 TREE_OPERAND (arg0, 0),
8573 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8579 /* Subroutine of fold_binary. Optimize complex multiplications of the
8580 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8581 argument EXPR represents the expression "z" of type TYPE. */
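/* For example, (a + b*i) * (a - b*i) is a*a + b*b with a zero
   imaginary part, which is what is constructed below. */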
8584 fold_mult_zconjz (tree type, tree expr)
8586 tree itype = TREE_TYPE (type);
8587 tree rpart, ipart, tem;
8589 if (TREE_CODE (expr) == COMPLEX_EXPR)
8591 rpart = TREE_OPERAND (expr, 0);
8592 ipart = TREE_OPERAND (expr, 1);
8594 else if (TREE_CODE (expr) == COMPLEX_CST)
8596 rpart = TREE_REALPART (expr);
8597 ipart = TREE_IMAGPART (expr);
8601 expr = save_expr (expr);
8602 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8603 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8606 rpart = save_expr (rpart);
8607 ipart = save_expr (ipart);
8608 tem = fold_build2 (PLUS_EXPR, itype,
8609 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8610 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8611 return fold_build2 (COMPLEX_EXPR, type, tem,
8612 fold_convert (itype, integer_zero_node));
8616 /* Fold a binary expression of code CODE and type TYPE with operands
8617 OP0 and OP1. Return the folded expression if folding is
8618 successful. Otherwise, return NULL_TREE. */
8621 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8623 enum tree_code_class kind = TREE_CODE_CLASS (code);
8624 tree arg0, arg1, tem;
8625 tree t1 = NULL_TREE;
8627 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8628 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8629 && TREE_CODE_LENGTH (code) == 2
8631 && op1 != NULL_TREE);
8636 /* Strip any conversions that don't change the mode. This is
8637 safe for every expression, except for a comparison expression
8638 because its signedness is derived from its operands. So, in
8639 the latter case, only strip conversions that don't change the signedness.
8642 Note that this is done as an internal manipulation within the
8643 constant folder, in order to find the simplest representation
8644 of the arguments so that their form can be studied. In any
8645 case, the appropriate type conversions should be put back in
8646 the tree that will get out of the constant folder. */
8648 if (kind == tcc_comparison)
8650 STRIP_SIGN_NOPS (arg0);
8651 STRIP_SIGN_NOPS (arg1);
8659 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8660 constant but we can't do arithmetic on them. */
8661 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8662 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8663 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8664 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8666 if (kind == tcc_binary)
8667 tem = const_binop (code, arg0, arg1, 0);
8668 else if (kind == tcc_comparison)
8669 tem = fold_relational_const (code, type, arg0, arg1);
8673 if (tem != NULL_TREE)
8675 if (TREE_TYPE (tem) != type)
8676 tem = fold_convert (type, tem);
8681 /* If this is a commutative operation, and ARG0 is a constant, move it
8682 to ARG1 to reduce the number of tests below. */
8683 if (commutative_tree_code (code)
8684 && tree_swap_operands_p (arg0, arg1, true))
8685 return fold_build2 (code, type, op1, op0);
8687 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8689 First check for cases where an arithmetic operation is applied to a
8690 compound, conditional, or comparison operation. Push the arithmetic
8691 operation inside the compound or conditional to see if any folding
8692 can then be done. Convert comparison to conditional for this purpose.
8693 This also optimizes non-constant cases that used to be done in
8696 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8697 one of the operands is a comparison and the other is a comparison, a
8698 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8699 code below would make the expression more complex. Change it to a
8700 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8701 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8703 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8704 || code == EQ_EXPR || code == NE_EXPR)
8705 && ((truth_value_p (TREE_CODE (arg0))
8706 && (truth_value_p (TREE_CODE (arg1))
8707 || (TREE_CODE (arg1) == BIT_AND_EXPR
8708 && integer_onep (TREE_OPERAND (arg1, 1)))))
8709 || (truth_value_p (TREE_CODE (arg1))
8710 && (truth_value_p (TREE_CODE (arg0))
8711 || (TREE_CODE (arg0) == BIT_AND_EXPR
8712 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8714 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8715 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8718 fold_convert (boolean_type_node, arg0),
8719 fold_convert (boolean_type_node, arg1));
8721 if (code == EQ_EXPR)
8722 tem = invert_truthvalue (tem);
8724 return fold_convert (type, tem);
8727 if (TREE_CODE_CLASS (code) == tcc_binary
8728 || TREE_CODE_CLASS (code) == tcc_comparison)
8730 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8731 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8732 fold_build2 (code, type,
8733 TREE_OPERAND (arg0, 1), op1));
8734 if (TREE_CODE (arg1) == COMPOUND_EXPR
8735 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8736 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8737 fold_build2 (code, type,
8738 op0, TREE_OPERAND (arg1, 1)));
8740 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8742 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8744 /*cond_first_p=*/1);
8745 if (tem != NULL_TREE)
8749 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8751 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8753 /*cond_first_p=*/0);
8754 if (tem != NULL_TREE)
8762 /* A + (-B) -> A - B */
8763 if (TREE_CODE (arg1) == NEGATE_EXPR)
8764 return fold_build2 (MINUS_EXPR, type,
8765 fold_convert (type, arg0),
8766 fold_convert (type, TREE_OPERAND (arg1, 0)));
8767 /* (-A) + B -> B - A */
8768 if (TREE_CODE (arg0) == NEGATE_EXPR
8769 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8770 return fold_build2 (MINUS_EXPR, type,
8771 fold_convert (type, arg1),
8772 fold_convert (type, TREE_OPERAND (arg0, 0)));
8773 /* Convert ~A + 1 to -A. */
8774 if (INTEGRAL_TYPE_P (type)
8775 && TREE_CODE (arg0) == BIT_NOT_EXPR
8776 && integer_onep (arg1))
8777 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8779 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or one. */
8781 if ((TREE_CODE (arg0) == MULT_EXPR
8782 || TREE_CODE (arg1) == MULT_EXPR)
8783 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8785 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8790 if (! FLOAT_TYPE_P (type))
8792 if (integer_zerop (arg1))
8793 return non_lvalue (fold_convert (type, arg0));
8796 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8797 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8798 && !TYPE_TRAP_SIGNED (type))
8800 t1 = build_int_cst_type (type, -1);
8801 return omit_one_operand (type, t1, arg1);
8805 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8806 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8807 && !TYPE_TRAP_SIGNED (type))
8809 t1 = build_int_cst_type (type, -1);
8810 return omit_one_operand (type, t1, arg0);
8813 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8814 with a constant, and the two constants have no bits in common,
8815 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
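/* For example, (x & 0xF0) + (y & 0x0F) cannot carry between the two
   masked fields, so it is equivalent to (x & 0xF0) | (y & 0x0F). */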
8817 if (TREE_CODE (arg0) == BIT_AND_EXPR
8818 && TREE_CODE (arg1) == BIT_AND_EXPR
8819 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8820 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8821 && integer_zerop (const_binop (BIT_AND_EXPR,
8822 TREE_OPERAND (arg0, 1),
8823 TREE_OPERAND (arg1, 1), 0)))
8825 code = BIT_IOR_EXPR;
8829 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8830 (plus (plus (mult) (mult)) (foo)) so that we can
8831 take advantage of the factoring cases below. */
8832 if (((TREE_CODE (arg0) == PLUS_EXPR
8833 || TREE_CODE (arg0) == MINUS_EXPR)
8834 && TREE_CODE (arg1) == MULT_EXPR)
8835 || ((TREE_CODE (arg1) == PLUS_EXPR
8836 || TREE_CODE (arg1) == MINUS_EXPR)
8837 && TREE_CODE (arg0) == MULT_EXPR))
8839 tree parg0, parg1, parg, marg;
8840 enum tree_code pcode;
8842 if (TREE_CODE (arg1) == MULT_EXPR)
8843 parg = arg0, marg = arg1;
8845 parg = arg1, marg = arg0;
8846 pcode = TREE_CODE (parg);
8847 parg0 = TREE_OPERAND (parg, 0);
8848 parg1 = TREE_OPERAND (parg, 1);
8852 if (TREE_CODE (parg0) == MULT_EXPR
8853 && TREE_CODE (parg1) != MULT_EXPR)
8854 return fold_build2 (pcode, type,
8855 fold_build2 (PLUS_EXPR, type,
8856 fold_convert (type, parg0),
8857 fold_convert (type, marg)),
8858 fold_convert (type, parg1));
8859 if (TREE_CODE (parg0) != MULT_EXPR
8860 && TREE_CODE (parg1) == MULT_EXPR)
8861 return fold_build2 (PLUS_EXPR, type,
8862 fold_convert (type, parg0),
8863 fold_build2 (pcode, type,
8864 fold_convert (type, marg),
8869 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8870 of the array. The loop optimizer sometimes produces this type of expression. */
8872 if (TREE_CODE (arg0) == ADDR_EXPR)
8874 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8876 return fold_convert (type, tem);
8878 else if (TREE_CODE (arg1) == ADDR_EXPR)
8880 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8882 return fold_convert (type, tem);
8887 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8888 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8889 return non_lvalue (fold_convert (type, arg0));
8891 /* Likewise if the operands are reversed. */
8892 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8893 return non_lvalue (fold_convert (type, arg1));
8895 /* Convert X + -C into X - C. */
8896 if (TREE_CODE (arg1) == REAL_CST
8897 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8899 tem = fold_negate_const (arg1, type);
8900 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8901 return fold_build2 (MINUS_EXPR, type,
8902 fold_convert (type, arg0),
8903 fold_convert (type, tem));
8906 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
8907 to __complex__ ( x, y ). This is not the same for SNaNs or
8908 if signed zeros are involved. */
8909 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8910 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8911 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
8913 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
8914 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
8915 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
8916 bool arg0rz = false, arg0iz = false;
8917 if ((arg0r && (arg0rz = real_zerop (arg0r)))
8918 || (arg0i && (arg0iz = real_zerop (arg0i))))
8920 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
8921 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
8922 if (arg0rz && arg1i && real_zerop (arg1i))
8924 tree rp = arg1r ? arg1r
8925 : build1 (REALPART_EXPR, rtype, arg1);
8926 tree ip = arg0i ? arg0i
8927 : build1 (IMAGPART_EXPR, rtype, arg0);
8928 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8930 else if (arg0iz && arg1r && real_zerop (arg1r))
8932 tree rp = arg0r ? arg0r
8933 : build1 (REALPART_EXPR, rtype, arg0);
8934 tree ip = arg1i ? arg1i
8935 : build1 (IMAGPART_EXPR, rtype, arg1);
8936 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8941 if (flag_unsafe_math_optimizations
8942 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8943 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8944 && (tem = distribute_real_division (code, type, arg0, arg1)))
8947 /* Convert x+x into x*2.0. */
8948 if (operand_equal_p (arg0, arg1, 0)
8949 && SCALAR_FLOAT_TYPE_P (type))
8950 return fold_build2 (MULT_EXPR, type, arg0,
8951 build_real (type, dconst2));
8953 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8954 if (flag_unsafe_math_optimizations
8955 && TREE_CODE (arg1) == PLUS_EXPR
8956 && TREE_CODE (arg0) != MULT_EXPR)
8958 tree tree10 = TREE_OPERAND (arg1, 0);
8959 tree tree11 = TREE_OPERAND (arg1, 1);
8960 if (TREE_CODE (tree11) == MULT_EXPR
8961 && TREE_CODE (tree10) == MULT_EXPR)
8964 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8965 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8968 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8969 if (flag_unsafe_math_optimizations
8970 && TREE_CODE (arg0) == PLUS_EXPR
8971 && TREE_CODE (arg1) != MULT_EXPR)
8973 tree tree00 = TREE_OPERAND (arg0, 0);
8974 tree tree01 = TREE_OPERAND (arg0, 1);
8975 if (TREE_CODE (tree01) == MULT_EXPR
8976 && TREE_CODE (tree00) == MULT_EXPR)
8979 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8980 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8986 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8987 is a rotate of A by C1 bits. */
8988 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8989 is a rotate of A by B bits. */
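/* For example, with 32-bit unsigned A, (A << 3) + (A >> 29) is
   recognized below as A rotated left by 3 bits. */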
8991 enum tree_code code0, code1;
8992 code0 = TREE_CODE (arg0);
8993 code1 = TREE_CODE (arg1);
8994 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8995 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8996 && operand_equal_p (TREE_OPERAND (arg0, 0),
8997 TREE_OPERAND (arg1, 0), 0)
8998 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9000 tree tree01, tree11;
9001 enum tree_code code01, code11;
9003 tree01 = TREE_OPERAND (arg0, 1);
9004 tree11 = TREE_OPERAND (arg1, 1);
9005 STRIP_NOPS (tree01);
9006 STRIP_NOPS (tree11);
9007 code01 = TREE_CODE (tree01);
9008 code11 = TREE_CODE (tree11);
9009 if (code01 == INTEGER_CST
9010 && code11 == INTEGER_CST
9011 && TREE_INT_CST_HIGH (tree01) == 0
9012 && TREE_INT_CST_HIGH (tree11) == 0
9013 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9014 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9015 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9016 code0 == LSHIFT_EXPR ? tree01 : tree11);
9017 else if (code11 == MINUS_EXPR)
9019 tree tree110, tree111;
9020 tree110 = TREE_OPERAND (tree11, 0);
9021 tree111 = TREE_OPERAND (tree11, 1);
9022 STRIP_NOPS (tree110);
9023 STRIP_NOPS (tree111);
9024 if (TREE_CODE (tree110) == INTEGER_CST
9025 && 0 == compare_tree_int (tree110,
9027 (TREE_TYPE (TREE_OPERAND
9029 && operand_equal_p (tree01, tree111, 0))
9030 return build2 ((code0 == LSHIFT_EXPR
9033 type, TREE_OPERAND (arg0, 0), tree01);
9035 else if (code01 == MINUS_EXPR)
9037 tree tree010, tree011;
9038 tree010 = TREE_OPERAND (tree01, 0);
9039 tree011 = TREE_OPERAND (tree01, 1);
9040 STRIP_NOPS (tree010);
9041 STRIP_NOPS (tree011);
9042 if (TREE_CODE (tree010) == INTEGER_CST
9043 && 0 == compare_tree_int (tree010,
9045 (TREE_TYPE (TREE_OPERAND
9047 && operand_equal_p (tree11, tree011, 0))
9048 return build2 ((code0 != LSHIFT_EXPR
9051 type, TREE_OPERAND (arg0, 0), tree11);
9057 /* In most languages, we can't associate operations on floats through
9058 parentheses. Rather than remember where the parentheses were, we
9059 don't associate floats at all, unless the user has specified
9060 -funsafe-math-optimizations. */
9062 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9064 tree var0, con0, lit0, minus_lit0;
9065 tree var1, con1, lit1, minus_lit1;
9067 /* Split both trees into variables, constants, and literals. Then
9068 associate each group together, the constants with literals,
9069 then the result with variables. This increases the chances of
9070 literals being recombined later and of generating relocatable
9071 expressions for the sum of a constant and literal. */
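/* For example, (x + 3) + (y + 5) splits into the variable parts
   x, y and the literal parts 3, 5, which recombine below as
   (x + y) + 8. */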
9072 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9073 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9074 code == MINUS_EXPR);
9076 /* Only do something if we found more than two objects. Otherwise,
9077 nothing has changed and we risk infinite recursion. */
9078 if (2 < ((var0 != 0) + (var1 != 0)
9079 + (con0 != 0) + (con1 != 0)
9080 + (lit0 != 0) + (lit1 != 0)
9081 + (minus_lit0 != 0) + (minus_lit1 != 0)))
9083 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9084 if (code == MINUS_EXPR)
9087 var0 = associate_trees (var0, var1, code, type);
9088 con0 = associate_trees (con0, con1, code, type);
9089 lit0 = associate_trees (lit0, lit1, code, type);
9090 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9092 /* Preserve the MINUS_EXPR if the negative part of the literal is
9093 greater than the positive part. Otherwise, the multiplicative
9094 folding code (i.e. extract_muldiv) may be fooled in case
9095 unsigned constants are subtracted, like in the following
9096 example: ((X*2 + 4) - 8U)/2. */
9097 if (minus_lit0 && lit0)
9099 if (TREE_CODE (lit0) == INTEGER_CST
9100 && TREE_CODE (minus_lit0) == INTEGER_CST
9101 && tree_int_cst_lt (lit0, minus_lit0))
9103 minus_lit0 = associate_trees (minus_lit0, lit0,
9109 lit0 = associate_trees (lit0, minus_lit0,
9117 return fold_convert (type,
9118 associate_trees (var0, minus_lit0,
9122 con0 = associate_trees (con0, minus_lit0,
9124 return fold_convert (type,
9125 associate_trees (var0, con0,
9130 con0 = associate_trees (con0, lit0, code, type);
9131 return fold_convert (type, associate_trees (var0, con0,
9139 /* A - (-B) -> A + B */
9140 if (TREE_CODE (arg1) == NEGATE_EXPR)
9141 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9142 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9143 if (TREE_CODE (arg0) == NEGATE_EXPR
9144 && (FLOAT_TYPE_P (type)
9145 || INTEGRAL_TYPE_P (type))
9146 && negate_expr_p (arg1)
9147 && reorder_operands_p (arg0, arg1))
9148 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9149 TREE_OPERAND (arg0, 0));
9150 /* Convert -A - 1 to ~A. */
9151 if (INTEGRAL_TYPE_P (type)
9152 && TREE_CODE (arg0) == NEGATE_EXPR
9153 && integer_onep (arg1)
9154 && !TYPE_TRAP_SIGNED (type))
9155 return fold_build1 (BIT_NOT_EXPR, type,
9156 fold_convert (type, TREE_OPERAND (arg0, 0)));
9158 /* Convert -1 - A to ~A. */
9159 if (INTEGRAL_TYPE_P (type)
9160 && integer_all_onesp (arg0))
9161 return fold_build1 (BIT_NOT_EXPR, type, op1);
9163 if (! FLOAT_TYPE_P (type))
9165 if (integer_zerop (arg0))
9166 return negate_expr (fold_convert (type, arg1));
9167 if (integer_zerop (arg1))
9168 return non_lvalue (fold_convert (type, arg0));
9170 /* Fold A - (A & B) into ~B & A. */
9171 if (!TREE_SIDE_EFFECTS (arg0)
9172 && TREE_CODE (arg1) == BIT_AND_EXPR)
9174 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9175 return fold_build2 (BIT_AND_EXPR, type,
9176 fold_build1 (BIT_NOT_EXPR, type,
9177 TREE_OPERAND (arg1, 0)),
9179 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9180 return fold_build2 (BIT_AND_EXPR, type,
9181 fold_build1 (BIT_NOT_EXPR, type,
9182 TREE_OPERAND (arg1, 1)),
9186 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9187 any power of 2 minus 1. */
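/* For example, with A == 13 (binary 1101) and B == 7:
   (A & ~7) - (A & 7) is 8 - 5 = 3, and (A ^ 7) - 7 is 10 - 7 = 3. */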
9188 if (TREE_CODE (arg0) == BIT_AND_EXPR
9189 && TREE_CODE (arg1) == BIT_AND_EXPR
9190 && operand_equal_p (TREE_OPERAND (arg0, 0),
9191 TREE_OPERAND (arg1, 0), 0))
9193 tree mask0 = TREE_OPERAND (arg0, 1);
9194 tree mask1 = TREE_OPERAND (arg1, 1);
9195 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9197 if (operand_equal_p (tem, mask1, 0))
9199 tem = fold_build2 (BIT_XOR_EXPR, type,
9200 TREE_OPERAND (arg0, 0), mask1);
9201 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9206 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9207 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9208 return non_lvalue (fold_convert (type, arg0));
9210 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9211 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9212 (-ARG1 + ARG0) reduces to -ARG1. */
9213 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9214 return negate_expr (fold_convert (type, arg1));
9216 /* Fold &x - &x. This can happen from &x.foo - &x.
9217 This is unsafe for certain floats even in non-IEEE formats.
9218 In IEEE, it is unsafe because it does wrong for NaNs.
9219 Also note that operand_equal_p is always false if an operand is volatile. */
9222 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9223 && operand_equal_p (arg0, arg1, 0))
9224 return fold_convert (type, integer_zero_node);
9226 /* A - B -> A + (-B) if B is easily negatable. */
9227 if (negate_expr_p (arg1)
9228 && ((FLOAT_TYPE_P (type)
9229 /* Avoid this transformation if B is a positive REAL_CST. */
9230 && (TREE_CODE (arg1) != REAL_CST
9231 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9232 || INTEGRAL_TYPE_P (type)))
9233 return fold_build2 (PLUS_EXPR, type,
9234 fold_convert (type, arg0),
9235 fold_convert (type, negate_expr (arg1)));
9237 /* Try folding difference of addresses. */
9241 if ((TREE_CODE (arg0) == ADDR_EXPR
9242 || TREE_CODE (arg1) == ADDR_EXPR)
9243 && ptr_difference_const (arg0, arg1, &diff))
9244 return build_int_cst_type (type, diff);
9247 /* Fold &a[i] - &a[j] to i-j. */
9248 if (TREE_CODE (arg0) == ADDR_EXPR
9249 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9250 && TREE_CODE (arg1) == ADDR_EXPR
9251 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9253 tree aref0 = TREE_OPERAND (arg0, 0);
9254 tree aref1 = TREE_OPERAND (arg1, 0);
9255 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9256 TREE_OPERAND (aref1, 0), 0))
9258 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9259 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9260 tree esz = array_ref_element_size (aref0);
9261 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9262 return fold_build2 (MULT_EXPR, type, diff,
9263 fold_convert (type, esz));
9268 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9269 of the array. The loop optimizer sometimes produces this type of expression. */
9271 if (TREE_CODE (arg0) == ADDR_EXPR)
9273 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9275 return fold_convert (type, tem);
9278 if (flag_unsafe_math_optimizations
9279 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9280 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9281 && (tem = distribute_real_division (code, type, arg0, arg1)))
9284 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or one. */
9286 if ((TREE_CODE (arg0) == MULT_EXPR
9287 || TREE_CODE (arg1) == MULT_EXPR)
9288 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9290 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9298 /* (-A) * (-B) -> A * B */
9299 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9300 return fold_build2 (MULT_EXPR, type,
9301 fold_convert (type, TREE_OPERAND (arg0, 0)),
9302 fold_convert (type, negate_expr (arg1)));
9303 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9304 return fold_build2 (MULT_EXPR, type,
9305 fold_convert (type, negate_expr (arg0)),
9306 fold_convert (type, TREE_OPERAND (arg1, 0)));
9308 if (! FLOAT_TYPE_P (type))
9310 if (integer_zerop (arg1))
9311 return omit_one_operand (type, arg1, arg0);
9312 if (integer_onep (arg1))
9313 return non_lvalue (fold_convert (type, arg0));
9314 /* Transform x * -1 into -x. */
9315 if (integer_all_onesp (arg1))
9316 return fold_convert (type, negate_expr (arg0));
9317 /* Transform x * -C into -x * C if x is easily negatable. */
9318 if (TREE_CODE (arg1) == INTEGER_CST
9319 && tree_int_cst_sgn (arg1) == -1
9320 && negate_expr_p (arg0)
9321 && (tem = negate_expr (arg1)) != arg1
9322 && !TREE_OVERFLOW (tem))
9323 return fold_build2 (MULT_EXPR, type,
9324 negate_expr (arg0), tem);
9326 /* (a * (1 << b)) is (a << b) */
9327 if (TREE_CODE (arg1) == LSHIFT_EXPR
9328 && integer_onep (TREE_OPERAND (arg1, 0)))
9329 return fold_build2 (LSHIFT_EXPR, type, arg0,
9330 TREE_OPERAND (arg1, 1));
9331 if (TREE_CODE (arg0) == LSHIFT_EXPR
9332 && integer_onep (TREE_OPERAND (arg0, 0)))
9333 return fold_build2 (LSHIFT_EXPR, type, arg1,
9334 TREE_OPERAND (arg0, 1));
9336 if (TREE_CODE (arg1) == INTEGER_CST
9337 && 0 != (tem = extract_muldiv (op0,
9338 fold_convert (type, arg1),
9340 return fold_convert (type, tem);
9342 /* Optimize z * conj(z) for integer complex numbers. */
9343 if (TREE_CODE (arg0) == CONJ_EXPR
9344 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9345 return fold_mult_zconjz (type, arg1);
9346 if (TREE_CODE (arg1) == CONJ_EXPR
9347 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9348 return fold_mult_zconjz (type, arg0);
9352 /* Maybe fold x * 0 to 0. The expressions aren't the same
9353 when x is NaN, since x * 0 is also NaN. Nor are they the
9354 same in modes with signed zeros, since multiplying a
9355 negative value by 0 gives -0, not +0. */
9356 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9357 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9358 && real_zerop (arg1))
9359 return omit_one_operand (type, arg1, arg0);
9360 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9361 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9362 && real_onep (arg1))
9363 return non_lvalue (fold_convert (type, arg0));
9365 /* Transform x * -1.0 into -x. */
9366 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9367 && real_minus_onep (arg1))
9368 return fold_convert (type, negate_expr (arg0));
9370 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9371 if (flag_unsafe_math_optimizations
9372 && TREE_CODE (arg0) == RDIV_EXPR
9373 && TREE_CODE (arg1) == REAL_CST
9374 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9376 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9379 return fold_build2 (RDIV_EXPR, type, tem,
9380 TREE_OPERAND (arg0, 1));
9383 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9384 if (operand_equal_p (arg0, arg1, 0))
9386 tree tem = fold_strip_sign_ops (arg0);
9387 if (tem != NULL_TREE)
9389 tem = fold_convert (type, tem);
9390 return fold_build2 (MULT_EXPR, type, tem, tem);
9394 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9395 This is not the same for NaNs or if signed zeros are involved. */
9397 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9398 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9399 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9400 && TREE_CODE (arg1) == COMPLEX_CST
9401 && real_zerop (TREE_REALPART (arg1)))
9403 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9404 if (real_onep (TREE_IMAGPART (arg1)))
9405 return fold_build2 (COMPLEX_EXPR, type,
9406 negate_expr (fold_build1 (IMAGPART_EXPR,
9408 fold_build1 (REALPART_EXPR, rtype, arg0));
9409 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9410 return fold_build2 (COMPLEX_EXPR, type,
9411 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9412 negate_expr (fold_build1 (REALPART_EXPR,
9416 /* Optimize z * conj(z) for floating point complex numbers.
9417 Guarded by flag_unsafe_math_optimizations as non-finite
9418 imaginary components don't produce scalar results. */
9419 if (flag_unsafe_math_optimizations
9420 && TREE_CODE (arg0) == CONJ_EXPR
9421 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9422 return fold_mult_zconjz (type, arg1);
9423 if (flag_unsafe_math_optimizations
9424 && TREE_CODE (arg1) == CONJ_EXPR
9425 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9426 return fold_mult_zconjz (type, arg0);
9428 if (flag_unsafe_math_optimizations)
9430 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9431 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9433 /* Optimizations of root(...)*root(...). */
9434 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9436 tree rootfn, arg, arglist;
9437 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9438 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9440 /* Optimize sqrt(x)*sqrt(x) as x. */
9441 if (BUILTIN_SQRT_P (fcode0)
9442 && operand_equal_p (arg00, arg10, 0)
9443 && ! HONOR_SNANS (TYPE_MODE (type)))
9446 /* Optimize root(x)*root(y) as root(x*y). */
9447 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9448 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9449 arglist = build_tree_list (NULL_TREE, arg);
9450 return build_function_call_expr (rootfn, arglist);
9453 /* Optimize expN(x)*expN(y) as expN(x+y). */
9454 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9456 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9457 tree arg = fold_build2 (PLUS_EXPR, type,
9458 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9459 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9460 tree arglist = build_tree_list (NULL_TREE, arg);
9461 return build_function_call_expr (expfn, arglist);
9464 /* Optimizations of pow(...)*pow(...). */
9465 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9466 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9467 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9469 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9470 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9472 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9473 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9476 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9477 if (operand_equal_p (arg01, arg11, 0))
9479 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9480 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9481 tree arglist = tree_cons (NULL_TREE, arg,
9482 build_tree_list (NULL_TREE,
9484 return build_function_call_expr (powfn, arglist);
9487 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9488 if (operand_equal_p (arg00, arg10, 0))
9490 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9491 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9492 tree arglist = tree_cons (NULL_TREE, arg00,
9493 build_tree_list (NULL_TREE,
9495 return build_function_call_expr (powfn, arglist);
9499 /* Optimize tan(x)*cos(x) as sin(x). */
9500 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9501 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9502 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9503 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9504 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9505 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9506 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9507 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9509 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9511 if (sinfn != NULL_TREE)
9512 return build_function_call_expr (sinfn,
9513 TREE_OPERAND (arg0, 1));
9516 /* Optimize x*pow(x,c) as pow(x,c+1). */
9517 if (fcode1 == BUILT_IN_POW
9518 || fcode1 == BUILT_IN_POWF
9519 || fcode1 == BUILT_IN_POWL)
9521 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9522 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9524 if (TREE_CODE (arg11) == REAL_CST
9525 && ! TREE_CONSTANT_OVERFLOW (arg11)
9526 && operand_equal_p (arg0, arg10, 0))
9528 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9532 c = TREE_REAL_CST (arg11);
9533 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9534 arg = build_real (type, c);
9535 arglist = build_tree_list (NULL_TREE, arg);
9536 arglist = tree_cons (NULL_TREE, arg0, arglist);
9537 return build_function_call_expr (powfn, arglist);
9541 /* Optimize pow(x,c)*x as pow(x,c+1). */
9542 if (fcode0 == BUILT_IN_POW
9543 || fcode0 == BUILT_IN_POWF
9544 || fcode0 == BUILT_IN_POWL)
9546 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9547 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9549 if (TREE_CODE (arg01) == REAL_CST
9550 && ! TREE_CONSTANT_OVERFLOW (arg01)
9551 && operand_equal_p (arg1, arg00, 0))
9553 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9557 c = TREE_REAL_CST (arg01);
9558 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9559 arg = build_real (type, c);
9560 arglist = build_tree_list (NULL_TREE, arg);
9561 arglist = tree_cons (NULL_TREE, arg1, arglist);
9562 return build_function_call_expr (powfn, arglist);
9566 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9568 && operand_equal_p (arg0, arg1, 0))
9570 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9574 tree arg = build_real (type, dconst2);
9575 tree arglist = build_tree_list (NULL_TREE, arg);
9576 arglist = tree_cons (NULL_TREE, arg0, arglist);
9577 return build_function_call_expr (powfn, arglist);
9586 if (integer_all_onesp (arg1))
9587 return omit_one_operand (type, arg1, arg0);
9588 if (integer_zerop (arg1))
9589 return non_lvalue (fold_convert (type, arg0));
9590 if (operand_equal_p (arg0, arg1, 0))
9591 return non_lvalue (fold_convert (type, arg0));
9594 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9595 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9597 t1 = build_int_cst_type (type, -1);
9598 return omit_one_operand (type, t1, arg1);
9602 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9603 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9605 t1 = build_int_cst_type (type, -1);
9606 return omit_one_operand (type, t1, arg0);
9609 /* Canonicalize (X & C1) | C2. */
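/* For example, (x & 3) | 1 becomes (x & 2) | 1, since bit 0 of C1 is
   already provided by C2; and (x & 3) | 3 simplifies to just 3, with
   X kept only for its side effects. */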
9610 if (TREE_CODE (arg0) == BIT_AND_EXPR
9611 && TREE_CODE (arg1) == INTEGER_CST
9612 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9614 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9615 int width = TYPE_PRECISION (type);
9616 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9617 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9618 hi2 = TREE_INT_CST_HIGH (arg1);
9619 lo2 = TREE_INT_CST_LOW (arg1);
9621 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9622 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9623 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9625 if (width > HOST_BITS_PER_WIDE_INT)
9627 mhi = (unsigned HOST_WIDE_INT) -1
9628 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9634 mlo = (unsigned HOST_WIDE_INT) -1
9635 >> (HOST_BITS_PER_WIDE_INT - width);
9638 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9639 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9640 return fold_build2 (BIT_IOR_EXPR, type,
9641 TREE_OPERAND (arg0, 0), arg1);
9643 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9646 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9647 return fold_build2 (BIT_IOR_EXPR, type,
9648 fold_build2 (BIT_AND_EXPR, type,
9649 TREE_OPERAND (arg0, 0),
9650 build_int_cst_wide (type,
9656 /* (X & Y) | Y is (X, Y). */
9657 if (TREE_CODE (arg0) == BIT_AND_EXPR
9658 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9659 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9660 /* (X & Y) | X is (Y, X). */
9661 if (TREE_CODE (arg0) == BIT_AND_EXPR
9662 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9663 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9664 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9665 /* X | (X & Y) is (Y, X). */
9666 if (TREE_CODE (arg1) == BIT_AND_EXPR
9667 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9668 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9669 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9670 /* X | (Y & X) is (Y, X). */
9671 if (TREE_CODE (arg1) == BIT_AND_EXPR
9672 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9673 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9674 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9676 t1 = distribute_bit_expr (code, type, arg0, arg1);
9677 if (t1 != NULL_TREE)
9680 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9682 This results in more efficient code for machines without a NAND
9683 instruction. Combine will canonicalize to the first form
9684 which will allow use of NAND instructions provided by the
9685 backend if they exist. */
9686 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9687 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9689 return fold_build1 (BIT_NOT_EXPR, type,
9690 build2 (BIT_AND_EXPR, type,
9691 TREE_OPERAND (arg0, 0),
9692 TREE_OPERAND (arg1, 0)));
9695 /* See if this can be simplified into a rotate first. If that
9696 is unsuccessful continue in the association code. */
9700 if (integer_zerop (arg1))
9701 return non_lvalue (fold_convert (type, arg0));
9702 if (integer_all_onesp (arg1))
9703 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9704 if (operand_equal_p (arg0, arg1, 0))
9705 return omit_one_operand (type, integer_zero_node, arg0);
9708 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9709 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9711 t1 = build_int_cst_type (type, -1);
9712 return omit_one_operand (type, t1, arg1);
9716 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9717 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9719 t1 = build_int_cst_type (type, -1);
9720 return omit_one_operand (type, t1, arg0);
9723 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9724 with a constant, and the two constants have no bits in common,
9725 we should treat this as a BIT_IOR_EXPR since this may produce more simplifications. */
9727 if (TREE_CODE (arg0) == BIT_AND_EXPR
9728 && TREE_CODE (arg1) == BIT_AND_EXPR
9729 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9730 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9731 && integer_zerop (const_binop (BIT_AND_EXPR,
9732 TREE_OPERAND (arg0, 1),
9733 TREE_OPERAND (arg1, 1), 0)))
9735 code = BIT_IOR_EXPR;
9739 /* (X | Y) ^ X -> Y & ~X */
9740 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9741 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9743 tree t2 = TREE_OPERAND (arg0, 1);
9744 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9746 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9747 fold_convert (type, t1));
9751 /* (Y | X) ^ X -> Y & ~X */
9752 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9753 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9755 tree t2 = TREE_OPERAND (arg0, 0);
9756 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9758 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9759 fold_convert (type, t1));
9763 /* X ^ (X | Y) -> Y & ~X */
9764 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9765 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9767 tree t2 = TREE_OPERAND (arg1, 1);
9768 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9770 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9771 fold_convert (type, t1));
9775 /* X ^ (Y | X) -> Y & ~X */
9776 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9777 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9779 tree t2 = TREE_OPERAND (arg1, 0);
9780 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9782 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9783 fold_convert (type, t1));
9787 /* Convert ~X ^ ~Y to X ^ Y. */
9788 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9789 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9790 return fold_build2 (code, type,
9791 fold_convert (type, TREE_OPERAND (arg0, 0)),
9792 fold_convert (type, TREE_OPERAND (arg1, 0)));
9794 /* Convert ~X ^ C to X ^ ~C. */
9795 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9796 && TREE_CODE (arg1) == INTEGER_CST)
9797 return fold_build2 (code, type,
9798 fold_convert (type, TREE_OPERAND (arg0, 0)),
9799 fold_build1 (BIT_NOT_EXPR, type, arg1));
9801 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9802 if (TREE_CODE (arg0) == BIT_AND_EXPR
9803 && integer_onep (TREE_OPERAND (arg0, 1))
9804 && integer_onep (arg1))
9805 return fold_build2 (EQ_EXPR, type, arg0,
9806 build_int_cst (TREE_TYPE (arg0), 0));
9808 /* Fold (X & Y) ^ Y as ~X & Y. */
9809 if (TREE_CODE (arg0) == BIT_AND_EXPR
9810 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9812 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9813 return fold_build2 (BIT_AND_EXPR, type,
9814 fold_build1 (BIT_NOT_EXPR, type, tem),
9815 fold_convert (type, arg1));
9817 /* Fold (X & Y) ^ X as ~Y & X. */
9818 if (TREE_CODE (arg0) == BIT_AND_EXPR
9819 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9820 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9822 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9823 return fold_build2 (BIT_AND_EXPR, type,
9824 fold_build1 (BIT_NOT_EXPR, type, tem),
9825 fold_convert (type, arg1));
9827 /* Fold X ^ (X & Y) as X & ~Y. */
9828 if (TREE_CODE (arg1) == BIT_AND_EXPR
9829 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9831 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9832 return fold_build2 (BIT_AND_EXPR, type,
9833 fold_convert (type, arg0),
9834 fold_build1 (BIT_NOT_EXPR, type, tem));
9836 /* Fold X ^ (Y & X) as ~Y & X. */
9837 if (TREE_CODE (arg1) == BIT_AND_EXPR
9838 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9839 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9841 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9842 return fold_build2 (BIT_AND_EXPR, type,
9843 fold_build1 (BIT_NOT_EXPR, type, tem),
9844 fold_convert (type, arg0));
9847 /* See if this can be simplified into a rotate first. If that
9848 is unsuccessful continue in the association code. */
9852 if (integer_all_onesp (arg1))
9853 return non_lvalue (fold_convert (type, arg0));
9854 if (integer_zerop (arg1))
9855 return omit_one_operand (type, arg1, arg0);
9856 if (operand_equal_p (arg0, arg1, 0))
9857 return non_lvalue (fold_convert (type, arg0));
9859 /* ~X & X is always zero. */
9860 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9861 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9862 return omit_one_operand (type, integer_zero_node, arg1);
9864 /* X & ~X is always zero. */
9865 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9866 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9867 return omit_one_operand (type, integer_zero_node, arg0);
9869 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9870 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9871 && TREE_CODE (arg1) == INTEGER_CST
9872 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9873 return fold_build2 (BIT_IOR_EXPR, type,
9874 fold_build2 (BIT_AND_EXPR, type,
9875 TREE_OPERAND (arg0, 0), arg1),
9876 fold_build2 (BIT_AND_EXPR, type,
9877 TREE_OPERAND (arg0, 1), arg1));
9879 /* (X | Y) & Y is (X, Y). */
9880 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9881 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9882 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9883 /* (X | Y) & X is (Y, X). */
9884 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9885 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9886 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9887 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9888 /* X & (X | Y) is (Y, X). */
9889 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9890 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9891 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9892 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9893 /* X & (Y | X) is (Y, X). */
9894 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9895 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9896 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9897 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9899 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9900 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9901 && integer_onep (TREE_OPERAND (arg0, 1))
9902 && integer_onep (arg1))
9904 tem = TREE_OPERAND (arg0, 0);
9905 return fold_build2 (EQ_EXPR, type,
9906 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9907 build_int_cst (TREE_TYPE (tem), 1)),
9908 build_int_cst (TREE_TYPE (tem), 0));
9910 /* Fold ~X & 1 as (X & 1) == 0. */
9911 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9912 && integer_onep (arg1))
9914 tem = TREE_OPERAND (arg0, 0);
9915 return fold_build2 (EQ_EXPR, type,
9916 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9917 build_int_cst (TREE_TYPE (tem), 1)),
9918 build_int_cst (TREE_TYPE (tem), 0));
9921 /* Fold (X ^ Y) & Y as ~X & Y. */
9922 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9923 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9925 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9926 return fold_build2 (BIT_AND_EXPR, type,
9927 fold_build1 (BIT_NOT_EXPR, type, tem),
9928 fold_convert (type, arg1));
9930 /* Fold (X ^ Y) & X as ~Y & X. */
9931 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9932 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9933 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9935 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9936 return fold_build2 (BIT_AND_EXPR, type,
9937 fold_build1 (BIT_NOT_EXPR, type, tem),
9938 fold_convert (type, arg1));
9940 /* Fold X & (X ^ Y) as X & ~Y. */
9941 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9942 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9944 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9945 return fold_build2 (BIT_AND_EXPR, type,
9946 fold_convert (type, arg0),
9947 fold_build1 (BIT_NOT_EXPR, type, tem));
9949 /* Fold X & (Y ^ X) as ~Y & X. */
9950 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9951 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9952 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9954 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9955 return fold_build2 (BIT_AND_EXPR, type,
9956 fold_build1 (BIT_NOT_EXPR, type, tem),
9957 fold_convert (type, arg0));
9960 t1 = distribute_bit_expr (code, type, arg0, arg1);
9961 if (t1 != NULL_TREE)
9963 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9964 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9965 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9968 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9970 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9971 && (~TREE_INT_CST_LOW (arg1)
9972 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9973 return fold_convert (type, TREE_OPERAND (arg0, 0));
9976 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9978 This results in more efficient code for machines without a NOR
9979 instruction. Combine will canonicalize to the first form
9980 which will allow use of NOR instructions provided by the
9981 backend if they exist. */
9982 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9983 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9985 return fold_build1 (BIT_NOT_EXPR, type,
9986 build2 (BIT_IOR_EXPR, type,
9987 TREE_OPERAND (arg0, 0),
9988 TREE_OPERAND (arg1, 0)));
9994 /* Don't touch a floating-point divide by zero unless the mode
9995 of the constant can represent infinity. */
9996 if (TREE_CODE (arg1) == REAL_CST
9997 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9998 && real_zerop (arg1))
10001 /* Optimize A / A to 1.0 if we don't care about
10002 NaNs or Infinities. Skip the transformation
10003 for non-real operands. */
10004 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10005 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10006 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10007 && operand_equal_p (arg0, arg1, 0))
10009 tree r = build_real (TREE_TYPE (arg0), dconst1);
10011 return omit_two_operands (type, r, arg0, arg1);
10014 /* The complex version of the above A / A optimization. */
10015 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10016 && operand_equal_p (arg0, arg1, 0))
10018 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10019 if (! HONOR_NANS (TYPE_MODE (elem_type))
10020 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10022 tree r = build_real (elem_type, dconst1);
10023 /* omit_two_operands will call fold_convert for us. */
10024 return omit_two_operands (type, r, arg0, arg1);
10028 /* (-A) / (-B) -> A / B */
10029 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10030 return fold_build2 (RDIV_EXPR, type,
10031 TREE_OPERAND (arg0, 0),
10032 negate_expr (arg1));
10033 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10034 return fold_build2 (RDIV_EXPR, type,
10035 negate_expr (arg0),
10036 TREE_OPERAND (arg1, 0));
10038 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10039 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10040 && real_onep (arg1))
10041 return non_lvalue (fold_convert (type, arg0));
10043 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10044 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10045 && real_minus_onep (arg1))
10046 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10048 /* If ARG1 is a constant, we can convert this to a multiply by the
10049 reciprocal. This does not have the same rounding properties,
10050 so only do this if -funsafe-math-optimizations. We can actually
10051 always safely do it if ARG1 is a power of two, but it's hard to
10052 tell if it is or not in a portable manner. */
10053 if (TREE_CODE (arg1) == REAL_CST)
10055 if (flag_unsafe_math_optimizations
10056 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10058 return fold_build2 (MULT_EXPR, type, arg0, tem);
10059 /* Find the reciprocal if optimizing and the result is exact. */
10063 r = TREE_REAL_CST (arg1);
10064 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10066 tem = build_real (type, r);
10067 return fold_build2 (MULT_EXPR, type,
10068 fold_convert (type, arg0), tem);
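/* Editorial example (illustrative only): under
   -funsafe-math-optimizations, `x / 3.0' becomes `x * (1.0/3.0)';
   independently of that flag, when optimizing, `x / 4.0' becomes
   `x * 0.25' because the reciprocal of 4.0 is exactly representable. */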
10072 /* Convert A/B/C to A/(B*C). */
10073 if (flag_unsafe_math_optimizations
10074 && TREE_CODE (arg0) == RDIV_EXPR)
10075 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10076 fold_build2 (MULT_EXPR, type,
10077 TREE_OPERAND (arg0, 1), arg1));
10079 /* Convert A/(B/C) to (A/B)*C. */
10080 if (flag_unsafe_math_optimizations
10081 && TREE_CODE (arg1) == RDIV_EXPR)
10082 return fold_build2 (MULT_EXPR, type,
10083 fold_build2 (RDIV_EXPR, type, arg0,
10084 TREE_OPERAND (arg1, 0)),
10085 TREE_OPERAND (arg1, 1));
10087 /* Convert C1/(X*C2) into (C1/C2)/X. */
10088 if (flag_unsafe_math_optimizations
10089 && TREE_CODE (arg1) == MULT_EXPR
10090 && TREE_CODE (arg0) == REAL_CST
10091 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10093 tree tem = const_binop (RDIV_EXPR, arg0,
10094 TREE_OPERAND (arg1, 1), 0);
10096 return fold_build2 (RDIV_EXPR, type, tem,
10097 TREE_OPERAND (arg1, 0));
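/* Editorial example (illustrative only): with
   -funsafe-math-optimizations the three rewrites above turn, e.g.,
   `a / b / c' into `a / (b * c)', `a / (b / c)' into `(a / b) * c',
   and `6.0 / (x * 3.0)' into `2.0 / x'. */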
10100 if (flag_unsafe_math_optimizations)
10102 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10103 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10105 /* Optimize sin(x)/cos(x) as tan(x). */
10106 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10107 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10108 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10109 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10110 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10112 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10114 if (tanfn != NULL_TREE)
10115 return build_function_call_expr (tanfn,
10116 TREE_OPERAND (arg0, 1));
10119 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10120 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10121 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10122 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10123 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10124 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10126 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10128 if (tanfn != NULL_TREE)
10130 tree tmp = TREE_OPERAND (arg0, 1);
10131 tmp = build_function_call_expr (tanfn, tmp);
10132 return fold_build2 (RDIV_EXPR, type,
10133 build_real (type, dconst1), tmp);
10137 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10138 NaNs or Infinities. */
10139 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10140 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10141 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10143 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10144 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10146 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10147 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10148 && operand_equal_p (arg00, arg01, 0))
10150 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10152 if (cosfn != NULL_TREE)
10153 return build_function_call_expr (cosfn,
10154 TREE_OPERAND (arg0, 1));
10158 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10159 NaNs or Infinities. */
10160 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10161 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10162 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10164 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10165 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10167 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10168 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10169 && operand_equal_p (arg00, arg01, 0))
10171 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10173 if (cosfn != NULL_TREE)
10175 tree tmp = TREE_OPERAND (arg0, 1);
10176 tmp = build_function_call_expr (cosfn, tmp);
10177 return fold_build2 (RDIV_EXPR, type,
10178 build_real (type, dconst1),
10184 /* Optimize pow(x,c)/x as pow(x,c-1). */
10185 if (fcode0 == BUILT_IN_POW
10186 || fcode0 == BUILT_IN_POWF
10187 || fcode0 == BUILT_IN_POWL)
10189 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10190 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
10191 if (TREE_CODE (arg01) == REAL_CST
10192 && ! TREE_CONSTANT_OVERFLOW (arg01)
10193 && operand_equal_p (arg1, arg00, 0))
10195 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10199 c = TREE_REAL_CST (arg01);
10200 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10201 arg = build_real (type, c);
10202 arglist = build_tree_list (NULL_TREE, arg);
10203 arglist = tree_cons (NULL_TREE, arg1, arglist);
10204 return build_function_call_expr (powfn, arglist);
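/* Editorial example (illustrative only): `pow (x, 3.0) / x' is
   rewritten as `pow (x, 2.0)', folding the constant exponent
   decrement at compile time. */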
10208 /* Optimize x/expN(y) into x*expN(-y). */
10209 if (BUILTIN_EXPONENT_P (fcode1))
10211 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10212 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10213 tree arglist = build_tree_list (NULL_TREE,
10214 fold_convert (type, arg));
10215 arg1 = build_function_call_expr (expfn, arglist);
10216 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10219 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10220 if (fcode1 == BUILT_IN_POW
10221 || fcode1 == BUILT_IN_POWF
10222 || fcode1 == BUILT_IN_POWL)
10224 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10225 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10226 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10227 tree neg11 = fold_convert (type, negate_expr (arg11));
10228 tree arglist = tree_cons(NULL_TREE, arg10,
10229 build_tree_list (NULL_TREE, neg11));
10230 arg1 = build_function_call_expr (powfn, arglist);
10231 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10236 case TRUNC_DIV_EXPR:
10237 case FLOOR_DIV_EXPR:
10238 /* Simplify A / (B << N) where A and B are positive and B is
10239 a power of 2, to A >> (N + log2(B)). */
10240 if (TREE_CODE (arg1) == LSHIFT_EXPR
10241 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10243 tree sval = TREE_OPERAND (arg1, 0);
10244 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10246 tree sh_cnt = TREE_OPERAND (arg1, 1);
10247 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10249 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10250 sh_cnt, build_int_cst (NULL_TREE, pow2));
10251 return fold_build2 (RSHIFT_EXPR, type,
10252 fold_convert (type, arg0), sh_cnt);
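/* Editorial example (illustrative only, assuming `x' is unsigned):
   `x / (8u << n)' is folded to `x >> (n + 3)', since 8 is a power of
   two and log2(8) == 3. */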
10257 case ROUND_DIV_EXPR:
10258 case CEIL_DIV_EXPR:
10259 case EXACT_DIV_EXPR:
10260 if (integer_onep (arg1))
10261 return non_lvalue (fold_convert (type, arg0));
10262 if (integer_zerop (arg1))
10264 /* X / -1 is -X. */
10265 if (!TYPE_UNSIGNED (type)
10266 && TREE_CODE (arg1) == INTEGER_CST
10267 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10268 && TREE_INT_CST_HIGH (arg1) == -1)
10269 return fold_convert (type, negate_expr (arg0));
10271 /* Convert -A / -B to A / B when the type is signed and overflow is undefined. */
10273 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10274 && TREE_CODE (arg0) == NEGATE_EXPR
10275 && negate_expr_p (arg1))
10276 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10277 negate_expr (arg1));
10278 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10279 && TREE_CODE (arg1) == NEGATE_EXPR
10280 && negate_expr_p (arg0))
10281 return fold_build2 (code, type, negate_expr (arg0),
10282 TREE_OPERAND (arg1, 0));
10284 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10285 operation, EXACT_DIV_EXPR.
10287 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10288 At one time others generated faster code, but it's not clear whether they still do
10289 after the last round of changes to the DIV code in expmed.c. */
10290 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10291 && multiple_of_p (type, arg0, arg1))
10292 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10294 if (TREE_CODE (arg1) == INTEGER_CST
10295 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10296 return fold_convert (type, tem);
10300 case CEIL_MOD_EXPR:
10301 case FLOOR_MOD_EXPR:
10302 case ROUND_MOD_EXPR:
10303 case TRUNC_MOD_EXPR:
10304 /* X % 1 is always zero, but be sure to preserve any side effects in X. */
10306 if (integer_onep (arg1))
10307 return omit_one_operand (type, integer_zero_node, arg0);
10309 /* X % 0, return X % 0 unchanged so that we can get the
10310 proper warnings and errors. */
10311 if (integer_zerop (arg1))
10314 /* 0 % X is always zero, but be sure to preserve any side
10315 effects in X. Place this after checking for X == 0. */
10316 if (integer_zerop (arg0))
10317 return omit_one_operand (type, integer_zero_node, arg1);
10319 /* X % -1 is zero. */
10320 if (!TYPE_UNSIGNED (type)
10321 && TREE_CODE (arg1) == INTEGER_CST
10322 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10323 && TREE_INT_CST_HIGH (arg1) == -1)
10324 return omit_one_operand (type, integer_zero_node, arg0);
10326 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10327 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10328 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10329 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10332 /* Also optimize A % (C << N) where C is a power of 2,
10333 to A & ((C << N) - 1). */
10334 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10335 c = TREE_OPERAND (arg1, 0);
10337 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10339 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10340 build_int_cst (TREE_TYPE (arg1), 1));
10341 return fold_build2 (BIT_AND_EXPR, type,
10342 fold_convert (type, arg0),
10343 fold_convert (type, mask));
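/* Editorial example (illustrative only, assuming `x' is unsigned):
   `x % 16' is folded to `x & 15', and `x % (4u << n)' to
   `x & ((4u << n) - 1)'. */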
10347 /* X % -C is the same as X % C. */
10348 if (code == TRUNC_MOD_EXPR
10349 && !TYPE_UNSIGNED (type)
10350 && TREE_CODE (arg1) == INTEGER_CST
10351 && !TREE_CONSTANT_OVERFLOW (arg1)
10352 && TREE_INT_CST_HIGH (arg1) < 0
10354 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10355 && !sign_bit_p (arg1, arg1))
10356 return fold_build2 (code, type, fold_convert (type, arg0),
10357 fold_convert (type, negate_expr (arg1)));
10359 /* X % -Y is the same as X % Y. */
10360 if (code == TRUNC_MOD_EXPR
10361 && !TYPE_UNSIGNED (type)
10362 && TREE_CODE (arg1) == NEGATE_EXPR
10364 return fold_build2 (code, type, fold_convert (type, arg0),
10365 fold_convert (type, TREE_OPERAND (arg1, 0)));
10367 if (TREE_CODE (arg1) == INTEGER_CST
10368 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10369 return fold_convert (type, tem);
10375 if (integer_all_onesp (arg0))
10376 return omit_one_operand (type, arg0, arg1);
10380 /* Optimize -1 >> x for arithmetic right shifts. */
10381 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10382 return omit_one_operand (type, arg0, arg1);
10383 /* ... fall through ... */
10387 if (integer_zerop (arg1))
10388 return non_lvalue (fold_convert (type, arg0));
10389 if (integer_zerop (arg0))
10390 return omit_one_operand (type, arg0, arg1);
10392 /* Since negative shift count is not well-defined,
10393 don't try to compute it in the compiler. */
10394 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10397 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10398 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10399 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10400 && host_integerp (TREE_OPERAND (arg0, 1), false)
10401 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10403 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10404 + TREE_INT_CST_LOW (arg1));
10406 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10407 being well defined. */
10408 if (low >= TYPE_PRECISION (type))
10410 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10411 low = low % TYPE_PRECISION (type);
10412 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10413 return build_int_cst (type, 0);
10415 low = TYPE_PRECISION (type) - 1;
10418 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10419 build_int_cst (type, low));
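/* Editorial example (illustrative only): `(x << 3) << 4' becomes
   `x << 7'; if the combined count reaches the type precision, e.g.
   `(x << 20) << 20' for a 32-bit unsigned `x', the result folds to 0,
   while for rotates the count is reduced modulo the precision. */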
10422 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10423 into x & ((unsigned)-1 >> c) for unsigned types. */
10424 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10425 || (TYPE_UNSIGNED (type)
10426 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10427 && host_integerp (arg1, false)
10428 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10429 && host_integerp (TREE_OPERAND (arg0, 1), false)
10430 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10432 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10433 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10439 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10441 lshift = build_int_cst (type, -1);
10442 lshift = int_const_binop (code, lshift, arg1, 0);
10444 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
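/* Editorial example (illustrative only, assuming `x' is unsigned):
   `(x >> 4) << 4' is folded to `x & ((unsigned) -1 << 4)', i.e.
   `x & ~15u', clearing the low four bits without shifting. */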
10448 /* Rewrite an LROTATE_EXPR by a constant into an
10449 RROTATE_EXPR by a new constant. */
10450 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10452 tree tem = build_int_cst (TREE_TYPE (arg1),
10453 GET_MODE_BITSIZE (TYPE_MODE (type)));
10454 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10455 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10458 /* If we have a rotate of a bit operation with the rotate count and
10459 the second operand of the bit operation both constant,
10460 permute the two operations. */
10461 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10462 && (TREE_CODE (arg0) == BIT_AND_EXPR
10463 || TREE_CODE (arg0) == BIT_IOR_EXPR
10464 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10465 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10466 return fold_build2 (TREE_CODE (arg0), type,
10467 fold_build2 (code, type,
10468 TREE_OPERAND (arg0, 0), arg1),
10469 fold_build2 (code, type,
10470 TREE_OPERAND (arg0, 1), arg1));
10472 /* Two consecutive rotates adding up to the width of the mode can be ignored. */
10474 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10475 && TREE_CODE (arg0) == RROTATE_EXPR
10476 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10477 && TREE_INT_CST_HIGH (arg1) == 0
10478 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10479 && ((TREE_INT_CST_LOW (arg1)
10480 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10481 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10482 return TREE_OPERAND (arg0, 0);
10487 if (operand_equal_p (arg0, arg1, 0))
10488 return omit_one_operand (type, arg0, arg1);
10489 if (INTEGRAL_TYPE_P (type)
10490 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10491 return omit_one_operand (type, arg1, arg0);
10492 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10498 if (operand_equal_p (arg0, arg1, 0))
10499 return omit_one_operand (type, arg0, arg1);
10500 if (INTEGRAL_TYPE_P (type)
10501 && TYPE_MAX_VALUE (type)
10502 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10503 return omit_one_operand (type, arg1, arg0);
10504 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10509 case TRUTH_ANDIF_EXPR:
10510 /* Note that the operands of this must be ints
10511 and their values must be 0 or 1.
10512 ("true" is a fixed value perhaps depending on the language.) */
10513 /* If first arg is constant zero, return it. */
10514 if (integer_zerop (arg0))
10515 return fold_convert (type, arg0);
10516 case TRUTH_AND_EXPR:
10517 /* If either arg is constant true, drop it. */
10518 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10519 return non_lvalue (fold_convert (type, arg1));
10520 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10521 /* Preserve sequence points. */
10522 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10523 return non_lvalue (fold_convert (type, arg0));
10524 /* If second arg is constant zero, result is zero, but first arg
10525 must be evaluated. */
10526 if (integer_zerop (arg1))
10527 return omit_one_operand (type, arg1, arg0);
10528 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10529 case will be handled here. */
10530 if (integer_zerop (arg0))
10531 return omit_one_operand (type, arg0, arg1);
10533 /* !X && X is always false. */
10534 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10535 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10536 return omit_one_operand (type, integer_zero_node, arg1);
10537 /* X && !X is always false. */
10538 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10539 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10540 return omit_one_operand (type, integer_zero_node, arg0);
10542 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10543 means A >= Y && A != MAX, but in this case we know that A < X <= MAX. */
10546 if (!TREE_SIDE_EFFECTS (arg0)
10547 && !TREE_SIDE_EFFECTS (arg1))
10549 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10550 if (tem && !operand_equal_p (tem, arg0, 0))
10551 return fold_build2 (code, type, tem, arg1);
10553 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10554 if (tem && !operand_equal_p (tem, arg1, 0))
10555 return fold_build2 (code, type, arg0, tem);
10559 /* We only do these simplifications if we are optimizing. */
10563 /* Check for things like (A || B) && (A || C). We can convert this
10564 to A || (B && C). Note that either operator can be any of the four
10565 truth and/or operations and the transformation will still be
10566 valid. Also note that we only care about order for the
10567 ANDIF and ORIF operators. If B contains side effects, this
10568 might change the truth-value of A. */
10569 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10570 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10571 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10572 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10573 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10574 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10576 tree a00 = TREE_OPERAND (arg0, 0);
10577 tree a01 = TREE_OPERAND (arg0, 1);
10578 tree a10 = TREE_OPERAND (arg1, 0);
10579 tree a11 = TREE_OPERAND (arg1, 1);
10580 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10581 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10582 && (code == TRUTH_AND_EXPR
10583 || code == TRUTH_OR_EXPR));
10585 if (operand_equal_p (a00, a10, 0))
10586 return fold_build2 (TREE_CODE (arg0), type, a00,
10587 fold_build2 (code, type, a01, a11));
10588 else if (commutative && operand_equal_p (a00, a11, 0))
10589 return fold_build2 (TREE_CODE (arg0), type, a00,
10590 fold_build2 (code, type, a01, a10));
10591 else if (commutative && operand_equal_p (a01, a10, 0))
10592 return fold_build2 (TREE_CODE (arg0), type, a01,
10593 fold_build2 (code, type, a00, a11));
10595 /* This case is tricky because we must either have commutative
10596 operators or else A10 must not have side-effects. */
10598 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10599 && operand_equal_p (a01, a11, 0))
10600 return fold_build2 (TREE_CODE (arg0), type,
10601 fold_build2 (code, type, a00, a10),
10605 /* See if we can build a range comparison. */
10606 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10609 /* Check for the possibility of merging component references. If our
10610 lhs is another similar operation, try to merge its rhs with our
10611 rhs. Then try to merge our lhs and rhs. */
10612 if (TREE_CODE (arg0) == code
10613 && 0 != (tem = fold_truthop (code, type,
10614 TREE_OPERAND (arg0, 1), arg1)))
10615 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10617 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10622 case TRUTH_ORIF_EXPR:
10623 /* Note that the operands of this must be ints
10624 and their values must be 0 or true.
10625 ("true" is a fixed value perhaps depending on the language.) */
10626 /* If first arg is constant true, return it. */
10627 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10628 return fold_convert (type, arg0);
10629 case TRUTH_OR_EXPR:
10630 /* If either arg is constant zero, drop it. */
10631 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10632 return non_lvalue (fold_convert (type, arg1));
10633 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10634 /* Preserve sequence points. */
10635 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10636 return non_lvalue (fold_convert (type, arg0));
10637 /* If second arg is constant true, result is true, but we must
10638 evaluate first arg. */
10639 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10640 return omit_one_operand (type, arg1, arg0);
10641 /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR. */
10643 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10644 return omit_one_operand (type, arg0, arg1);
10646 /* !X || X is always true. */
10647 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10648 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10649 return omit_one_operand (type, integer_one_node, arg1);
10650 /* X || !X is always true. */
10651 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10652 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10653 return omit_one_operand (type, integer_one_node, arg0);
10657 case TRUTH_XOR_EXPR:
10658 /* If the second arg is constant zero, drop it. */
10659 if (integer_zerop (arg1))
10660 return non_lvalue (fold_convert (type, arg0));
10661 /* If the second arg is constant true, this is a logical inversion. */
10662 if (integer_onep (arg1))
10664 /* Only call invert_truthvalue if operand is a truth value. */
10665 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10666 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10668 tem = invert_truthvalue (arg0);
10669 return non_lvalue (fold_convert (type, tem));
10671 /* Identical arguments cancel to zero. */
10672 if (operand_equal_p (arg0, arg1, 0))
10673 return omit_one_operand (type, integer_zero_node, arg0);
10675 /* !X ^ X is always true. */
10676 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10677 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10678 return omit_one_operand (type, integer_one_node, arg1);
10680 /* X ^ !X is always true. */
10681 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10682 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10683 return omit_one_operand (type, integer_one_node, arg0);
10689 tem = fold_comparison (code, type, op0, op1);
10690 if (tem != NULL_TREE)
10693 /* bool_var != 0 becomes bool_var. */
10694 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10695 && code == NE_EXPR)
10696 return non_lvalue (fold_convert (type, arg0));
10698 /* bool_var == 1 becomes bool_var. */
10699 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10700 && code == EQ_EXPR)
10701 return non_lvalue (fold_convert (type, arg0));
10703 /* bool_var != 1 becomes !bool_var. */
10704 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10705 && code == NE_EXPR)
10706 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10708 /* bool_var == 0 becomes !bool_var. */
10709 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10710 && code == EQ_EXPR)
10711 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10713 /* If this is an equality comparison of the address of a non-weak
10714 object against zero, then we know the result. */
10715 if (TREE_CODE (arg0) == ADDR_EXPR
10716 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10717 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10718 && integer_zerop (arg1))
10719 return constant_boolean_node (code != EQ_EXPR, type);
10721 /* If this is an equality comparison of the address of two non-weak,
10722 unaliased symbols neither of which are extern (since we do not
10723 have access to attributes for externs), then we know the result. */
10724 if (TREE_CODE (arg0) == ADDR_EXPR
10725 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10726 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10727 && ! lookup_attribute ("alias",
10728 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10729 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10730 && TREE_CODE (arg1) == ADDR_EXPR
10731 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10732 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10733 && ! lookup_attribute ("alias",
10734 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10735 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10737 /* We know that we're looking at the address of two
10738 non-weak, unaliased, static _DECL nodes.
10740 It is both wasteful and incorrect to call operand_equal_p
10741 to compare the two ADDR_EXPR nodes. It is wasteful in that
10742 all we need to do is test pointer equality for the arguments
10743 to the two ADDR_EXPR nodes. It is incorrect to use
10744 operand_equal_p as that function is NOT equivalent to a
10745 C equality test. It can in fact return false for two
10746 objects which would test as equal using the C equality operator. */
10748 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10749 return constant_boolean_node (equal
10750 ? code == EQ_EXPR : code != EQ_EXPR,
10754 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10755 a MINUS_EXPR of a constant, we can convert it into a comparison with
10756 a revised constant as long as no overflow occurs. */
10757 if (TREE_CODE (arg1) == INTEGER_CST
10758 && (TREE_CODE (arg0) == PLUS_EXPR
10759 || TREE_CODE (arg0) == MINUS_EXPR)
10760 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10761 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10762 ? MINUS_EXPR : PLUS_EXPR,
10763 fold_convert (TREE_TYPE (arg0), arg1),
10764 TREE_OPERAND (arg0, 1), 0))
10765 && ! TREE_CONSTANT_OVERFLOW (tem))
10766 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10768 /* Similarly for a NEGATE_EXPR. */
10769 if (TREE_CODE (arg0) == NEGATE_EXPR
10770 && TREE_CODE (arg1) == INTEGER_CST
10771 && 0 != (tem = negate_expr (arg1))
10772 && TREE_CODE (tem) == INTEGER_CST
10773 && ! TREE_CONSTANT_OVERFLOW (tem))
10774 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10776 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
10777 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10778 && TREE_CODE (arg1) == INTEGER_CST
10779 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10780 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10781 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
10782 fold_convert (TREE_TYPE (arg0), arg1),
10783 TREE_OPERAND (arg0, 1)));
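/* Editorial example (illustrative only): `(x ^ 4) == 6' is folded to
   `x == 2', since the constant 4 can be XORed into the right-hand
   constant at compile time. */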
10785 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10786 for !=. Don't do this for ordered comparisons due to overflow. */
10787 if (TREE_CODE (arg0) == MINUS_EXPR
10788 && integer_zerop (arg1))
10789 return fold_build2 (code, type,
10790 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10792 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10793 if (TREE_CODE (arg0) == ABS_EXPR
10794 && (integer_zerop (arg1) || real_zerop (arg1)))
10795 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10797 /* If this is an EQ or NE comparison with zero and ARG0 is
10798 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10799 two operations, but the latter can be done in one less insn
10800 on machines that have only two-operand insns or on which a
10801 constant cannot be the first operand. */
10802 if (TREE_CODE (arg0) == BIT_AND_EXPR
10803 && integer_zerop (arg1))
10805 tree arg00 = TREE_OPERAND (arg0, 0);
10806 tree arg01 = TREE_OPERAND (arg0, 1);
10807 if (TREE_CODE (arg00) == LSHIFT_EXPR
10808 && integer_onep (TREE_OPERAND (arg00, 0)))
10810 fold_build2 (code, type,
10811 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10812 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10813 arg01, TREE_OPERAND (arg00, 1)),
10814 fold_convert (TREE_TYPE (arg0),
10815 integer_one_node)),
10817 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10818 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10820 fold_build2 (code, type,
10821 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10822 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10823 arg00, TREE_OPERAND (arg01, 1)),
10824 fold_convert (TREE_TYPE (arg0),
10825 integer_one_node)),
10829 /* If this is an NE or EQ comparison of zero against the result of a
10830 signed MOD operation whose second operand is a power of 2, make
10831 the MOD operation unsigned since it is simpler and equivalent. */
10832 if (integer_zerop (arg1)
10833 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10834 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10835 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10836 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10837 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10838 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10840 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10841 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10842 fold_convert (newtype,
10843 TREE_OPERAND (arg0, 0)),
10844 fold_convert (newtype,
10845 TREE_OPERAND (arg0, 1)));
10847 return fold_build2 (code, type, newmod,
10848 fold_convert (newtype, arg1));
10851 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10852 C1 is a valid shift constant, and C2 is a power of two, i.e. a single bit. */
10854 if (TREE_CODE (arg0) == BIT_AND_EXPR
10855 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10856 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10858 && integer_pow2p (TREE_OPERAND (arg0, 1))
10859 && integer_zerop (arg1))
10861 tree itype = TREE_TYPE (arg0);
10862 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10863 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10865 /* Check for a valid shift count. */
10866 if (TREE_INT_CST_HIGH (arg001) == 0
10867 && TREE_INT_CST_LOW (arg001) < prec)
10869 tree arg01 = TREE_OPERAND (arg0, 1);
10870 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10871 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10872 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10873 can be rewritten as (X & (C2 << C1)) != 0. */
10874 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10876 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10877 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10878 return fold_build2 (code, type, tem, arg1);
10880 /* Otherwise, for signed (arithmetic) shifts,
10881 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10882 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10883 else if (!TYPE_UNSIGNED (itype))
10884 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10885 arg000, build_int_cst (itype, 0));
10886 /* Otherwise, for unsigned (logical) shifts,
10887 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10888 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10890 return omit_one_operand (type,
10891 code == EQ_EXPR ? integer_one_node
10892 : integer_zero_node,
10897 /* If this is an NE comparison of zero with an AND of one, remove the
10898 comparison since the AND will give the correct value. */
10899 if (code == NE_EXPR
10900 && integer_zerop (arg1)
10901 && TREE_CODE (arg0) == BIT_AND_EXPR
10902 && integer_onep (TREE_OPERAND (arg0, 1)))
10903 return fold_convert (type, arg0);
10905 /* If we have (A & C) == C where C is a power of 2, convert this into
10906 (A & C) != 0. Similarly for NE_EXPR. */
10907 if (TREE_CODE (arg0) == BIT_AND_EXPR
10908 && integer_pow2p (TREE_OPERAND (arg0, 1))
10909 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10910 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10911 arg0, fold_convert (TREE_TYPE (arg0),
10912 integer_zero_node));
10914 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10915 bit, then fold the expression into A < 0 or A >= 0. */
10916 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10920 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10921 Similarly for NE_EXPR. */
10922 if (TREE_CODE (arg0) == BIT_AND_EXPR
10923 && TREE_CODE (arg1) == INTEGER_CST
10924 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10926 tree notc = fold_build1 (BIT_NOT_EXPR,
10927 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10928 TREE_OPERAND (arg0, 1));
10929 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10931 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10932 if (integer_nonzerop (dandnotc))
10933 return omit_one_operand (type, rslt, arg0);
10936 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10937 Similarly for NE_EXPR. */
10938 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10939 && TREE_CODE (arg1) == INTEGER_CST
10940 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10942 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10943 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10944 TREE_OPERAND (arg0, 1), notd);
10945 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10946 if (integer_nonzerop (candnotd))
10947 return omit_one_operand (type, rslt, arg0);
10950 /* If this is a comparison of a field, we may be able to simplify it. */
10951 if (((TREE_CODE (arg0) == COMPONENT_REF
10952 && lang_hooks.can_use_bit_fields_p ())
10953 || TREE_CODE (arg0) == BIT_FIELD_REF)
10954 /* Handle the constant case even without -O
10955 to make sure the warnings are given. */
10956 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10958 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10963 /* Optimize comparisons of strlen vs zero to a compare of the
10964 first character of the string vs zero. To wit,
10965 strlen(ptr) == 0 => *ptr == 0
10966 strlen(ptr) != 0 => *ptr != 0
10967 Other cases should reduce to one of these two (or a constant)
10968 due to the return value of strlen being unsigned. */
10969 if (TREE_CODE (arg0) == CALL_EXPR
10970 && integer_zerop (arg1))
10972 tree fndecl = get_callee_fndecl (arg0);
10976 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10977 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10978 && (arglist = TREE_OPERAND (arg0, 1))
10979 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10980 && ! TREE_CHAIN (arglist))
10982 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10983 return fold_build2 (code, type, iref,
10984 build_int_cst (TREE_TYPE (iref), 0));
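/* Editorial example (illustrative only): `strlen (p) == 0' is folded
   to `*p == 0', avoiding the library call when only emptiness of the
   string is tested. */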
10988 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10989 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10990 if (TREE_CODE (arg0) == RSHIFT_EXPR
10991 && integer_zerop (arg1)
10992 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10994 tree arg00 = TREE_OPERAND (arg0, 0);
10995 tree arg01 = TREE_OPERAND (arg0, 1);
10996 tree itype = TREE_TYPE (arg00);
10997 if (TREE_INT_CST_HIGH (arg01) == 0
10998 && TREE_INT_CST_LOW (arg01)
10999 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11001 if (TYPE_UNSIGNED (itype))
11003 itype = lang_hooks.types.signed_type (itype);
11004 arg00 = fold_convert (itype, arg00);
11006 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11007 type, arg00, build_int_cst (itype, 0));
11011 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11012 if (integer_zerop (arg1)
11013 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11014 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11015 TREE_OPERAND (arg0, 1));
11017 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11018 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11019 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11020 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11021 build_int_cst (TREE_TYPE (arg1), 0));
11022 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11023 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11024 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11025 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11026 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11027 build_int_cst (TREE_TYPE (arg1), 0));
11029 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11030 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11031 && TREE_CODE (arg1) == INTEGER_CST
11032 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11033 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11034 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11035 TREE_OPERAND (arg0, 1), arg1));
11037 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11038 (X & C) == 0 when C is a single bit. */
11039 if (TREE_CODE (arg0) == BIT_AND_EXPR
11040 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11041 && integer_zerop (arg1)
11042 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11044 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11045 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11046 TREE_OPERAND (arg0, 1));
11047 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11051 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11052 constant C is a power of two, i.e. a single bit. */
11053 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11054 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11055 && integer_zerop (arg1)
11056 && integer_pow2p (TREE_OPERAND (arg0, 1))
11057 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11058 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11060 tree arg00 = TREE_OPERAND (arg0, 0);
11061 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11062 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11065 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11066 when C is a power of two, i.e. a single bit. */
11067 if (TREE_CODE (arg0) == BIT_AND_EXPR
11068 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11069 && integer_zerop (arg1)
11070 && integer_pow2p (TREE_OPERAND (arg0, 1))
11071 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11072 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11074 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11075 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11076 arg000, TREE_OPERAND (arg0, 1));
11077 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11078 tem, build_int_cst (TREE_TYPE (tem), 0));
11081 if (integer_zerop (arg1)
11082 && tree_expr_nonzero_p (arg0))
11084 tree res = constant_boolean_node (code==NE_EXPR, type);
11085 return omit_one_operand (type, res, arg0);
11088 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11089 if (TREE_CODE (arg0) == NEGATE_EXPR
11090 && TREE_CODE (arg1) == NEGATE_EXPR)
11091 return fold_build2 (code, type,
11092 TREE_OPERAND (arg0, 0),
11093 TREE_OPERAND (arg1, 0));
11095 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
11096 if (TREE_CODE (arg0) == BIT_AND_EXPR
11097 && TREE_CODE (arg1) == BIT_AND_EXPR)
11099 tree arg00 = TREE_OPERAND (arg0, 0);
11100 tree arg01 = TREE_OPERAND (arg0, 1);
11101 tree arg10 = TREE_OPERAND (arg1, 0);
11102 tree arg11 = TREE_OPERAND (arg1, 1);
11103 tree itype = TREE_TYPE (arg0);
11105 if (operand_equal_p (arg01, arg11, 0))
11106 return fold_build2 (code, type,
11107 fold_build2 (BIT_AND_EXPR, itype,
11108 fold_build2 (BIT_XOR_EXPR, itype,
11111 build_int_cst (itype, 0));
11113 if (operand_equal_p (arg01, arg10, 0))
11114 return fold_build2 (code, type,
11115 fold_build2 (BIT_AND_EXPR, itype,
11116 fold_build2 (BIT_XOR_EXPR, itype,
11119 build_int_cst (itype, 0));
11121 if (operand_equal_p (arg00, arg11, 0))
11122 return fold_build2 (code, type,
11123 fold_build2 (BIT_AND_EXPR, itype,
11124 fold_build2 (BIT_XOR_EXPR, itype,
11127 build_int_cst (itype, 0));
11129 if (operand_equal_p (arg00, arg10, 0))
11130 return fold_build2 (code, type,
11131 fold_build2 (BIT_AND_EXPR, itype,
11132 fold_build2 (BIT_XOR_EXPR, itype,
11135 build_int_cst (itype, 0));
11138 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11139 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11141 tree arg00 = TREE_OPERAND (arg0, 0);
11142 tree arg01 = TREE_OPERAND (arg0, 1);
11143 tree arg10 = TREE_OPERAND (arg1, 0);
11144 tree arg11 = TREE_OPERAND (arg1, 1);
11145 tree itype = TREE_TYPE (arg0);
11147 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11148 operand_equal_p guarantees no side-effects so we don't need
11149 to use omit_one_operand on Z. */
11150 if (operand_equal_p (arg01, arg11, 0))
11151 return fold_build2 (code, type, arg00, arg10);
11152 if (operand_equal_p (arg01, arg10, 0))
11153 return fold_build2 (code, type, arg00, arg11);
11154 if (operand_equal_p (arg00, arg11, 0))
11155 return fold_build2 (code, type, arg01, arg10);
11156 if (operand_equal_p (arg00, arg10, 0))
11157 return fold_build2 (code, type, arg01, arg11);
11159 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11160 if (TREE_CODE (arg01) == INTEGER_CST
11161 && TREE_CODE (arg11) == INTEGER_CST)
11162 return fold_build2 (code, type,
11163 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11164 fold_build2 (BIT_XOR_EXPR, itype,
11174 tem = fold_comparison (code, type, op0, op1);
11175 if (tem != NULL_TREE)
11178 /* Transform comparisons of the form X +- C CMP X. */
11179 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11180 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11181 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11182 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11183 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11184 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
11185 && !(flag_wrapv || flag_trapv))))
11187 tree arg01 = TREE_OPERAND (arg0, 1);
11188 enum tree_code code0 = TREE_CODE (arg0);
11191 if (TREE_CODE (arg01) == REAL_CST)
11192 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11194 is_positive = tree_int_cst_sgn (arg01);
11196 /* (X - c) > X becomes false. */
11197 if (code == GT_EXPR
11198 && ((code0 == MINUS_EXPR && is_positive >= 0)
11199 || (code0 == PLUS_EXPR && is_positive <= 0)))
11200 return constant_boolean_node (0, type);
11202 /* Likewise (X + c) < X becomes false. */
11203 if (code == LT_EXPR
11204 && ((code0 == PLUS_EXPR && is_positive >= 0)
11205 || (code0 == MINUS_EXPR && is_positive <= 0)))
11206 return constant_boolean_node (0, type);
11208 /* Convert (X - c) <= X to true. */
11209 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11211 && ((code0 == MINUS_EXPR && is_positive >= 0)
11212 || (code0 == PLUS_EXPR && is_positive <= 0)))
11213 return constant_boolean_node (1, type);
11215 /* Convert (X + c) >= X to true. */
11216 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11218 && ((code0 == PLUS_EXPR && is_positive >= 0)
11219 || (code0 == MINUS_EXPR && is_positive <= 0)))
11220 return constant_boolean_node (1, type);
11222 if (TREE_CODE (arg01) == INTEGER_CST)
11224 /* Convert X + c > X and X - c < X to true for integers. */
11225 if (code == GT_EXPR
11226 && ((code0 == PLUS_EXPR && is_positive > 0)
11227 || (code0 == MINUS_EXPR && is_positive < 0)))
11228 return constant_boolean_node (1, type);
11230 if (code == LT_EXPR
11231 && ((code0 == MINUS_EXPR && is_positive > 0)
11232 || (code0 == PLUS_EXPR && is_positive < 0)))
11233 return constant_boolean_node (1, type);
11235 /* Convert X + c <= X and X - c >= X to false for integers. */
11236 if (code == LE_EXPR
11237 && ((code0 == PLUS_EXPR && is_positive > 0)
11238 || (code0 == MINUS_EXPR && is_positive < 0)))
11239 return constant_boolean_node (0, type);
11241 if (code == GE_EXPR
11242 && ((code0 == MINUS_EXPR && is_positive > 0)
11243 || (code0 == PLUS_EXPR && is_positive < 0)))
11244 return constant_boolean_node (0, type);
11248 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11249 This transformation affects the cases which are handled in later
11250 optimizations involving comparisons with non-negative constants. */
11251 if (TREE_CODE (arg1) == INTEGER_CST
11252 && TREE_CODE (arg0) != INTEGER_CST
11253 && tree_int_cst_sgn (arg1) > 0)
11255 if (code == GE_EXPR)
11257 arg1 = const_binop (MINUS_EXPR, arg1,
11258 build_int_cst (TREE_TYPE (arg1), 1), 0);
11259 return fold_build2 (GT_EXPR, type, arg0,
11260 fold_convert (TREE_TYPE (arg0), arg1));
11262 if (code == LT_EXPR)
11264 arg1 = const_binop (MINUS_EXPR, arg1,
11265 build_int_cst (TREE_TYPE (arg1), 1), 0);
11266 return fold_build2 (LE_EXPR, type, arg0,
11267 fold_convert (TREE_TYPE (arg0), arg1));
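/* Editorial example (illustrative only): for integer `x', `x >= 5' is
   canonicalized to `x > 4' and `x < 5' to `x <= 4', so later
   transformations only need to handle one form. */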
11271 /* Comparisons with the highest or lowest possible integer of
11272 the specified precision will have known values. */
11274 tree arg1_type = TREE_TYPE (arg1);
11275 unsigned int width = TYPE_PRECISION (arg1_type);
11277 if (TREE_CODE (arg1) == INTEGER_CST
11278 && ! TREE_CONSTANT_OVERFLOW (arg1)
11279 && width <= 2 * HOST_BITS_PER_WIDE_INT
11280 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11282 HOST_WIDE_INT signed_max_hi;
11283 unsigned HOST_WIDE_INT signed_max_lo;
11284 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11286 if (width <= HOST_BITS_PER_WIDE_INT)
11288 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11293 if (TYPE_UNSIGNED (arg1_type))
11295 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11301 max_lo = signed_max_lo;
11302 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11308 width -= HOST_BITS_PER_WIDE_INT;
11309 signed_max_lo = -1;
11310 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11315 if (TYPE_UNSIGNED (arg1_type))
11317 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11322 max_hi = signed_max_hi;
11323 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11327 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11328 && TREE_INT_CST_LOW (arg1) == max_lo)
11332 return omit_one_operand (type, integer_zero_node, arg0);
11335 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11338 return omit_one_operand (type, integer_one_node, arg0);
11341 return fold_build2 (NE_EXPR, type, arg0, arg1);
11343 /* The GE_EXPR and LT_EXPR cases above are not normally
11344 reached because of previous transformations. */
11349 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11351 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11355 arg1 = const_binop (PLUS_EXPR, arg1,
11356 build_int_cst (TREE_TYPE (arg1), 1), 0);
11357 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11359 arg1 = const_binop (PLUS_EXPR, arg1,
11360 build_int_cst (TREE_TYPE (arg1), 1), 0);
11361 return fold_build2 (NE_EXPR, type, arg0, arg1);
11365 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11367 && TREE_INT_CST_LOW (arg1) == min_lo)
11371 return omit_one_operand (type, integer_zero_node, arg0);
11374 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11377 return omit_one_operand (type, integer_one_node, arg0);
11380 return fold_build2 (NE_EXPR, type, op0, op1);
11385 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11387 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11391 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11392 return fold_build2 (NE_EXPR, type, arg0, arg1);
11394 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11395 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11400 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11401 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11402 && TYPE_UNSIGNED (arg1_type)
11403 /* We will flip the signedness of the comparison operator
11404 associated with the mode of arg1, so the sign bit is
11405 specified by this mode. Check that arg1 is the signed
11406 max associated with this sign bit. */
11407 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11408 /* signed_type does not work on pointer types. */
11409 && INTEGRAL_TYPE_P (arg1_type))
11411 /* The following case also applies to X < signed_max+1
11412 and X >= signed_max+1 because of previous transformations. */
11413 if (code == LE_EXPR || code == GT_EXPR)
11416 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11417 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11418 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11419 type, fold_convert (st0, arg0),
11420 build_int_cst (st1, 0));
11426 /* If we are comparing an ABS_EXPR with a constant, we can
11427 convert all the cases into explicit comparisons, but they may
11428 well not be faster than doing the ABS and one comparison.
11429 But ABS (X) <= C is a range comparison, which becomes a subtraction
11430 and a comparison, and is probably faster. */
11431 if (code == LE_EXPR
11432 && TREE_CODE (arg1) == INTEGER_CST
11433 && TREE_CODE (arg0) == ABS_EXPR
11434 && ! TREE_SIDE_EFFECTS (arg0)
11435 && (0 != (tem = negate_expr (arg1)))
11436 && TREE_CODE (tem) == INTEGER_CST
11437 && ! TREE_CONSTANT_OVERFLOW (tem))
11438 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11439 build2 (GE_EXPR, type,
11440 TREE_OPERAND (arg0, 0), tem),
11441 build2 (LE_EXPR, type,
11442 TREE_OPERAND (arg0, 0), arg1));
11444 /* Convert ABS_EXPR<x> >= 0 to true. */
11445 if (code == GE_EXPR
11446 && tree_expr_nonnegative_p (arg0)
11447 && (integer_zerop (arg1)
11448 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11449 && real_zerop (arg1))))
11450 return omit_one_operand (type, integer_one_node, arg0);
11452 /* Convert ABS_EXPR<x> < 0 to false. */
11453 if (code == LT_EXPR
11454 && tree_expr_nonnegative_p (arg0)
11455 && (integer_zerop (arg1) || real_zerop (arg1)))
11456 return omit_one_operand (type, integer_zero_node, arg0);
11458 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11459 and similarly for >= into !=. */
11460 if ((code == LT_EXPR || code == GE_EXPR)
11461 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11462 && TREE_CODE (arg1) == LSHIFT_EXPR
11463 && integer_onep (TREE_OPERAND (arg1, 0)))
11464 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11465 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11466 TREE_OPERAND (arg1, 1)),
11467 build_int_cst (TREE_TYPE (arg0), 0));
11469 if ((code == LT_EXPR || code == GE_EXPR)
11470 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11471 && (TREE_CODE (arg1) == NOP_EXPR
11472 || TREE_CODE (arg1) == CONVERT_EXPR)
11473 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11474 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11476 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11477 fold_convert (TREE_TYPE (arg0),
11478 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11479 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11481 build_int_cst (TREE_TYPE (arg0), 0));
11485 case UNORDERED_EXPR:
11493 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11495 t1 = fold_relational_const (code, type, arg0, arg1);
11496 if (t1 != NULL_TREE)
11500 /* If the first operand is NaN, the result is constant. */
11501 if (TREE_CODE (arg0) == REAL_CST
11502 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11503 && (code != LTGT_EXPR || ! flag_trapping_math))
11505 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11506 ? integer_zero_node
11507 : integer_one_node;
11508 return omit_one_operand (type, t1, arg1);
11511 /* If the second operand is NaN, the result is constant. */
11512 if (TREE_CODE (arg1) == REAL_CST
11513 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11514 && (code != LTGT_EXPR || ! flag_trapping_math))
11516 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11517 ? integer_zero_node
11518 : integer_one_node;
11519 return omit_one_operand (type, t1, arg0);
11522 /* Simplify unordered comparison of something with itself. */
11523 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11524 && operand_equal_p (arg0, arg1, 0))
11525 return constant_boolean_node (1, type);
11527 if (code == LTGT_EXPR
11528 && !flag_trapping_math
11529 && operand_equal_p (arg0, arg1, 0))
11530 return constant_boolean_node (0, type);
11532 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11534 tree targ0 = strip_float_extensions (arg0);
11535 tree targ1 = strip_float_extensions (arg1);
11536 tree newtype = TREE_TYPE (targ0);
11538 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11539 newtype = TREE_TYPE (targ1);
11541 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11542 return fold_build2 (code, type, fold_convert (newtype, targ0),
11543 fold_convert (newtype, targ1));
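/* Editorial example (illustrative only): if `f' and `g' are floats,
   `(double) f < (double) g' is folded to `f < g', dropping the
   widening conversions since they cannot change the result. */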
11548 case COMPOUND_EXPR:
11549 /* When pedantic, a compound expression can be neither an lvalue
11550 nor an integer constant expression. */
11551 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11553 /* Don't let (0, 0) be a null pointer constant. */
11554 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11555 : fold_convert (type, arg1);
11556 return pedantic_non_lvalue (tem);
11559 if ((TREE_CODE (arg0) == REAL_CST
11560 && TREE_CODE (arg1) == REAL_CST)
11561 || (TREE_CODE (arg0) == INTEGER_CST
11562 && TREE_CODE (arg1) == INTEGER_CST))
11563 return build_complex (type, arg0, arg1);
11567 /* An ASSERT_EXPR should never be passed to fold_binary. */
11568 gcc_unreachable ();
11572 } /* switch (code) */
11575 /* Callback for walk_tree, looking for LABEL_EXPR.
11576 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
11577 Do not check the sub-tree of GOTO_EXPR. */
11580 contains_label_1 (tree *tp,
11581 int *walk_subtrees,
11582 void *data ATTRIBUTE_UNUSED)
11584 switch (TREE_CODE (*tp))
11589 *walk_subtrees = 0;
11596 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11597 accessible from outside the sub-tree. Returns false if no
11598 addressable label is found. */
11601 contains_label_p (tree st)
11603 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11606 /* Fold a ternary expression of code CODE and type TYPE with operands
11607 OP0, OP1, and OP2. Return the folded expression if folding is
11608 successful. Otherwise, return NULL_TREE. */
11611 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11614 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11615 enum tree_code_class kind = TREE_CODE_CLASS (code);
11617 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11618 && TREE_CODE_LENGTH (code) == 3);
11620 /* Strip any conversions that don't change the mode. This is safe
11621 for every expression, except for a comparison expression because
11622 its signedness is derived from its operands. So, in the latter
11623 case, only strip conversions that don't change the signedness.
11625 Note that this is done as an internal manipulation within the
11626 constant folder, in order to find the simplest representation of
11627 the arguments so that their form can be studied. In any case,
11628 the appropriate type conversions should be put back in the tree
11629 that will get out of the constant folder. */
11644 case COMPONENT_REF:
11645 if (TREE_CODE (arg0) == CONSTRUCTOR
11646 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11648 unsigned HOST_WIDE_INT idx;
11650 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11657 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11658 so all simple results must be passed through pedantic_non_lvalue. */
11659 if (TREE_CODE (arg0) == INTEGER_CST)
11661 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11662 tem = integer_zerop (arg0) ? op2 : op1;
11663 /* Only optimize constant conditions when the selected branch
11664 has the same type as the COND_EXPR. This avoids optimizing
11665 away "c ? x : throw", where the throw has a void type.
11666 Avoid throwing away that operand which contains a label. */
11667 if ((!TREE_SIDE_EFFECTS (unused_op)
11668 || !contains_label_p (unused_op))
11669 && (! VOID_TYPE_P (TREE_TYPE (tem))
11670 || VOID_TYPE_P (type)))
11671 return pedantic_non_lvalue (tem);
11674 if (operand_equal_p (arg1, op2, 0))
11675 return pedantic_omit_one_operand (type, arg1, arg0);
11677 /* If we have A op B ? A : C, we may be able to convert this to a
11678 simpler expression, depending on the operation and the values
11679 of B and C. Signed zeros prevent all of these transformations,
11680 for reasons given above each one.
11682 Also try swapping the arguments and inverting the conditional. */
11683 if (COMPARISON_CLASS_P (arg0)
11684 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11685 arg1, TREE_OPERAND (arg0, 1))
11686 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11688 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11693 if (COMPARISON_CLASS_P (arg0)
11694 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11696 TREE_OPERAND (arg0, 1))
11697 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11699 tem = fold_truth_not_expr (arg0);
11700 if (tem && COMPARISON_CLASS_P (tem))
11702 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11708 /* If the second operand is simpler than the third, swap them
11709 since that produces better jump optimization results. */
11710 if (truth_value_p (TREE_CODE (arg0))
11711 && tree_swap_operands_p (op1, op2, false))
11713 /* See if this can be inverted. If it can't, possibly because
11714 it was a floating-point inequality comparison, don't do anything. */
11716 tem = fold_truth_not_expr (arg0);
11718 return fold_build3 (code, type, tem, op2, op1);
11721 /* Convert A ? 1 : 0 to simply A. */
11722 if (integer_onep (op1)
11723 && integer_zerop (op2)
11724 /* If we try to convert OP0 to our type, the
11725 call to fold will try to move the conversion inside
11726 a COND, which will recurse. In that case, the COND_EXPR
11727 is probably the best choice, so leave it alone. */
11728 && type == TREE_TYPE (arg0))
11729 return pedantic_non_lvalue (arg0);
11731 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11732 over COND_EXPR in cases such as floating point comparisons. */
11733 if (integer_zerop (op1)
11734 && integer_onep (op2)
11735 && truth_value_p (TREE_CODE (arg0)))
11736 return pedantic_non_lvalue (fold_convert (type,
11737 invert_truthvalue (arg0)));
11739 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11740 if (TREE_CODE (arg0) == LT_EXPR
11741 && integer_zerop (TREE_OPERAND (arg0, 1))
11742 && integer_zerop (op2)
11743 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11745 /* sign_bit_p only checks ARG1 bits within A's precision.
11746 If <sign bit of A> has wider type than A, bits outside
11747 of A's precision in <sign bit of A> need to be checked.
11748 If they are all 0, this optimization needs to be done
11749 	     in unsigned A's type; if they are all 1, in signed A's type;
11750 	     otherwise this can't be done.  */
11751 if (TYPE_PRECISION (TREE_TYPE (tem))
11752 < TYPE_PRECISION (TREE_TYPE (arg1))
11753 && TYPE_PRECISION (TREE_TYPE (tem))
11754 < TYPE_PRECISION (type))
11756 unsigned HOST_WIDE_INT mask_lo;
11757 HOST_WIDE_INT mask_hi;
11758 int inner_width, outer_width;
11761 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11762 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11763 if (outer_width > TYPE_PRECISION (type))
11764 outer_width = TYPE_PRECISION (type);
11766 if (outer_width > HOST_BITS_PER_WIDE_INT)
11768 mask_hi = ((unsigned HOST_WIDE_INT) -1
11769 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11775 mask_lo = ((unsigned HOST_WIDE_INT) -1
11776 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11778 if (inner_width > HOST_BITS_PER_WIDE_INT)
11780 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11781 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11785 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11786 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11788 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11789 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11791 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11792 tem = fold_convert (tem_type, tem);
11794 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11795 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11797 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11798 tem = fold_convert (tem_type, tem);
11805 return fold_convert (type,
11806 fold_build2 (BIT_AND_EXPR,
11807 TREE_TYPE (tem), tem,
11808 fold_convert (TREE_TYPE (tem),
11812 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11813 already handled above. */
11814 if (TREE_CODE (arg0) == BIT_AND_EXPR
11815 && integer_onep (TREE_OPERAND (arg0, 1))
11816 && integer_zerop (op2)
11817 && integer_pow2p (arg1))
11819 tree tem = TREE_OPERAND (arg0, 0);
11821 if (TREE_CODE (tem) == RSHIFT_EXPR
11822 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11823 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11824 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11825 return fold_build2 (BIT_AND_EXPR, type,
11826 TREE_OPERAND (tem, 0), arg1);
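/* Illustrative sketch (not part of fold): the two single-bit COND
   transformations above, restated on plain C integers.  Assumes a
   32-bit int, two's complement representation and 0 <= N < 32; the
   helper below is hypothetical and exists only for this example.  */
#include <assert.h>

static void
example_single_bit_cond (int a, unsigned n)
{
  unsigned ua = (unsigned) a;

  /* A < 0 ? <sign bit of A> : 0  ==  A & <sign bit of A>.  */
  assert ((a < 0 ? 0x80000000u : 0u) == (ua & 0x80000000u));

  /* (A >> N) & 1 ? (1 << N) : 0  ==  A & (1 << N).  */
  assert ((((ua >> n) & 1u) ? (1u << n) : 0u) == (ua & (1u << n)));
}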
11829 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11830 is probably obsolete because the first operand should be a
11831 truth value (that's why we have the two cases above), but let's
11832 leave it in until we can confirm this for all front-ends. */
11833 if (integer_zerop (op2)
11834 && TREE_CODE (arg0) == NE_EXPR
11835 && integer_zerop (TREE_OPERAND (arg0, 1))
11836 && integer_pow2p (arg1)
11837 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11838 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11839 arg1, OEP_ONLY_CONST))
11840 return pedantic_non_lvalue (fold_convert (type,
11841 TREE_OPERAND (arg0, 0)));
11843 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11844 if (integer_zerop (op2)
11845 && truth_value_p (TREE_CODE (arg0))
11846 && truth_value_p (TREE_CODE (arg1)))
11847 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11848 fold_convert (type, arg0),
11851 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11852 if (integer_onep (op2)
11853 && truth_value_p (TREE_CODE (arg0))
11854 && truth_value_p (TREE_CODE (arg1)))
11856 /* Only perform transformation if ARG0 is easily inverted. */
11857 tem = fold_truth_not_expr (arg0);
11859 return fold_build2 (TRUTH_ORIF_EXPR, type,
11860 fold_convert (type, tem),
11864 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11865 if (integer_zerop (arg1)
11866 && truth_value_p (TREE_CODE (arg0))
11867 && truth_value_p (TREE_CODE (op2)))
11869 /* Only perform transformation if ARG0 is easily inverted. */
11870 tem = fold_truth_not_expr (arg0);
11872 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11873 fold_convert (type, tem),
11877 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11878 if (integer_onep (arg1)
11879 && truth_value_p (TREE_CODE (arg0))
11880 && truth_value_p (TREE_CODE (op2)))
11881 return fold_build2 (TRUTH_ORIF_EXPR, type,
11882 fold_convert (type, arg0),
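/* Illustrative sketch (not part of fold): the four COND_EXPR /
   truth-value conversions above, checked on C ints restricted to the
   values 0 and 1.  The helper is hypothetical and only demonstrates
   the identities fold relies on.  */
static int
example_truth_cond_identities (int a, int b)
{
  /* Both A and B are assumed to be truth values (0 or 1).  */
  int ok = 1;
  ok &= (a ? b : 0) == (a && b);   /* A ? B : 0  ->  A && B   */
  ok &= (a ? b : 1) == (!a || b);  /* A ? B : 1  ->  !A || B  */
  ok &= (a ? 0 : b) == (!a && b);  /* A ? 0 : B  ->  !A && B  */
  ok &= (a ? 1 : b) == (a || b);   /* A ? 1 : B  ->  A || B   */
  return ok;                       /* 1 when all identities hold.  */
}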
11888 /* Check for a built-in function. */
11889 if (TREE_CODE (op0) == ADDR_EXPR
11890 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11891 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11892 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11895 case BIT_FIELD_REF:
11896 if (TREE_CODE (arg0) == VECTOR_CST
11897 && type == TREE_TYPE (TREE_TYPE (arg0))
11898 && host_integerp (arg1, 1)
11899 && host_integerp (op2, 1))
11901 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11902 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11905 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11906 && (idx % width) == 0
11907 && (idx = idx / width)
11908 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11910 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11911 while (idx-- > 0 && elements)
11912 elements = TREE_CHAIN (elements);
11914 return TREE_VALUE (elements);
11916 return fold_convert (type, integer_zero_node);
11923 } /* switch (code) */
11926 /* Perform constant folding and related simplification of EXPR.
11927 The related simplifications include x*1 => x, x*0 => 0, etc.,
11928 and application of the associative law.
11929 NOP_EXPR conversions may be removed freely (as long as we
11930 are careful not to change the type of the overall expression).
11931 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11932 but we can constant-fold them if they have constant operands. */
11934 #ifdef ENABLE_FOLD_CHECKING
11935 # define fold(x) fold_1 (x)
11936 static tree fold_1 (tree);
11942 const tree t = expr;
11943 enum tree_code code = TREE_CODE (t);
11944 enum tree_code_class kind = TREE_CODE_CLASS (code);
11947 /* Return right away if a constant. */
11948 if (kind == tcc_constant)
11951 if (IS_EXPR_CODE_CLASS (kind)
11952 || IS_GIMPLE_STMT_CODE_CLASS (kind))
11954 tree type = TREE_TYPE (t);
11955 tree op0, op1, op2;
11957 switch (TREE_CODE_LENGTH (code))
11960 op0 = TREE_OPERAND (t, 0);
11961 tem = fold_unary (code, type, op0);
11962 return tem ? tem : expr;
11964 op0 = TREE_OPERAND (t, 0);
11965 op1 = TREE_OPERAND (t, 1);
11966 tem = fold_binary (code, type, op0, op1);
11967 return tem ? tem : expr;
11969 op0 = TREE_OPERAND (t, 0);
11970 op1 = TREE_OPERAND (t, 1);
11971 op2 = TREE_OPERAND (t, 2);
11972 tem = fold_ternary (code, type, op0, op1, op2);
11973 return tem ? tem : expr;
11982 return fold (DECL_INITIAL (t));
11986 } /* switch (code) */
11989 #ifdef ENABLE_FOLD_CHECKING
11992 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11993 static void fold_check_failed (tree, tree);
11994 void print_fold_checksum (tree);
11996 /* When --enable-checking=fold, compute a digest of expr before
11997    and after the actual fold call to verify that fold did not
11998    accidentally change the original expr.  */
12004 struct md5_ctx ctx;
12005 unsigned char checksum_before[16], checksum_after[16];
12008 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12009 md5_init_ctx (&ctx);
12010 fold_checksum_tree (expr, &ctx, ht);
12011 md5_finish_ctx (&ctx, checksum_before);
12014 ret = fold_1 (expr);
12016 md5_init_ctx (&ctx);
12017 fold_checksum_tree (expr, &ctx, ht);
12018 md5_finish_ctx (&ctx, checksum_after);
12021 if (memcmp (checksum_before, checksum_after, 16))
12022 fold_check_failed (expr, ret);
12028 print_fold_checksum (tree expr)
12030 struct md5_ctx ctx;
12031 unsigned char checksum[16], cnt;
12034 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12035 md5_init_ctx (&ctx);
12036 fold_checksum_tree (expr, &ctx, ht);
12037 md5_finish_ctx (&ctx, checksum);
12039 for (cnt = 0; cnt < 16; ++cnt)
12040 fprintf (stderr, "%02x", checksum[cnt]);
12041 putc ('\n', stderr);
12045 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12047 internal_error ("fold check: original tree changed by fold");
12051 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12054 enum tree_code code;
12055 struct tree_function_decl buf;
12060 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12061 <= sizeof (struct tree_function_decl))
12062 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12065 slot = htab_find_slot (ht, expr, INSERT);
12069 code = TREE_CODE (expr);
12070 if (TREE_CODE_CLASS (code) == tcc_declaration
12071 && DECL_ASSEMBLER_NAME_SET_P (expr))
12073 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12074 memcpy ((char *) &buf, expr, tree_size (expr));
12075 expr = (tree) &buf;
12076 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12078 else if (TREE_CODE_CLASS (code) == tcc_type
12079 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12080 || TYPE_CACHED_VALUES_P (expr)
12081 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12083 /* Allow these fields to be modified. */
12084 memcpy ((char *) &buf, expr, tree_size (expr));
12085 expr = (tree) &buf;
12086 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12087 TYPE_POINTER_TO (expr) = NULL;
12088 TYPE_REFERENCE_TO (expr) = NULL;
12089 if (TYPE_CACHED_VALUES_P (expr))
12091 TYPE_CACHED_VALUES_P (expr) = 0;
12092 TYPE_CACHED_VALUES (expr) = NULL;
12095 md5_process_bytes (expr, tree_size (expr), ctx);
12096 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12097 if (TREE_CODE_CLASS (code) != tcc_type
12098 && TREE_CODE_CLASS (code) != tcc_declaration
12099 && code != TREE_LIST)
12100 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12101 switch (TREE_CODE_CLASS (code))
12107 md5_process_bytes (TREE_STRING_POINTER (expr),
12108 TREE_STRING_LENGTH (expr), ctx);
12111 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12112 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12115 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12121 case tcc_exceptional:
12125 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12126 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12127 expr = TREE_CHAIN (expr);
12128 goto recursive_label;
12131 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12132 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12138 case tcc_expression:
12139 case tcc_reference:
12140 case tcc_comparison:
12143 case tcc_statement:
12144 len = TREE_CODE_LENGTH (code);
12145 for (i = 0; i < len; ++i)
12146 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12148 case tcc_declaration:
12149 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12150 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12151 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12153 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12154 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12155 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12156 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12157 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12159 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12160 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12162 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12164 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12165 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12166 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12170 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12171 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12172 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12173 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12174 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12175 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12176 if (INTEGRAL_TYPE_P (expr)
12177 || SCALAR_FLOAT_TYPE_P (expr))
12179 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12180 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12182 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12183 if (TREE_CODE (expr) == RECORD_TYPE
12184 || TREE_CODE (expr) == UNION_TYPE
12185 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12186 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12187 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12196 /* Fold a unary tree expression with code CODE of type TYPE with an
12197 operand OP0. Return a folded expression if successful. Otherwise,
12198 return a tree expression with code CODE of type TYPE with an
12202 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12205 #ifdef ENABLE_FOLD_CHECKING
12206 unsigned char checksum_before[16], checksum_after[16];
12207 struct md5_ctx ctx;
12210 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12211 md5_init_ctx (&ctx);
12212 fold_checksum_tree (op0, &ctx, ht);
12213 md5_finish_ctx (&ctx, checksum_before);
12217 tem = fold_unary (code, type, op0);
12219 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12221 #ifdef ENABLE_FOLD_CHECKING
12222 md5_init_ctx (&ctx);
12223 fold_checksum_tree (op0, &ctx, ht);
12224 md5_finish_ctx (&ctx, checksum_after);
12227 if (memcmp (checksum_before, checksum_after, 16))
12228 fold_check_failed (op0, tem);
12233 /* Fold a binary tree expression with code CODE of type TYPE with
12234 operands OP0 and OP1. Return a folded expression if successful.
12235 Otherwise, return a tree expression with code CODE of type TYPE
12236 with operands OP0 and OP1. */
12239 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12243 #ifdef ENABLE_FOLD_CHECKING
12244 unsigned char checksum_before_op0[16],
12245 checksum_before_op1[16],
12246 checksum_after_op0[16],
12247 checksum_after_op1[16];
12248 struct md5_ctx ctx;
12251 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12252 md5_init_ctx (&ctx);
12253 fold_checksum_tree (op0, &ctx, ht);
12254 md5_finish_ctx (&ctx, checksum_before_op0);
12257 md5_init_ctx (&ctx);
12258 fold_checksum_tree (op1, &ctx, ht);
12259 md5_finish_ctx (&ctx, checksum_before_op1);
12263 tem = fold_binary (code, type, op0, op1);
12265 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12267 #ifdef ENABLE_FOLD_CHECKING
12268 md5_init_ctx (&ctx);
12269 fold_checksum_tree (op0, &ctx, ht);
12270 md5_finish_ctx (&ctx, checksum_after_op0);
12273 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12274 fold_check_failed (op0, tem);
12276 md5_init_ctx (&ctx);
12277 fold_checksum_tree (op1, &ctx, ht);
12278 md5_finish_ctx (&ctx, checksum_after_op1);
12281 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12282 fold_check_failed (op1, tem);
12287 /* Fold a ternary tree expression with code CODE of type TYPE with
12288 operands OP0, OP1, and OP2. Return a folded expression if
12289 successful. Otherwise, return a tree expression with code CODE of
12290 type TYPE with operands OP0, OP1, and OP2. */
12293 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12297 #ifdef ENABLE_FOLD_CHECKING
12298 unsigned char checksum_before_op0[16],
12299 checksum_before_op1[16],
12300 checksum_before_op2[16],
12301 checksum_after_op0[16],
12302 checksum_after_op1[16],
12303 checksum_after_op2[16];
12304 struct md5_ctx ctx;
12307 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12308 md5_init_ctx (&ctx);
12309 fold_checksum_tree (op0, &ctx, ht);
12310 md5_finish_ctx (&ctx, checksum_before_op0);
12313 md5_init_ctx (&ctx);
12314 fold_checksum_tree (op1, &ctx, ht);
12315 md5_finish_ctx (&ctx, checksum_before_op1);
12318 md5_init_ctx (&ctx);
12319 fold_checksum_tree (op2, &ctx, ht);
12320 md5_finish_ctx (&ctx, checksum_before_op2);
12324 tem = fold_ternary (code, type, op0, op1, op2);
12326 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12328 #ifdef ENABLE_FOLD_CHECKING
12329 md5_init_ctx (&ctx);
12330 fold_checksum_tree (op0, &ctx, ht);
12331 md5_finish_ctx (&ctx, checksum_after_op0);
12334 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12335 fold_check_failed (op0, tem);
12337 md5_init_ctx (&ctx);
12338 fold_checksum_tree (op1, &ctx, ht);
12339 md5_finish_ctx (&ctx, checksum_after_op1);
12342 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12343 fold_check_failed (op1, tem);
12345 md5_init_ctx (&ctx);
12346 fold_checksum_tree (op2, &ctx, ht);
12347 md5_finish_ctx (&ctx, checksum_after_op2);
12350 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12351 fold_check_failed (op2, tem);
12356 /* Perform constant folding and related simplification of initializer
12357 expression EXPR. These behave identically to "fold_buildN" but ignore
12358 potential run-time traps and exceptions that fold must preserve. */
12360 #define START_FOLD_INIT \
12361 int saved_signaling_nans = flag_signaling_nans;\
12362 int saved_trapping_math = flag_trapping_math;\
12363 int saved_rounding_math = flag_rounding_math;\
12364 int saved_trapv = flag_trapv;\
12365 int saved_folding_initializer = folding_initializer;\
12366 flag_signaling_nans = 0;\
12367 flag_trapping_math = 0;\
12368 flag_rounding_math = 0;\
12370 folding_initializer = 1;
12372 #define END_FOLD_INIT \
12373 flag_signaling_nans = saved_signaling_nans;\
12374 flag_trapping_math = saved_trapping_math;\
12375 flag_rounding_math = saved_rounding_math;\
12376 flag_trapv = saved_trapv;\
12377 folding_initializer = saved_folding_initializer;
12380 fold_build1_initializer (enum tree_code code, tree type, tree op)
12385 result = fold_build1 (code, type, op);
12392 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12397 result = fold_build2 (code, type, op0, op1);
12404 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12410 result = fold_build3 (code, type, op0, op1, op2);
12416 #undef START_FOLD_INIT
12417 #undef END_FOLD_INIT
12419 /* Determine whether the first argument is a multiple of the second argument.
12420    Return 0 if it is not, or if we cannot easily determine it to be.
12422 An example of the sort of thing we care about (at this point; this routine
12423 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12424 fold cases do now) is discovering that
12426 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12432 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12434 This code also handles discovering that
12436 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12438 is a multiple of 8 so we don't have to worry about dealing with a
12439 possible remainder.
12441 Note that we *look* inside a SAVE_EXPR only to determine how it was
12442 calculated; it is not safe for fold to do much of anything else with the
12443 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12444 at run time. For example, the latter example above *cannot* be implemented
12445 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12446 evaluation time of the original SAVE_EXPR is not necessarily the same at
12447 the time the new expression is evaluated. The only optimization of this
12448 sort that would be valid is changing
12450 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12454 SAVE_EXPR (I) * SAVE_EXPR (J)
12456 (where the same SAVE_EXPR (J) is used in the original and the
12457 transformed version). */
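/* Illustrative sketch (not part of fold): the property this routine
   is after, shown with plain unsigned arithmetic; the routine itself
   follows below.  Whatever the values of i and j, i * (j * 8) is a
   multiple of 8, so no remainder handling is needed.  The helper is
   hypothetical.  */
static int
example_known_multiple (unsigned i, unsigned j)
{
  /* Corresponds to SAVE_EXPR (I) * SAVE_EXPR (J * 8) in the comment
     above.  Modular unsigned arithmetic preserves divisibility by 8,
     so the result is 1 even when the multiplication wraps around.  */
  unsigned product = i * (j * 8);
  return product % 8 == 0;
}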
12460 multiple_of_p (tree type, tree top, tree bottom)
12462 if (operand_equal_p (top, bottom, 0))
12465 if (TREE_CODE (type) != INTEGER_TYPE)
12468 switch (TREE_CODE (top))
12471 /* Bitwise and provides a power of two multiple. If the mask is
12472 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12473 if (!integer_pow2p (bottom))
12478 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12479 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12483 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12484 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12487 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12491 op1 = TREE_OPERAND (top, 1);
12492 /* const_binop may not detect overflow correctly,
12493 so check for it explicitly here. */
12494 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12495 > TREE_INT_CST_LOW (op1)
12496 && TREE_INT_CST_HIGH (op1) == 0
12497 && 0 != (t1 = fold_convert (type,
12498 const_binop (LSHIFT_EXPR,
12501 && ! TREE_OVERFLOW (t1))
12502 return multiple_of_p (type, t1, bottom);
12507 /* Can't handle conversions from non-integral or wider integral type. */
12508 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12509 || (TYPE_PRECISION (type)
12510 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12513       /* ... fall through ...  */
12516 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12519 if (TREE_CODE (bottom) != INTEGER_CST
12520 || (TYPE_UNSIGNED (type)
12521 && (tree_int_cst_sgn (top) < 0
12522 || tree_int_cst_sgn (bottom) < 0)))
12524 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12532 /* Return true if `t' is known to be non-negative. */
12535 tree_expr_nonnegative_p (tree t)
12537 if (t == error_mark_node)
12540 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12543 switch (TREE_CODE (t))
12546 /* Query VRP to see if it has recorded any information about
12547 the range of this object. */
12548 return ssa_name_nonnegative_p (t);
12551 /* We can't return 1 if flag_wrapv is set because
12552 ABS_EXPR<INT_MIN> = INT_MIN. */
12553 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
12558 return tree_int_cst_sgn (t) >= 0;
12561 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12564 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12565 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12566 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12568 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12569 both unsigned and at least 2 bits shorter than the result. */
12570 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12571 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12572 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12574 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12575 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12576 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12577 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12579 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12580 TYPE_PRECISION (inner2)) + 1;
12581 return prec < TYPE_PRECISION (TREE_TYPE (t));
12587 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12589 /* x * x for floating point x is always non-negative. */
12590 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12592 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12593 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12596 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12597 	 both unsigned and the sum of their precisions is less than that of the result.  */
12598 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12599 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12600 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12602 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12603 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12604 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12605 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12606 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12607 < TYPE_PRECISION (TREE_TYPE (t));
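/* Illustrative sketch (not part of fold): why the two zero-extension
   rules above are safe, using uint8_t operands widened to a 32-bit
   signed int.  Assumes <stdint.h>; the helper is hypothetical.  */
#include <stdint.h>

static int
example_zero_extend_nonnegative (uint8_t x, uint8_t y)
{
  /* Addition: each operand needs 8 bits, so the sum needs at most 9,
     well short of the 31 value bits of a 32-bit int.  */
  int sum = (int) x + (int) y;          /* at most 510  */

  /* Multiplication: 8 + 8 = 16 bits, again short of 31.  */
  int prod = (int) x * (int) y;         /* at most 65025  */

  return sum >= 0 && prod >= 0;         /* always 1  */
}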
12613 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12614 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12620 case TRUNC_DIV_EXPR:
12621 case CEIL_DIV_EXPR:
12622 case FLOOR_DIV_EXPR:
12623 case ROUND_DIV_EXPR:
12624 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12625 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12627 case TRUNC_MOD_EXPR:
12628 case CEIL_MOD_EXPR:
12629 case FLOOR_MOD_EXPR:
12630 case ROUND_MOD_EXPR:
12632 case NON_LVALUE_EXPR:
12634 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12636 case COMPOUND_EXPR:
12638 case GIMPLE_MODIFY_STMT:
12639 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12642 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
12645 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12646 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
12650 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12651 tree outer_type = TREE_TYPE (t);
12653 if (TREE_CODE (outer_type) == REAL_TYPE)
12655 if (TREE_CODE (inner_type) == REAL_TYPE)
12656 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12657 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12659 if (TYPE_UNSIGNED (inner_type))
12661 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12664 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12666 if (TREE_CODE (inner_type) == REAL_TYPE)
12667 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12668 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12669 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12670 && TYPE_UNSIGNED (inner_type);
12677 tree temp = TARGET_EXPR_SLOT (t);
12678 t = TARGET_EXPR_INITIAL (t);
12680 /* If the initializer is non-void, then it's a normal expression
12681 that will be assigned to the slot. */
12682 if (!VOID_TYPE_P (t))
12683 return tree_expr_nonnegative_p (t);
12685 /* Otherwise, the initializer sets the slot in some way. One common
12686 way is an assignment statement at the end of the initializer. */
12689 if (TREE_CODE (t) == BIND_EXPR)
12690 t = expr_last (BIND_EXPR_BODY (t));
12691 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12692 || TREE_CODE (t) == TRY_CATCH_EXPR)
12693 t = expr_last (TREE_OPERAND (t, 0));
12694 else if (TREE_CODE (t) == STATEMENT_LIST)
12699 if ((TREE_CODE (t) == MODIFY_EXPR
12700 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
12701 && GENERIC_TREE_OPERAND (t, 0) == temp)
12702 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
12709 tree fndecl = get_callee_fndecl (t);
12710 tree arglist = TREE_OPERAND (t, 1);
12711 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12712 switch (DECL_FUNCTION_CODE (fndecl))
12714 CASE_FLT_FN (BUILT_IN_ACOS):
12715 CASE_FLT_FN (BUILT_IN_ACOSH):
12716 CASE_FLT_FN (BUILT_IN_CABS):
12717 CASE_FLT_FN (BUILT_IN_COSH):
12718 CASE_FLT_FN (BUILT_IN_ERFC):
12719 CASE_FLT_FN (BUILT_IN_EXP):
12720 CASE_FLT_FN (BUILT_IN_EXP10):
12721 CASE_FLT_FN (BUILT_IN_EXP2):
12722 CASE_FLT_FN (BUILT_IN_FABS):
12723 CASE_FLT_FN (BUILT_IN_FDIM):
12724 CASE_FLT_FN (BUILT_IN_HYPOT):
12725 CASE_FLT_FN (BUILT_IN_POW10):
12726 CASE_INT_FN (BUILT_IN_FFS):
12727 CASE_INT_FN (BUILT_IN_PARITY):
12728 CASE_INT_FN (BUILT_IN_POPCOUNT):
12729 case BUILT_IN_BSWAP32:
12730 case BUILT_IN_BSWAP64:
12734 CASE_FLT_FN (BUILT_IN_SQRT):
12735 /* sqrt(-0.0) is -0.0. */
12736 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12738 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12740 CASE_FLT_FN (BUILT_IN_ASINH):
12741 CASE_FLT_FN (BUILT_IN_ATAN):
12742 CASE_FLT_FN (BUILT_IN_ATANH):
12743 CASE_FLT_FN (BUILT_IN_CBRT):
12744 CASE_FLT_FN (BUILT_IN_CEIL):
12745 CASE_FLT_FN (BUILT_IN_ERF):
12746 CASE_FLT_FN (BUILT_IN_EXPM1):
12747 CASE_FLT_FN (BUILT_IN_FLOOR):
12748 CASE_FLT_FN (BUILT_IN_FMOD):
12749 CASE_FLT_FN (BUILT_IN_FREXP):
12750 CASE_FLT_FN (BUILT_IN_LCEIL):
12751 CASE_FLT_FN (BUILT_IN_LDEXP):
12752 CASE_FLT_FN (BUILT_IN_LFLOOR):
12753 CASE_FLT_FN (BUILT_IN_LLCEIL):
12754 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12755 CASE_FLT_FN (BUILT_IN_LLRINT):
12756 CASE_FLT_FN (BUILT_IN_LLROUND):
12757 CASE_FLT_FN (BUILT_IN_LRINT):
12758 CASE_FLT_FN (BUILT_IN_LROUND):
12759 CASE_FLT_FN (BUILT_IN_MODF):
12760 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12761 CASE_FLT_FN (BUILT_IN_RINT):
12762 CASE_FLT_FN (BUILT_IN_ROUND):
12763 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12764 CASE_FLT_FN (BUILT_IN_SINH):
12765 CASE_FLT_FN (BUILT_IN_TANH):
12766 CASE_FLT_FN (BUILT_IN_TRUNC):
12767 /* True if the 1st argument is nonnegative. */
12768 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12770 CASE_FLT_FN (BUILT_IN_FMAX):
12771 /* True if the 1st OR 2nd arguments are nonnegative. */
12772 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12773 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12775 CASE_FLT_FN (BUILT_IN_FMIN):
12776 /* True if the 1st AND 2nd arguments are nonnegative. */
12777 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12778 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12780 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12781 /* True if the 2nd argument is nonnegative. */
12782 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12784 CASE_FLT_FN (BUILT_IN_POWI):
12785 /* True if the 1st argument is nonnegative or the second
12786 argument is an even integer. */
12787 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
12789 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
12790 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
12793 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12795 CASE_FLT_FN (BUILT_IN_POW):
12796 /* True if the 1st argument is nonnegative or the second
12797 argument is an even integer valued real. */
12798 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
12803 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
12804 n = real_to_integer (&c);
12807 REAL_VALUE_TYPE cint;
12808 real_from_integer (&cint, VOIDmode, n,
12809 n < 0 ? -1 : 0, 0);
12810 if (real_identical (&c, &cint))
12814 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12821 /* ... fall through ... */
12824 if (truth_value_p (TREE_CODE (t)))
12825 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12829   /* We don't know the sign of `t', so be conservative and return false.  */
12833 /* Return true when T is an address and is known to be nonzero.
12834 For floating point we further ensure that T is not denormal.
12835 Similar logic is present in nonzero_address in rtlanal.h. */
12838 tree_expr_nonzero_p (tree t)
12840 tree type = TREE_TYPE (t);
12842 /* Doing something useful for floating point would need more work. */
12843 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12846 switch (TREE_CODE (t))
12849 /* Query VRP to see if it has recorded any information about
12850 the range of this object. */
12851 return ssa_name_nonzero_p (t);
12854 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12857 /* We used to test for !integer_zerop here. This does not work correctly
12858 if TREE_CONSTANT_OVERFLOW (t). */
12859 return (TREE_INT_CST_LOW (t) != 0
12860 || TREE_INT_CST_HIGH (t) != 0);
12863 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12865 	  /* In the presence of negative values it is hard
12866 	     to say anything definite.  */
12867 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12868 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12870 	  /* One of the operands must be positive and the other non-negative.  */
12871 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12872 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12877 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12879 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12880 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12886 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12887 tree outer_type = TREE_TYPE (t);
12889 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12890 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
12896 tree base = get_base_address (TREE_OPERAND (t, 0));
12901 /* Weak declarations may link to NULL. */
12902 if (VAR_OR_FUNCTION_DECL_P (base))
12903 return !DECL_WEAK (base);
12905 /* Constants are never weak. */
12906 if (CONSTANT_CLASS_P (base))
12913 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12914 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
12917 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12918 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
12921 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12923 /* When both operands are nonzero, then MAX must be too. */
12924 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12927 /* MAX where operand 0 is positive is positive. */
12928 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12930 /* MAX where operand 1 is positive is positive. */
12931 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12932 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12936 case COMPOUND_EXPR:
12938 case GIMPLE_MODIFY_STMT:
12940 return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));
12943 case NON_LVALUE_EXPR:
12944 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12947 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12948 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12951 return alloca_call_p (t);
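/* Illustrative sketch (not part of fold): why the ADDR_EXPR case above
   refuses to treat the address of a weak declaration as nonzero.
   Assumes the GNU "weak" attribute; the identifiers are hypothetical.  */
extern int example_weak_symbol __attribute__ ((weak));
static int example_strong_symbol;

static int
example_address_nonzero (void)
{
  /* Always 1: an ordinary definition cannot end up at address 0.  */
  int strong = (&example_strong_symbol != 0);
  /* 0 or 1, depending on whether a definition of the weak symbol was
     linked in, so nothing can be concluded at compile time.  */
  int weak = (&example_weak_symbol != 0);
  return strong && weak;
}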
12959 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12960 attempt to fold the expression to a constant without modifying TYPE,
12963 If the expression could be simplified to a constant, then return
12964    the constant.  If the expression cannot be simplified to a
12965    constant, then return NULL_TREE.  */
12968 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12970 tree tem = fold_binary (code, type, op0, op1);
12971 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12974 /* Given the components of a unary expression CODE, TYPE and OP0,
12975 attempt to fold the expression to a constant without modifying
12978 If the expression could be simplified to a constant, then return
12979    the constant.  If the expression cannot be simplified to a
12980    constant, then return NULL_TREE.  */
12983 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12985 tree tem = fold_unary (code, type, op0);
12986 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12989 /* If EXP represents referencing an element in a constant string
12990 (either via pointer arithmetic or array indexing), return the
12991 tree representing the value accessed, otherwise return NULL. */
12994 fold_read_from_constant_string (tree exp)
12996 if ((TREE_CODE (exp) == INDIRECT_REF
12997 || TREE_CODE (exp) == ARRAY_REF)
12998 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13000 tree exp1 = TREE_OPERAND (exp, 0);
13004 if (TREE_CODE (exp) == INDIRECT_REF)
13005 string = string_constant (exp1, &index);
13008 tree low_bound = array_ref_low_bound (exp);
13009 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
13011 	  /* Optimize the special case of a zero lower bound.
13013 We convert the low_bound to sizetype to avoid some problems
13014 with constant folding. (E.g. suppose the lower bound is 1,
13015 	     and its mode is QI.  Without the conversion, (ARRAY
13016 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13017 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
13018 if (! integer_zerop (low_bound))
13019 index = size_diffop (index, fold_convert (sizetype, low_bound));
13025 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13026 && TREE_CODE (string) == STRING_CST
13027 && TREE_CODE (index) == INTEGER_CST
13028 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13029 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13031 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13032 return fold_convert (TREE_TYPE (exp),
13033 build_int_cst (NULL_TREE,
13034 (TREE_STRING_POINTER (string)
13035 [TREE_INT_CST_LOW (index)])));
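/* Illustrative sketch (not part of fold): the kind of access the
   routine above folds, an element of a constant string reached either
   by array indexing or by pointer arithmetic.  The helper is
   hypothetical.  */
static int
example_read_from_constant_string (void)
{
  /* Both expressions denote the character 'e'; the string, the index
     and the element size are all known at compile time, so fold can
     replace each access with the integer constant 'e'.  */
  return "hello"[1] == 'e' && *("hello" + 1) == 'e';   /* always 1  */
}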
13040 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13041 an integer constant or real constant.
13043 TYPE is the type of the result. */
13046 fold_negate_const (tree arg0, tree type)
13048 tree t = NULL_TREE;
13050 switch (TREE_CODE (arg0))
13054 unsigned HOST_WIDE_INT low;
13055 HOST_WIDE_INT high;
13056 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13057 TREE_INT_CST_HIGH (arg0),
13059 t = build_int_cst_wide (type, low, high);
13060 t = force_fit_type (t, 1,
13061 (overflow | TREE_OVERFLOW (arg0))
13062 && !TYPE_UNSIGNED (type),
13063 TREE_CONSTANT_OVERFLOW (arg0));
13068 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13072 gcc_unreachable ();
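/* Illustrative sketch (not part of fold): two-word negation of the
   kind neg_double performs on the (low, high) halves of an integer
   constant, written here for a 128-bit value held in two uint64_t
   words.  Assumes <stdint.h>; the helper is hypothetical.  */
#include <stdint.h>

static void
example_neg_double (uint64_t low, uint64_t high,
		    uint64_t *neg_low, uint64_t *neg_high)
{
  /* -(high:low) == ~(high:low) + 1.  Adding 1 to ~low carries into
     the high word exactly when low was 0.  */
  *neg_low = ~low + 1;              /* same as -low in modular arithmetic  */
  *neg_high = ~high + (low == 0);
}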
13078 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13079 an integer constant or real constant.
13081 TYPE is the type of the result. */
13084 fold_abs_const (tree arg0, tree type)
13086 tree t = NULL_TREE;
13088 switch (TREE_CODE (arg0))
13091 /* If the value is unsigned, then the absolute value is
13092 the same as the ordinary value. */
13093 if (TYPE_UNSIGNED (type))
13095 /* Similarly, if the value is non-negative. */
13096 else if (INT_CST_LT (integer_minus_one_node, arg0))
13098 /* If the value is negative, then the absolute value is
13102 unsigned HOST_WIDE_INT low;
13103 HOST_WIDE_INT high;
13104 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13105 TREE_INT_CST_HIGH (arg0),
13107 t = build_int_cst_wide (type, low, high);
13108 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
13109 TREE_CONSTANT_OVERFLOW (arg0));
13114 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13115 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13121 gcc_unreachable ();
13127 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13128 constant. TYPE is the type of the result. */
13131 fold_not_const (tree arg0, tree type)
13133 tree t = NULL_TREE;
13135 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13137 t = build_int_cst_wide (type,
13138 ~ TREE_INT_CST_LOW (arg0),
13139 ~ TREE_INT_CST_HIGH (arg0));
13140 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
13141 TREE_CONSTANT_OVERFLOW (arg0));
13146 /* Given CODE, a relational operator, the target type, TYPE and two
13147 constant operands OP0 and OP1, return the result of the
13148 relational operation. If the result is not a compile time
13149 constant, then return NULL_TREE. */
13152 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13154 int result, invert;
13156 /* From here on, the only cases we handle are when the result is
13157 known to be a constant. */
13159 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13161 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13162 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13164 /* Handle the cases where either operand is a NaN. */
13165 if (real_isnan (c0) || real_isnan (c1))
13175 case UNORDERED_EXPR:
13189 if (flag_trapping_math)
13195 gcc_unreachable ();
13198 return constant_boolean_node (result, type);
13201 return constant_boolean_node (real_compare (code, c0, c1), type);
13204 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13206 To compute GT, swap the arguments and do LT.
13207 To compute GE, do LT and invert the result.
13208 To compute LE, swap the arguments, do LT and invert the result.
13209 To compute NE, do EQ and invert the result.
13211 Therefore, the code below must handle only EQ and LT. */
13213 if (code == LE_EXPR || code == GT_EXPR)
13218 code = swap_tree_comparison (code);
13221 /* Note that it is safe to invert for real values here because we
13222      have already handled the one case where it matters.  */
13225 if (code == NE_EXPR || code == GE_EXPR)
13228 code = invert_tree_comparison (code, false);
13231 /* Compute a result for LT or EQ if args permit;
13232 Otherwise return T. */
13233 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13235 if (code == EQ_EXPR)
13236 result = tree_int_cst_equal (op0, op1);
13237 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13238 result = INT_CST_LT_UNSIGNED (op0, op1);
13240 result = INT_CST_LT (op0, op1);
13247 return constant_boolean_node (result, type);
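/* Illustrative sketch (not part of fold): the reduction described in
   the comment above, reducing the six integer comparisons to LT and
   EQ by swapping operands and/or inverting the result, shown on plain
   C longs.  The code numbering and helper are hypothetical.  */
static int
example_compare (int code, long a, long b)
{
  /* code: 0 = LT, 1 = LE, 2 = GT, 3 = GE, 4 = EQ, 5 = NE.  */
  int invert = 0;
  int result;

  if (code == 1 /* LE */ || code == 2 /* GT */)
    {
      long tmp = a; a = b; b = tmp;          /* swap the operands  */
      code = (code == 1 ? 3 /* GE */ : 0 /* LT */);
    }
  if (code == 3 /* GE */ || code == 5 /* NE */)
    {
      invert = 1;                            /* GE == !LT, NE == !EQ  */
      code = (code == 3 ? 0 /* LT */ : 4 /* EQ */);
    }

  result = (code == 0) ? (a < b) : (a == b);
  return invert ? !result : result;
}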
13250 /* Build an expression for a cleanup point containing EXPR with type TYPE.
13251    Don't build a cleanup point expression for an EXPR which doesn't have side
13255 fold_build_cleanup_point_expr (tree type, tree expr)
13257 /* If the expression does not have side effects then we don't have to wrap
13258 it with a cleanup point expression. */
13259 if (!TREE_SIDE_EFFECTS (expr))
13262   /* If the expression is a return, check whether the expression inside the
13263      return, or the right-hand side of the modify expression inside the
13264      return, has side effects.  If either one has none, we don't need to
13265      wrap the expression in a cleanup point expression.  Note we don't check the
13266      left-hand side of the modify because it should always be a return decl.  */
13267 if (TREE_CODE (expr) == RETURN_EXPR)
13269 tree op = TREE_OPERAND (expr, 0);
13270 if (!op || !TREE_SIDE_EFFECTS (op))
13272 op = TREE_OPERAND (op, 1);
13273 if (!TREE_SIDE_EFFECTS (op))
13277 return build1 (CLEANUP_POINT_EXPR, type, expr);
13280 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13281 avoid confusing the gimplify process. */
13284 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13286 /* The size of the object is not relevant when talking about its address. */
13287 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13288 t = TREE_OPERAND (t, 0);
13290 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13291 if (TREE_CODE (t) == INDIRECT_REF
13292 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13294 t = TREE_OPERAND (t, 0);
13295 if (TREE_TYPE (t) != ptrtype)
13296 t = build1 (NOP_EXPR, ptrtype, t);
13302 while (handled_component_p (base))
13303 base = TREE_OPERAND (base, 0);
13305 TREE_ADDRESSABLE (base) = 1;
13307 t = build1 (ADDR_EXPR, ptrtype, t);
13314 build_fold_addr_expr (tree t)
13316 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13319 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13320 of an indirection through OP0, or NULL_TREE if no simplification is
13324 fold_indirect_ref_1 (tree type, tree op0)
13330 subtype = TREE_TYPE (sub);
13331 if (!POINTER_TYPE_P (subtype))
13334 if (TREE_CODE (sub) == ADDR_EXPR)
13336 tree op = TREE_OPERAND (sub, 0);
13337 tree optype = TREE_TYPE (op);
13338 /* *&CONST_DECL -> to the value of the const decl. */
13339 if (TREE_CODE (op) == CONST_DECL)
13340 return DECL_INITIAL (op);
13341 /* *&p => p; make sure to handle *&"str"[cst] here. */
13342 if (type == optype)
13344 tree fop = fold_read_from_constant_string (op);
13350 /* *(foo *)&fooarray => fooarray[0] */
13351 else if (TREE_CODE (optype) == ARRAY_TYPE
13352 && type == TREE_TYPE (optype))
13354 tree type_domain = TYPE_DOMAIN (optype);
13355 tree min_val = size_zero_node;
13356 if (type_domain && TYPE_MIN_VALUE (type_domain))
13357 min_val = TYPE_MIN_VALUE (type_domain);
13358 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13360 /* *(foo *)&complexfoo => __real__ complexfoo */
13361 else if (TREE_CODE (optype) == COMPLEX_TYPE
13362 && type == TREE_TYPE (optype))
13363 return fold_build1 (REALPART_EXPR, type, op);
13364 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13365 else if (TREE_CODE (optype) == VECTOR_TYPE
13366 && type == TREE_TYPE (optype))
13368 tree part_width = TYPE_SIZE (type);
13369 tree index = bitsize_int (0);
13370 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
13374 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13375 if (TREE_CODE (sub) == PLUS_EXPR
13376 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13378 tree op00 = TREE_OPERAND (sub, 0);
13379 tree op01 = TREE_OPERAND (sub, 1);
13383 op00type = TREE_TYPE (op00);
13384 if (TREE_CODE (op00) == ADDR_EXPR
13385 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13386 && type == TREE_TYPE (TREE_TYPE (op00type)))
13388 tree size = TYPE_SIZE_UNIT (type);
13389 if (tree_int_cst_equal (size, op01))
13390 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13394 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13395 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13396 && type == TREE_TYPE (TREE_TYPE (subtype)))
13399 tree min_val = size_zero_node;
13400 sub = build_fold_indirect_ref (sub);
13401 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13402 if (type_domain && TYPE_MIN_VALUE (type_domain))
13403 min_val = TYPE_MIN_VALUE (type_domain);
13404 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
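/* Illustrative sketch (not part of fold): the source-level identities
   behind two of the transformations above, assuming C99 complex types
   (the real part is stored first) and a small scratch array.  The
   identifiers are hypothetical.  */
#include <assert.h>
#include <complex.h>

static void
example_indirect_ref (void)
{
  double fooarray[4] = { 1.0, 2.0, 3.0, 4.0 };
  double _Complex complexfoo = 5.0 + 6.0 * I;

  /* *(foo *)&fooarray  =>  fooarray[0]  */
  assert (*(double *) &fooarray == fooarray[0]);

  /* *(foo *)&complexfoo  =>  __real__ complexfoo
     ((foo *)&complexfoo)[1]  =>  __imag__ complexfoo  */
  assert (*(double *) &complexfoo == creal (complexfoo));
  assert (((double *) &complexfoo)[1] == cimag (complexfoo));
}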
13410 /* Builds an expression for an indirection through T, simplifying some
13414 build_fold_indirect_ref (tree t)
13416 tree type = TREE_TYPE (TREE_TYPE (t));
13417 tree sub = fold_indirect_ref_1 (type, t);
13422 return build1 (INDIRECT_REF, type, t);
13425 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13428 fold_indirect_ref (tree t)
13430 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13438 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13439 whose result is ignored. The type of the returned tree need not be
13440 the same as the original expression. */
13443 fold_ignored_result (tree t)
13445 if (!TREE_SIDE_EFFECTS (t))
13446 return integer_zero_node;
13449 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13452 t = TREE_OPERAND (t, 0);
13456 case tcc_comparison:
13457 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13458 t = TREE_OPERAND (t, 0);
13459 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13460 t = TREE_OPERAND (t, 1);
13465 case tcc_expression:
13466 switch (TREE_CODE (t))
13468 case COMPOUND_EXPR:
13469 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13471 t = TREE_OPERAND (t, 0);
13475 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13476 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13478 t = TREE_OPERAND (t, 0);
13491 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13492 This can only be applied to objects of a sizetype. */
13495 round_up (tree value, int divisor)
13497 tree div = NULL_TREE;
13499 gcc_assert (divisor > 0);
13503 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13504 have to do anything. Only do this when we are not given a const,
13505 because in that case, this check is more expensive than just
13507 if (TREE_CODE (value) != INTEGER_CST)
13509 div = build_int_cst (TREE_TYPE (value), divisor);
13511 if (multiple_of_p (TREE_TYPE (value), value, div))
13515 /* If divisor is a power of two, simplify this to bit manipulation. */
13516 if (divisor == (divisor & -divisor))
13520 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13521 value = size_binop (PLUS_EXPR, value, t);
13522 t = build_int_cst (TREE_TYPE (value), -divisor);
13523 value = size_binop (BIT_AND_EXPR, value, t);
13528 div = build_int_cst (TREE_TYPE (value), divisor);
13529 value = size_binop (CEIL_DIV_EXPR, value, div);
13530 value = size_binop (MULT_EXPR, value, div);
13536 /* Likewise, but round down. */
13539 round_down (tree value, int divisor)
13541 tree div = NULL_TREE;
13543 gcc_assert (divisor > 0);
13547 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13548 have to do anything. Only do this when we are not given a const,
13549 because in that case, this check is more expensive than just
13551 if (TREE_CODE (value) != INTEGER_CST)
13553 div = build_int_cst (TREE_TYPE (value), divisor);
13555 if (multiple_of_p (TREE_TYPE (value), value, div))
13559 /* If divisor is a power of two, simplify this to bit manipulation. */
13560 if (divisor == (divisor & -divisor))
13564 t = build_int_cst (TREE_TYPE (value), -divisor);
13565 value = size_binop (BIT_AND_EXPR, value, t);
13570 div = build_int_cst (TREE_TYPE (value), divisor);
13571 value = size_binop (FLOOR_DIV_EXPR, value, div);
13572 value = size_binop (MULT_EXPR, value, div);
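/* Illustrative sketch (not part of fold): the same rounding on plain
   unsigned values, assuming DIVISOR > 0.  When DIVISOR is a power of
   two, an addition and a mask suffice; otherwise divide and multiply
   back.  The helpers are hypothetical.  */
static unsigned
example_round_up (unsigned value, unsigned divisor)
{
  if (divisor == (divisor & -divisor))              /* power of two  */
    return (value + divisor - 1) & -divisor;
  return (value + divisor - 1) / divisor * divisor;
}

static unsigned
example_round_down (unsigned value, unsigned divisor)
{
  if (divisor == (divisor & -divisor))              /* power of two  */
    return value & -divisor;
  return value / divisor * divisor;
}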
13578 /* Returns the pointer to the base of the object addressed by EXP and
13579 extracts the information about the offset of the access, storing it
13580    in PBITPOS and POFFSET.  */
13583 split_address_to_core_and_offset (tree exp,
13584 HOST_WIDE_INT *pbitpos, tree *poffset)
13587 enum machine_mode mode;
13588 int unsignedp, volatilep;
13589 HOST_WIDE_INT bitsize;
13591 if (TREE_CODE (exp) == ADDR_EXPR)
13593 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13594 poffset, &mode, &unsignedp, &volatilep,
13596 core = build_fold_addr_expr (core);
13602 *poffset = NULL_TREE;
13608 /* Returns true if addresses of E1 and E2 differ by a constant, false
13609 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13612 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13615 HOST_WIDE_INT bitpos1, bitpos2;
13616 tree toffset1, toffset2, tdiff, type;
13618 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13619 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13621 if (bitpos1 % BITS_PER_UNIT != 0
13622 || bitpos2 % BITS_PER_UNIT != 0
13623 || !operand_equal_p (core1, core2, 0))
13626 if (toffset1 && toffset2)
13628 type = TREE_TYPE (toffset1);
13629 if (type != TREE_TYPE (toffset2))
13630 toffset2 = fold_convert (type, toffset2);
13632 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13633 if (!cst_and_fits_in_hwi (tdiff))
13636 *diff = int_cst_value (tdiff);
13638 else if (toffset1 || toffset2)
13640 /* If only one of the offsets is non-constant, the difference cannot
13647 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
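/* Illustrative sketch (not part of fold): what ptr_difference_const
   computes, shown at the source level with offsetof.  Assumes
   <stddef.h>; the struct and helper are hypothetical.  */
#include <stddef.h>

struct example_pair { char a; double b; };

static ptrdiff_t
example_ptr_difference (struct example_pair *p)
{
  /* &p->b and &p->a share the same core (p), so their difference is
     the compile-time constant offsetof (struct example_pair, b).  */
  return (char *) &p->b - (char *) &p->a;
}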
13651 /* Simplify the floating point expression EXP when the sign of the
13652 result is not significant. Return NULL_TREE if no simplification
13656 fold_strip_sign_ops (tree exp)
13660 switch (TREE_CODE (exp))
13664 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13665 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13669 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13671 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13672 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13673 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13674 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13675 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13676 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13679 case COMPOUND_EXPR:
13680 arg0 = TREE_OPERAND (exp, 0);
13681 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13683 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
13687 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13688 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
13690 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
13691 arg0 ? arg0 : TREE_OPERAND (exp, 1),
13692 arg1 ? arg1 : TREE_OPERAND (exp, 2));
13697 const enum built_in_function fcode = builtin_mathfn_code (exp);
13700 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13701 /* Strip copysign function call, return the 1st argument. */
13702 arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
13703 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
13704 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
13707 /* Strip sign ops from the argument of "odd" math functions. */
13708 if (negate_mathfn_p (fcode))
13710 arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
13712 return build_function_call_expr (get_callee_fndecl (exp),
13713 build_tree_list (NULL_TREE,