1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
force_fit_type_double takes a constant, an overflowable flag and a
prior overflow indicator.  It forces the value to fit the type and
sets TREE_OVERFLOW if this is necessary.
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
52 #include "coretypes.h"
64 #include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
71 /* The following constants represent a bit based encoding of GCC's
72 comparison operators. This encoding simplifies transformations
73 on relational comparison operators, such as AND and OR. */
74 enum comparison_code {
93 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
94 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
95 static bool negate_mathfn_p (enum built_in_function);
96 static bool negate_expr_p (tree);
97 static tree negate_expr (tree);
98 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
99 static tree associate_trees (tree, tree, enum tree_code, tree);
100 static tree const_binop (enum tree_code, tree, tree, int);
101 static enum comparison_code comparison_to_compcode (enum tree_code);
102 static enum tree_code compcode_to_comparison (enum comparison_code);
103 static tree combine_comparisons (enum tree_code, enum tree_code,
104 enum tree_code, tree, tree, tree);
105 static int truth_value_p (enum tree_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand (tree, tree, tree);
110 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (tree, tree, int, int, int);
112 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
113 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
114 enum machine_mode *, int *, int *,
116 static int all_ones_mask_p (tree, int);
117 static tree sign_bit_p (tree, tree);
118 static int simple_operand_p (tree);
119 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
120 static tree range_predecessor (tree);
121 static tree range_successor (tree);
122 static tree make_range (tree, int *, tree *, tree *, bool *);
123 static tree build_range_check (tree, tree, int, tree, tree);
124 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
126 static tree fold_range_test (enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree fold_truthop (enum tree_code, tree, tree, tree);
130 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
133 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
136 static bool fold_real_zero_addition_p (tree, tree, int);
137 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
139 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
140 static tree fold_div_compare (enum tree_code, tree, tree, tree);
141 static bool reorder_operands_p (tree, tree);
142 static tree fold_negate_const (tree, tree);
143 static tree fold_not_const (tree, tree);
144 static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
168 /* Unpack a two-word integer into 4 words.
169 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
170 WORDS points to the array of HOST_WIDE_INTs. */
173 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
175 words[0] = LOWPART (low);
176 words[1] = HIGHPART (low);
177 words[2] = LOWPART (hi);
178 words[3] = HIGHPART (hi);
181 /* Pack an array of 4 words into a two-word integer.
182 WORDS points to the array of words.
183 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
186 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
189 *low = words[0] + words[1] * BASE;
190 *hi = words[2] + words[3] * BASE;
193 /* Force the double-word integer L1, H1 to be within the range of the
194 integer type TYPE. Stores the properly truncated and sign-extended
195 double-word integer in *LV, *HV. Returns true if the operation
196 overflows, that is, argument and result are different. */
199 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
200 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
202 unsigned HOST_WIDE_INT low0 = l1;
203 HOST_WIDE_INT high0 = h1;
205 int sign_extended_type;
207 if (POINTER_TYPE_P (type)
208 || TREE_CODE (type) == OFFSET_TYPE)
211 prec = TYPE_PRECISION (type);
213 /* Size types *are* sign extended. */
214 sign_extended_type = (!TYPE_UNSIGNED (type)
215 || (TREE_CODE (type) == INTEGER_TYPE
216 && TYPE_IS_SIZETYPE (type)));
218 /* First clear all bits that are beyond the type's precision. */
219 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
221 else if (prec > HOST_BITS_PER_WIDE_INT)
222 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
226 if (prec < HOST_BITS_PER_WIDE_INT)
227 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
230 /* Then do sign extension if necessary. */
231 if (!sign_extended_type)
232 /* No sign extension */;
233 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
234 /* Correct width already. */;
235 else if (prec > HOST_BITS_PER_WIDE_INT)
237 /* Sign extend top half? */
238 if (h1 & ((unsigned HOST_WIDE_INT)1
239 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
240 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
242 else if (prec == HOST_BITS_PER_WIDE_INT)
244 if ((HOST_WIDE_INT)l1 < 0)
249 /* Sign extend bottom half? */
250 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
253 l1 |= (HOST_WIDE_INT)(-1) << prec;
260 /* If the value didn't fit, signal overflow. */
261 return l1 != low0 || h1 != high0;
264 /* We force the double-int HIGH:LOW to the range of the type TYPE by
265 sign or zero extending it.
266 OVERFLOWABLE indicates if we are interested
267 in overflow of the value, when >0 we are only interested in signed
268 overflow, for <0 we are interested in any overflow. OVERFLOWED
269 indicates whether overflow has already occurred. CONST_OVERFLOWED
270 indicates whether constant overflow has already occurred. We force
271 T's value to be within range of T's type (by setting to 0 or 1 all
272 the bits outside the type's range). We set TREE_OVERFLOWED if,
273 OVERFLOWED is nonzero,
274 or OVERFLOWABLE is >0 and signed overflow occurs
275 or OVERFLOWABLE is <0 and any overflow occurs
276 We return a new tree node for the extended double-int. The node
277 is shared if no overflow flags are set. */
280 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
281 HOST_WIDE_INT high, int overflowable,
284 int sign_extended_type;
287 /* Size types *are* sign extended. */
288 sign_extended_type = (!TYPE_UNSIGNED (type)
289 || (TREE_CODE (type) == INTEGER_TYPE
290 && TYPE_IS_SIZETYPE (type)));
292 overflow = fit_double_type (low, high, &low, &high, type);
294 /* If we need to set overflow flags, return a new unshared node. */
295 if (overflowed || overflow)
299 || (overflowable > 0 && sign_extended_type))
301 tree t = make_node (INTEGER_CST);
302 TREE_INT_CST_LOW (t) = low;
303 TREE_INT_CST_HIGH (t) = high;
304 TREE_TYPE (t) = type;
305 TREE_OVERFLOW (t) = 1;
310 /* Else build a shared node. */
311 return build_int_cst_wide (type, low, high);
314 /* Add two doubleword integers with doubleword result.
315 Return nonzero if the operation overflows according to UNSIGNED_P.
316 Each argument is given as two `HOST_WIDE_INT' pieces.
317 One argument is L1 and H1; the other, L2 and H2.
318 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
321 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
322 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
323 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
326 unsigned HOST_WIDE_INT l;
330 h = h1 + h2 + (l < l1);
336 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
338 return OVERFLOW_SUM_SIGN (h1, h2, h);
341 /* Negate a doubleword integer with doubleword result.
342 Return nonzero if the operation overflows, assuming it's signed.
343 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
344 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
347 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
348 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
354 return (*hv & h1) < 0;
364 /* Multiply two doubleword integers with doubleword result.
365 Return nonzero if the operation overflows according to UNSIGNED_P.
366 Each argument is given as two `HOST_WIDE_INT' pieces.
367 One argument is L1 and H1; the other, L2 and H2.
368 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
371 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
372 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
373 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
376 HOST_WIDE_INT arg1[4];
377 HOST_WIDE_INT arg2[4];
378 HOST_WIDE_INT prod[4 * 2];
379 unsigned HOST_WIDE_INT carry;
381 unsigned HOST_WIDE_INT toplow, neglow;
382 HOST_WIDE_INT tophigh, neghigh;
384 encode (arg1, l1, h1);
385 encode (arg2, l2, h2);
387 memset (prod, 0, sizeof prod);
389 for (i = 0; i < 4; i++)
392 for (j = 0; j < 4; j++)
395 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
396 carry += arg1[i] * arg2[j];
397 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
399 prod[k] = LOWPART (carry);
400 carry = HIGHPART (carry);
405 decode (prod, lv, hv);
406 decode (prod + 4, &toplow, &tophigh);
408 /* Unsigned overflow is immediate. */
410 return (toplow | tophigh) != 0;
412 /* Check for signed overflow by calculating the signed representation of the
413 top half of the result; it should agree with the low half's sign bit. */
416 neg_double (l2, h2, &neglow, &neghigh);
417 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
421 neg_double (l1, h1, &neglow, &neghigh);
422 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
424 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
427 /* Shift the doubleword integer in L1, H1 left by COUNT places
428 keeping only PREC bits of result.
429 Shift right if COUNT is negative.
430 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
431 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
434 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
435 HOST_WIDE_INT count, unsigned int prec,
436 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
438 unsigned HOST_WIDE_INT signmask;
442 rshift_double (l1, h1, -count, prec, lv, hv, arith);
446 if (SHIFT_COUNT_TRUNCATED)
449 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
451 /* Shifting by the host word size is undefined according to the
452 ANSI standard, so we must handle this as a special case. */
456 else if (count >= HOST_BITS_PER_WIDE_INT)
458 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
463 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
464 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
468 /* Sign extend all bits that are beyond the precision. */
470 signmask = -((prec > HOST_BITS_PER_WIDE_INT
471 ? ((unsigned HOST_WIDE_INT) *hv
472 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
473 : (*lv >> (prec - 1))) & 1);
475 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
477 else if (prec >= HOST_BITS_PER_WIDE_INT)
479 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
480 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
485 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
486 *lv |= signmask << prec;
490 /* Shift the doubleword integer in L1, H1 right by COUNT places
491 keeping only PREC bits of result. COUNT must be positive.
492 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
493 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
496 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
497 HOST_WIDE_INT count, unsigned int prec,
498 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
501 unsigned HOST_WIDE_INT signmask;
504 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
507 if (SHIFT_COUNT_TRUNCATED)
510 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
512 /* Shifting by the host word size is undefined according to the
513 ANSI standard, so we must handle this as a special case. */
517 else if (count >= HOST_BITS_PER_WIDE_INT)
520 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
524 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
526 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
529 /* Zero / sign extend all bits that are beyond the precision. */
531 if (count >= (HOST_WIDE_INT)prec)
536 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
538 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
540 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
541 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
546 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
547 *lv |= signmask << (prec - count);
551 /* Rotate the doubleword integer in L1, H1 left by COUNT places
552 keeping only PREC bits of result.
553 Rotate right if COUNT is negative.
554 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
557 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
558 HOST_WIDE_INT count, unsigned int prec,
559 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
561 unsigned HOST_WIDE_INT s1l, s2l;
562 HOST_WIDE_INT s1h, s2h;
568 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
569 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
574 /* Rotate the doubleword integer in L1, H1 left by COUNT places
575 keeping only PREC bits of result. COUNT must be positive.
576 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
579 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
580 HOST_WIDE_INT count, unsigned int prec,
581 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
583 unsigned HOST_WIDE_INT s1l, s2l;
584 HOST_WIDE_INT s1h, s2h;
590 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
591 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
/* NOTE(review): this region is extraction-damaged.  Every line carries a
   stale source line number fused to its front, and several physical lines
   (the return type line, braces, some statements, goto labels and switch
   case labels) are missing between the visible ones.  In addition, on the
   mul_double call below, `&ltwice' was mangled to `<wice' by an HTML
   entity-decoding artifact.  The visible logic is Knuth's Algorithm D
   (multiword division) on half-word digits; restore the full text from the
   upstream file before compiling.  */
596 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
597 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
598 CODE is a tree code for a kind of division, one of
599 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
601 It controls how the quotient is rounded to an integer.
602 Return nonzero if the operation overflows.
603 UNS nonzero says do unsigned division. */
606 div_and_round_double (enum tree_code code, int uns,
607 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
608 HOST_WIDE_INT hnum_orig,
609 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
610 HOST_WIDE_INT hden_orig,
611 unsigned HOST_WIDE_INT *lquo,
612 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
616 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
617 HOST_WIDE_INT den[4], quo[4];
619 unsigned HOST_WIDE_INT work;
620 unsigned HOST_WIDE_INT carry = 0;
621 unsigned HOST_WIDE_INT lnum = lnum_orig;
622 HOST_WIDE_INT hnum = hnum_orig;
623 unsigned HOST_WIDE_INT lden = lden_orig;
624 HOST_WIDE_INT hden = hden_orig;
/* Division by zero: flag overflow and divide by 1 to keep going.  */
627 if (hden == 0 && lden == 0)
628 overflow = 1, lden = 1;
630 /* Calculate quotient sign and convert operands to unsigned. */
636 /* (minimum integer) / (-1) is the only overflow case. */
637 if (neg_double (lnum, hnum, &lnum, &hnum)
638 && ((HOST_WIDE_INT) lden & hden) == -1)
644 neg_double (lden, hden, &lden, &hden);
648 if (hnum == 0 && hden == 0)
649 { /* single precision */
651 /* This unsigned division rounds toward zero. */
657 { /* trivial case: dividend < divisor */
658 /* hden != 0 already checked. */
665 memset (quo, 0, sizeof quo);
667 memset (num, 0, sizeof num); /* to zero 9th element */
668 memset (den, 0, sizeof den);
670 encode (num, lnum, hnum);
671 encode (den, lden, hden);
673 /* Special code for when the divisor < BASE. */
674 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
676 /* hnum != 0 already checked. */
677 for (i = 4 - 1; i >= 0; i--)
679 work = num[i] + carry * BASE;
680 quo[i] = work / lden;
686 /* Full double precision division,
687 with thanks to Don Knuth's "Seminumerical Algorithms". */
688 int num_hi_sig, den_hi_sig;
689 unsigned HOST_WIDE_INT quo_est, scale;
691 /* Find the highest nonzero divisor digit. */
692 for (i = 4 - 1;; i--)
699 /* Insure that the first digit of the divisor is at least BASE/2.
700 This is required by the quotient digit estimation algorithm. */
702 scale = BASE / (den[den_hi_sig] + 1);
704 { /* scale divisor and dividend */
706 for (i = 0; i <= 4 - 1; i++)
708 work = (num[i] * scale) + carry;
709 num[i] = LOWPART (work);
710 carry = HIGHPART (work);
715 for (i = 0; i <= 4 - 1; i++)
717 work = (den[i] * scale) + carry;
718 den[i] = LOWPART (work);
719 carry = HIGHPART (work);
720 if (den[i] != 0) den_hi_sig = i;
/* Main quotient-digit loop, most significant digit first.  */
727 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
729 /* Guess the next quotient digit, quo_est, by dividing the first
730 two remaining dividend digits by the high order quotient digit.
731 quo_est is never low and is at most 2 high. */
732 unsigned HOST_WIDE_INT tmp;
734 num_hi_sig = i + den_hi_sig + 1;
735 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
736 if (num[num_hi_sig] != den[den_hi_sig])
737 quo_est = work / den[den_hi_sig];
741 /* Refine quo_est so it's usually correct, and at most one high. */
742 tmp = work - quo_est * den[den_hi_sig];
744 && (den[den_hi_sig - 1] * quo_est
745 > (tmp * BASE + num[num_hi_sig - 2])))
748 /* Try QUO_EST as the quotient digit, by multiplying the
749 divisor by QUO_EST and subtracting from the remaining dividend.
750 Keep in mind that QUO_EST is the I - 1st digit. */
753 for (j = 0; j <= den_hi_sig; j++)
755 work = quo_est * den[j] + carry;
756 carry = HIGHPART (work);
757 work = num[i + j] - LOWPART (work);
758 num[i + j] = LOWPART (work);
759 carry += HIGHPART (work) != 0;
762 /* If quo_est was high by one, then num[i] went negative and
763 we need to correct things. */
764 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
767 carry = 0; /* add divisor back in */
768 for (j = 0; j <= den_hi_sig; j++)
770 work = num[i + j] + den[j] + carry;
771 carry = HIGHPART (work);
772 num[i + j] = LOWPART (work);
775 num [num_hi_sig] += carry;
778 /* Store the quotient digit. */
783 decode (quo, lquo, hquo);
786 /* If result is negative, make it so. */
788 neg_double (*lquo, *hquo, lquo, hquo);
790 /* Compute trial remainder: rem = num - (quo * den) */
791 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
792 neg_double (*lrem, *hrem, lrem, hrem);
793 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Rounding of the quotient, per CODE (switch case labels are partially
   missing in this extracted text).  */
798 case TRUNC_MOD_EXPR: /* round toward zero */
799 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
803 case FLOOR_MOD_EXPR: /* round toward negative infinity */
804 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
807 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
815 case CEIL_MOD_EXPR: /* round toward positive infinity */
816 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
818 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
826 case ROUND_MOD_EXPR: /* round to closest integer */
828 unsigned HOST_WIDE_INT labs_rem = *lrem;
829 HOST_WIDE_INT habs_rem = *hrem;
830 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
831 HOST_WIDE_INT habs_den = hden, htwice;
833 /* Get absolute values. */
835 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
837 neg_double (lden, hden, &labs_den, &habs_den);
839 /* If (2 * abs (lrem) >= abs (lden)) */
840 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
841 labs_rem, habs_rem, <wice, &htwice);
843 if (((unsigned HOST_WIDE_INT) habs_den
844 < (unsigned HOST_WIDE_INT) htwice)
845 || (((unsigned HOST_WIDE_INT) habs_den
846 == (unsigned HOST_WIDE_INT) htwice)
847 && (labs_den < ltwice)))
851 add_double (*lquo, *hquo,
852 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
855 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
867 /* Compute true remainder: rem = num - (quo * den) */
868 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
869 neg_double (*lrem, *hrem, lrem, hrem);
870 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
874 /* If ARG2 divides ARG1 with zero remainder, carries out the division
875 of type CODE and returns the quotient.
876 Otherwise returns NULL_TREE. */
879 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
881 unsigned HOST_WIDE_INT int1l, int2l;
882 HOST_WIDE_INT int1h, int2h;
883 unsigned HOST_WIDE_INT quol, reml;
884 HOST_WIDE_INT quoh, remh;
885 tree type = TREE_TYPE (arg1);
886 int uns = TYPE_UNSIGNED (type);
888 int1l = TREE_INT_CST_LOW (arg1);
889 int1h = TREE_INT_CST_HIGH (arg1);
890 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
891 &obj[some_exotic_number]. */
892 if (POINTER_TYPE_P (type))
895 type = signed_type_for (type);
896 fit_double_type (int1l, int1h, &int1l, &int1h,
900 fit_double_type (int1l, int1h, &int1l, &int1h, type);
901 int2l = TREE_INT_CST_LOW (arg2);
902 int2h = TREE_INT_CST_HIGH (arg2);
904 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
905 &quol, &quoh, &reml, &remh);
906 if (remh != 0 || reml != 0)
909 return build_int_cst_wide (type, quol, quoh);
912 /* This is nonzero if we should defer warnings about undefined
913 overflow. This facility exists because these warnings are a
914 special case. The code to estimate loop iterations does not want
915 to issue any warnings, since it works with expressions which do not
916 occur in user code. Various bits of cleanup code call fold(), but
917 only use the result if it has certain characteristics (e.g., is a
918 constant); that code only wants to issue a warning if the result is
921 static int fold_deferring_overflow_warnings;
923 /* If a warning about undefined overflow is deferred, this is the
924 warning. Note that this may cause us to turn two warnings into
925 one, but that is fine since it is sufficient to only give one
926 warning per expression. */
928 static const char* fold_deferred_overflow_warning;
930 /* If a warning about undefined overflow is deferred, this is the
931 level at which the warning should be emitted. */
933 static enum warn_strict_overflow_code fold_deferred_overflow_code;
935 /* Start deferring overflow warnings. We could use a stack here to
936 permit nested calls, but at present it is not necessary. */
939 fold_defer_overflow_warnings (void)
941 ++fold_deferring_overflow_warnings;
944 /* Stop deferring overflow warnings. If there is a pending warning,
945 and ISSUE is true, then issue the warning if appropriate. STMT is
946 the statement with which the warning should be associated (used for
947 location information); STMT may be NULL. CODE is the level of the
948 warning--a warn_strict_overflow_code value. This function will use
949 the smaller of CODE and the deferred code when deciding whether to
950 issue the warning. CODE may be zero to mean to always use the
954 fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
959 gcc_assert (fold_deferring_overflow_warnings > 0);
960 --fold_deferring_overflow_warnings;
961 if (fold_deferring_overflow_warnings > 0)
963 if (fold_deferred_overflow_warning != NULL
965 && code < (int) fold_deferred_overflow_code)
966 fold_deferred_overflow_code = code;
970 warnmsg = fold_deferred_overflow_warning;
971 fold_deferred_overflow_warning = NULL;
973 if (!issue || warnmsg == NULL)
976 /* Use the smallest code level when deciding to issue the
978 if (code == 0 || code > (int) fold_deferred_overflow_code)
979 code = fold_deferred_overflow_code;
981 if (!issue_strict_overflow_warning (code))
984 if (stmt == NULL_TREE || !expr_has_location (stmt))
985 locus = input_location;
987 locus = expr_location (stmt);
988 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
991 /* Stop deferring overflow warnings, ignoring any deferred
995 fold_undefer_and_ignore_overflow_warnings (void)
997 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
1000 /* Whether we are deferring overflow warnings. */
1003 fold_deferring_overflow_warnings_p (void)
1005 return fold_deferring_overflow_warnings > 0;
1008 /* This is called when we fold something based on the fact that signed
1009 overflow is undefined. */
1012 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1014 gcc_assert (!flag_wrapv && !flag_trapv);
1015 if (fold_deferring_overflow_warnings > 0)
1017 if (fold_deferred_overflow_warning == NULL
1018 || wc < fold_deferred_overflow_code)
1020 fold_deferred_overflow_warning = gmsgid;
1021 fold_deferred_overflow_code = wc;
1024 else if (issue_strict_overflow_warning (wc))
1025 warning (OPT_Wstrict_overflow, gmsgid);
1028 /* Return true if the built-in mathematical function specified by CODE
1029 is odd, i.e. -f(x) == f(-x). */
1032 negate_mathfn_p (enum built_in_function code)
1036 CASE_FLT_FN (BUILT_IN_ASIN):
1037 CASE_FLT_FN (BUILT_IN_ASINH):
1038 CASE_FLT_FN (BUILT_IN_ATAN):
1039 CASE_FLT_FN (BUILT_IN_ATANH):
1040 CASE_FLT_FN (BUILT_IN_CASIN):
1041 CASE_FLT_FN (BUILT_IN_CASINH):
1042 CASE_FLT_FN (BUILT_IN_CATAN):
1043 CASE_FLT_FN (BUILT_IN_CATANH):
1044 CASE_FLT_FN (BUILT_IN_CBRT):
1045 CASE_FLT_FN (BUILT_IN_CPROJ):
1046 CASE_FLT_FN (BUILT_IN_CSIN):
1047 CASE_FLT_FN (BUILT_IN_CSINH):
1048 CASE_FLT_FN (BUILT_IN_CTAN):
1049 CASE_FLT_FN (BUILT_IN_CTANH):
1050 CASE_FLT_FN (BUILT_IN_ERF):
1051 CASE_FLT_FN (BUILT_IN_LLROUND):
1052 CASE_FLT_FN (BUILT_IN_LROUND):
1053 CASE_FLT_FN (BUILT_IN_ROUND):
1054 CASE_FLT_FN (BUILT_IN_SIN):
1055 CASE_FLT_FN (BUILT_IN_SINH):
1056 CASE_FLT_FN (BUILT_IN_TAN):
1057 CASE_FLT_FN (BUILT_IN_TANH):
1058 CASE_FLT_FN (BUILT_IN_TRUNC):
1061 CASE_FLT_FN (BUILT_IN_LLRINT):
1062 CASE_FLT_FN (BUILT_IN_LRINT):
1063 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1064 CASE_FLT_FN (BUILT_IN_RINT):
1065 return !flag_rounding_math;
1073 /* Check whether we may negate an integer constant T without causing
1077 may_negate_without_overflow_p (tree t)
1079 unsigned HOST_WIDE_INT val;
1083 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1085 type = TREE_TYPE (t);
1086 if (TYPE_UNSIGNED (type))
1089 prec = TYPE_PRECISION (type);
1090 if (prec > HOST_BITS_PER_WIDE_INT)
1092 if (TREE_INT_CST_LOW (t) != 0)
1094 prec -= HOST_BITS_PER_WIDE_INT;
1095 val = TREE_INT_CST_HIGH (t);
1098 val = TREE_INT_CST_LOW (t);
1099 if (prec < HOST_BITS_PER_WIDE_INT)
1100 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1101 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
/* NOTE(review): this region is extraction-damaged.  Every line carries a
   stale source line number fused to its front, and lines are missing
   between the visible ones -- in particular the function's return type,
   braces, several statements, and most of the switch's `case' labels
   (INTEGER_CST, REAL_CST, COMPLEX_CST, PLUS_EXPR, MINUS_EXPR, MULT_EXPR,
   NOP_EXPR, CALL_EXPR, RSHIFT_EXPR, ...), so the visible fragments below
   belong to different cases.  Restore the full text from the upstream
   file before compiling.  */
1104 /* Determine whether an expression T can be cheaply negated using
1105 the function negate_expr without introducing undefined overflow. */
1108 negate_expr_p (tree t)
1115 type = TREE_TYPE (t);
1117 STRIP_SIGN_NOPS (t);
1118 switch (TREE_CODE (t))
1121 if (TYPE_OVERFLOW_WRAPS (type))
1124 /* Check that -CST will not overflow type. */
1125 return may_negate_without_overflow_p (t);
1127 return (INTEGRAL_TYPE_P (type)
1128 && TYPE_OVERFLOW_WRAPS (type));
1135 return negate_expr_p (TREE_REALPART (t))
1136 && negate_expr_p (TREE_IMAGPART (t));
1139 return negate_expr_p (TREE_OPERAND (t, 0))
1140 && negate_expr_p (TREE_OPERAND (t, 1));
1143 return negate_expr_p (TREE_OPERAND (t, 0));
1146 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1147 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1149 /* -(A + B) -> (-B) - A. */
1150 if (negate_expr_p (TREE_OPERAND (t, 1))
1151 && reorder_operands_p (TREE_OPERAND (t, 0),
1152 TREE_OPERAND (t, 1)))
1154 /* -(A + B) -> (-A) - B. */
1155 return negate_expr_p (TREE_OPERAND (t, 0));
1158 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1159 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1160 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1161 && reorder_operands_p (TREE_OPERAND (t, 0),
1162 TREE_OPERAND (t, 1));
1165 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1171 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1172 return negate_expr_p (TREE_OPERAND (t, 1))
1173 || negate_expr_p (TREE_OPERAND (t, 0));
1176 case TRUNC_DIV_EXPR:
1177 case ROUND_DIV_EXPR:
1178 case FLOOR_DIV_EXPR:
1180 case EXACT_DIV_EXPR:
1181 /* In general we can't negate A / B, because if A is INT_MIN and
1182 B is 1, we may turn this into INT_MIN / -1 which is undefined
1183 and actually traps on some architectures. But if overflow is
1184 undefined, we can negate, because - (INT_MIN / 1) is an
1186 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1187 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1189 return negate_expr_p (TREE_OPERAND (t, 1))
1190 || negate_expr_p (TREE_OPERAND (t, 0));
1193 /* Negate -((double)float) as (double)(-float). */
1194 if (TREE_CODE (type) == REAL_TYPE)
1196 tree tem = strip_float_extensions (t);
1198 return negate_expr_p (tem);
1203 /* Negate -f(x) as f(-x). */
1204 if (negate_mathfn_p (builtin_mathfn_code (t)))
1205 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1209 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1210 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1212 tree op1 = TREE_OPERAND (t, 1);
1213 if (TREE_INT_CST_HIGH (op1) == 0
1214 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1215 == TREE_INT_CST_LOW (op1))
1226 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1227 simplification is possible.
1228 If negate_expr_p would return true for T, NULL_TREE will never be
/* Fold -T into a simplified tree, or return NULL_TREE when no
   simplification applies.  Dispatches on TREE_CODE (t); several interior
   lines (case labels, braces) are elided in this view, so per-case
   comments below are keyed to the visible bodies.  */
1232 fold_negate_expr (tree t)
1234 tree type = TREE_TYPE (t);
1237 switch (TREE_CODE (t))
1239 /* Convert - (~A) to A + 1.  */
1241 if (INTEGRAL_TYPE_P (type))
1242 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1243 build_int_cst (type, 1));
/* Integer constant (presumably the INTEGER_CST case; label elided):
   negate the constant, but only keep the result when it does not newly
   set TREE_OVERFLOW under a trapping-overflow type.  */
1247 tem = fold_negate_const (t, type);
1248 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1249 || !TYPE_OVERFLOW_TRAPS (type))
/* Real constant: negation of a REAL_CST can overflow only on
   two's-complement FP formats.  */
1254 tem = fold_negate_const (t, type);
1255 /* Two's complement FP formats, such as c4x, may overflow.  */
1256 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
/* Complex constant: negate both parts and rebuild, but only when both
   negations folded down to constants.  */
1262 tree rpart = negate_expr (TREE_REALPART (t));
1263 tree ipart = negate_expr (TREE_IMAGPART (t));
1265 if ((TREE_CODE (rpart) == REAL_CST
1266 && TREE_CODE (ipart) == REAL_CST)
1267 || (TREE_CODE (rpart) == INTEGER_CST
1268 && TREE_CODE (ipart) == INTEGER_CST))
1269 return build_complex (type, rpart, ipart);
/* COMPLEX_EXPR: distribute negation over both operands.  */
1274 if (negate_expr_p (t))
1275 return fold_build2 (COMPLEX_EXPR, type,
1276 fold_negate_expr (TREE_OPERAND (t, 0)),
1277 fold_negate_expr (TREE_OPERAND (t, 1)));
/* CONJ_EXPR: -conj(z) == conj(-z).  */
1281 if (negate_expr_p (t))
1282 return fold_build1 (CONJ_EXPR, type,
1283 fold_negate_expr (TREE_OPERAND (t, 0)));
/* NEGATE_EXPR: -(-A) is just A.  */
1287 return TREE_OPERAND (t, 0);
/* PLUS_EXPR: only rewrite when sign-dependent rounding and signed
   zeros are not honored for this mode.  */
1290 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1291 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1293 /* -(A + B) -> (-B) - A.  */
1294 if (negate_expr_p (TREE_OPERAND (t, 1))
1295 && reorder_operands_p (TREE_OPERAND (t, 0),
1296 TREE_OPERAND (t, 1)))
1298 tem = negate_expr (TREE_OPERAND (t, 1));
1299 return fold_build2 (MINUS_EXPR, type,
1300 tem, TREE_OPERAND (t, 0));
1303 /* -(A + B) -> (-A) - B.  */
1304 if (negate_expr_p (TREE_OPERAND (t, 0)))
1306 tem = negate_expr (TREE_OPERAND (t, 0));
1307 return fold_build2 (MINUS_EXPR, type,
1308 tem, TREE_OPERAND (t, 1));
1314 /* - (A - B) -> B - A  */
1315 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1316 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1317 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1318 return fold_build2 (MINUS_EXPR, type,
1319 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
/* Multiplication-like case: unsigned types are excluded; push the
   negation into whichever operand accepts it.  */
1323 if (TYPE_UNSIGNED (type))
1329 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1331 tem = TREE_OPERAND (t, 1);
1332 if (negate_expr_p (tem))
1333 return fold_build2 (TREE_CODE (t), type,
1334 TREE_OPERAND (t, 0), negate_expr (tem));
1335 tem = TREE_OPERAND (t, 0);
1336 if (negate_expr_p (tem))
1337 return fold_build2 (TREE_CODE (t), type,
1338 negate_expr (tem), TREE_OPERAND (t, 1));
1342 case TRUNC_DIV_EXPR:
1343 case ROUND_DIV_EXPR:
1344 case FLOOR_DIV_EXPR:
1346 case EXACT_DIV_EXPR:
1347 /* In general we can't negate A / B, because if A is INT_MIN and
1348 B is 1, we may turn this into INT_MIN / -1 which is undefined
1349 and actually traps on some architectures.  But if overflow is
1350 undefined, we can negate, because - (INT_MIN / 1) is an
1352 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
/* Warn only when the transformation actually relies on undefined
   signed overflow (i.e. a non-constant or INT_MIN-prone operand).  */
1354 const char * const warnmsg = G_("assuming signed overflow does not "
1355 "occur when negating a division");
1356 tem = TREE_OPERAND (t, 1);
1357 if (negate_expr_p (tem))
1359 if (INTEGRAL_TYPE_P (type)
1360 && (TREE_CODE (tem) != INTEGER_CST
1361 || integer_onep (tem)))
1362 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1363 return fold_build2 (TREE_CODE (t), type,
1364 TREE_OPERAND (t, 0), negate_expr (tem));
1366 tem = TREE_OPERAND (t, 0);
1367 if (negate_expr_p (tem))
1369 if (INTEGRAL_TYPE_P (type)
1370 && (TREE_CODE (tem) != INTEGER_CST
1371 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1372 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1373 return fold_build2 (TREE_CODE (t), type,
1374 negate_expr (tem), TREE_OPERAND (t, 1));
1380 /* Convert -((double)float) into (double)(-float).  */
1381 if (TREE_CODE (type) == REAL_TYPE)
1383 tem = strip_float_extensions (t);
1384 if (tem != t && negate_expr_p (tem))
1385 return negate_expr (tem);
1390 /* Negate -f(x) as f(-x).  */
1391 if (negate_mathfn_p (builtin_mathfn_code (t))
1392 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1396 fndecl = get_callee_fndecl (t);
1397 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1398 return build_call_expr (fndecl, 1, arg);
1403 /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
1404 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1406 tree op1 = TREE_OPERAND (t, 1);
/* The rewrite applies only when the shift count equals precision-1,
   i.e. the shift extracts/replicates the sign bit.  */
1407 if (TREE_INT_CST_HIGH (op1) == 0
1408 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1409 == TREE_INT_CST_LOW (op1))
1411 tree ntype = TYPE_UNSIGNED (type)
1412 ? signed_type_for (type)
1413 : unsigned_type_for (type);
1414 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1415 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1416 return fold_convert (type, temp);
1428 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1429 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1430 return NULL_TREE. */
/* Negate T.  Unlike fold_negate_expr, always produces a tree: when no
   folded form exists, falls back to building an explicit NEGATE_EXPR.
   The result is converted back to T's original (pre-STRIP) type.  */
1433 negate_expr (tree t)
1440 type = TREE_TYPE (t);
/* Strip sign-preserving no-op conversions before trying to fold.  */
1441 STRIP_SIGN_NOPS (t);
1443 tem = fold_negate_expr (t);
/* Fallback when fold_negate_expr found no simplification.  */
1445 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1446 return fold_convert (type, tem);
1449 /* Split a tree IN into a constant, literal and variable parts that could be
1450 combined with CODE to make IN. "constant" means an expression with
1451 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1452 commutative arithmetic operation. Store the constant part into *CONP,
1453 the literal in *LITP and return the variable part. If a part isn't
1454 present, set it to null. If the tree does not decompose in this way,
1455 return the entire tree as the variable part and the other parts as null.
1457 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1458 case, we negate an operand that was subtracted. Except if it is a
1459 literal for which we use *MINUS_LITP instead.
1461 If NEGATE_P is true, we are negating all of IN, again except a literal
1462 for which we use *MINUS_LITP instead.
1464 If IN is itself a literal or constant, return it as appropriate.
1466 Note that we do not guarantee that any of the three values will be the
1467 same type as IN, but they will have the same signedness and mode. */
/* Decompose IN (combined under CODE) into literal (*LITP / *MINUS_LITP),
   constant-but-not-literal (*CONP) and variable parts; see the block
   comment above for the full contract.  */
1470 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1471 tree *minus_litp, int negate_p)
1479 /* Strip any conversions that don't change the machine mode or signedness.  */
1480 STRIP_SIGN_NOPS (in);
/* IN itself a literal: handled here (body elided in this view).  */
1482 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1484 else if (TREE_CODE (in) == code
1485 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1486 /* We can associate addition and subtraction together (even
1487 though the C standard doesn't say so) for integers because
1488 the value is not affected.  For reals, the value might be
1489 affected, so we can't.  */
1490 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1491 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1493 tree op0 = TREE_OPERAND (in, 0);
1494 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p: operand 1 is effectively subtracted; neg_*_p track which
   extracted parts must later be negated.  */
1495 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1496 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1498 /* First see if either of the operands is a literal, then a constant.  */
1499 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1500 *litp = op0, op0 = 0;
1501 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1502 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1504 if (op0 != 0 && TREE_CONSTANT (op0))
1505 *conp = op0, op0 = 0;
1506 else if (op1 != 0 && TREE_CONSTANT (op1))
1507 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1509 /* If we haven't dealt with either operand, this is not a case we can
1510 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
1511 if (op0 != 0 && op1 != 0)
1516 var = op1, neg_var_p = neg1_p;
1518 /* Now do any needed negations.  */
/* A subtracted literal moves to *MINUS_LITP rather than being negated.  */
1520 *minus_litp = *litp, *litp = 0;
1522 *conp = negate_expr (*conp);
1524 var = negate_expr (var);
1526 else if (TREE_CONSTANT (in))
/* NEGATE_P: negate the whole decomposition, swapping the literal
   between *LITP and *MINUS_LITP instead of negating it.  */
1534 *minus_litp = *litp, *litp = 0;
1535 else if (*minus_litp)
1536 *litp = *minus_litp, *minus_litp = 0;
1537 *conp = negate_expr (*conp);
1538 var = negate_expr (var);
1544 /* Re-associate trees split by the above function. T1 and T2 are either
1545 expressions to associate or null. Return the new expression, if any. If
1546 we build an operation, do it in TYPE and with CODE. */
/* Recombine T1 and T2 (either may be null) under CODE in TYPE, undoing a
   split_tree decomposition.  Uses build2 rather than fold_build2 when
   folding could recurse infinitely.  */
1549 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1556 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1557 try to fold this since we will have infinite recursion.  But do
1558 deal with any NEGATE_EXPRs.  */
1559 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1560 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1562 if (code == PLUS_EXPR)
/* A + (-B) and (-A) + B become subtractions so no NEGATE_EXPR
   survives in the output.  */
1564 if (TREE_CODE (t1) == NEGATE_EXPR)
1565 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1566 fold_convert (type, TREE_OPERAND (t1, 0)));
1567 else if (TREE_CODE (t2) == NEGATE_EXPR)
1568 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1569 fold_convert (type, TREE_OPERAND (t2, 0)));
1570 else if (integer_zerop (t2))
1571 return fold_convert (type, t1);
1573 else if (code == MINUS_EXPR)
1575 if (integer_zerop (t2))
1576 return fold_convert (type, t1);
/* Unfoldable combination: build the raw expression.  */
1579 return build2 (code, type, fold_convert (type, t1),
1580 fold_convert (type, t2));
/* Safe to fold in the general case.  */
1583 return fold_build2 (code, type, fold_convert (type, t1),
1584 fold_convert (type, t2));
1587 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1588 for use in int_const_binop, size_binop and size_diffop. */
/* Return nonzero when TYPE1 and TYPE2 are equivalent integer (or
   pointer) types: same signedness, precision and machine mode.  Used to
   sanity-check operands of int_const_binop/size_binop/size_diffop.  */
1591 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
1593 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1595 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1610 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1611 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1612 && TYPE_MODE (type1) == TYPE_MODE (type2);
1616 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1617 to produce a new constant. Return NULL_TREE if we don't know how
1618 to evaluate CODE at compile-time.
1620 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* Fold CODE applied to the two INTEGER_CSTs ARG1 and ARG2.  Each
   constant is handled as a double-word value (low/high HOST_WIDE_INT
   halves); the *_double helpers do the wide arithmetic.  When NOTRUNC,
   the raw result is kept and overflow flags are propagated by hand;
   otherwise force_fit_type_double truncates/extends to the type.  */
1623 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1625 unsigned HOST_WIDE_INT int1l, int2l;
1626 HOST_WIDE_INT int1h, int2h;
1627 unsigned HOST_WIDE_INT low;
/* Receivers for the unused half of div/mod results.  */
1629 unsigned HOST_WIDE_INT garbagel;
1630 HOST_WIDE_INT garbageh;
1632 tree type = TREE_TYPE (arg1);
1633 int uns = TYPE_UNSIGNED (type);
/* sizetype values get overflow tracking even though unsigned.  */
1635 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1638 int1l = TREE_INT_CST_LOW (arg1);
1639 int1h = TREE_INT_CST_HIGH (arg1);
1640 int2l = TREE_INT_CST_LOW (arg2);
1641 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise ops operate independently on the two halves.  */
1646 low = int1l | int2l, hi = int1h | int2h;
1650 low = int1l ^ int2l, hi = int1h ^ int2h;
1654 low = int1l & int2l, hi = int1h & int2h;
1660 /* It's unclear from the C standard whether shifts can overflow.
1661 The following code ignores overflow; perhaps a C standard
1662 interpretation ruling is needed.  */
1663 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1670 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1675 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction is implemented as addition of the negation; the
   overflow test accounts for that composition.  */
1679 neg_double (int2l, int2h, &low, &hi);
1680 add_double (int1l, int1h, low, hi, &low, &hi);
1681 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1685 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1688 case TRUNC_DIV_EXPR:
1689 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1690 case EXACT_DIV_EXPR:
1691 /* This is a shortcut for a common special case.  */
1692 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1693 && !TREE_OVERFLOW (arg1)
1694 && !TREE_OVERFLOW (arg2)
1695 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1697 if (code == CEIL_DIV_EXPR)
/* Both operands fit in one nonnegative word: host division works.  */
1700 low = int1l / int2l, hi = 0;
1704 /* ... fall through ...  */
1706 case ROUND_DIV_EXPR:
/* Division by zero: not folded (handling elided here).  */
1707 if (int2h == 0 && int2l == 0)
/* Division by one returns ARG1 unchanged.  */
1709 if (int2h == 0 && int2l == 1)
1711 low = int1l, hi = int1h;
/* X / X for nonzero X (handling elided here).  */
1714 if (int1l == int2l && int1h == int2h
1715 && ! (int1l == 0 && int1h == 0))
1720 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1721 &low, &hi, &garbagel, &garbageh);
1724 case TRUNC_MOD_EXPR:
1725 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1726 /* This is a shortcut for a common special case.  */
1727 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1728 && !TREE_OVERFLOW (arg1)
1729 && !TREE_OVERFLOW (arg2)
1730 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1732 if (code == CEIL_MOD_EXPR)
1734 low = int1l % int2l, hi = 0;
1738 /* ... fall through ...  */
1740 case ROUND_MOD_EXPR:
1741 if (int2h == 0 && int2l == 0)
/* Same helper as division, but the remainder half is kept.  */
1743 overflow = div_and_round_double (code, uns,
1744 int1l, int1h, int2l, int2h,
1745 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compare high words first, then low; LOW temporarily holds
   the boolean "arg1 < arg2".  */
1751 low = (((unsigned HOST_WIDE_INT) int1h
1752 < (unsigned HOST_WIDE_INT) int2h)
1753 || (((unsigned HOST_WIDE_INT) int1h
1754 == (unsigned HOST_WIDE_INT) int2h)
1757 low = (int1h < int2h
1758 || (int1h == int2h && int1l < int2l));
1760 if (low == (code == MIN_EXPR))
1761 low = int1l, hi = int1h;
1763 low = int2l, hi = int2h;
1772 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1774 /* Propagate overflow flags ourselves.  */
1775 if (((!uns || is_sizetype) && overflow)
1776 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1779 TREE_OVERFLOW (t) = 1;
/* !NOTRUNC path: let force_fit_type_double truncate and set flags.  */
1783 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1784 ((!uns || is_sizetype) && overflow)
1785 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1790 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1791 constant. We assume ARG1 and ARG2 have the same data type, or at least
1792 are the same kind of constant and the same machine mode. Return zero if
1793 combining the constants is not allowed in the current operating mode.
1795 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* Fold CODE on two constants of matching kind: dispatches to
   int_const_binop for INTEGER_CST, does real arithmetic for REAL_CST,
   and expands complex arithmetic recursively for COMPLEX_CST.  Returns
   zero when the combination is not allowed in the current mode.  */
1798 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1800 /* Sanity check for the recursive cases.  */
1807 if (TREE_CODE (arg1) == INTEGER_CST)
1808 return int_const_binop (code, arg1, arg2, notrunc);
1810 if (TREE_CODE (arg1) == REAL_CST)
1812 enum machine_mode mode;
1815 REAL_VALUE_TYPE value;
1816 REAL_VALUE_TYPE result;
1820 /* The following codes are handled by real_arithmetic.  */
1835 d1 = TREE_REAL_CST (arg1);
1836 d2 = TREE_REAL_CST (arg2);
1838 type = TREE_TYPE (arg1);
1839 mode = TYPE_MODE (type);
1841 /* Don't perform operation if we honor signaling NaNs and
1842 either operand is a NaN.  */
1843 if (HONOR_SNANS (mode)
1844 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1847 /* Don't perform operation if it would raise a division
1848 by zero exception.  */
1849 if (code == RDIV_EXPR
1850 && REAL_VALUES_EQUAL (d2, dconst0)
1851 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1854 /* If either operand is a NaN, just return it.  Otherwise, set up
1855 for floating-point trap; we return an overflow.  */
1856 if (REAL_VALUE_ISNAN (d1))
1858 else if (REAL_VALUE_ISNAN (d2))
/* INEXACT records whether rounding occurred in the computation.  */
1861 inexact = real_arithmetic (&value, code, &d1, &d2);
1862 real_convert (&result, mode, &value);
1864 /* Don't constant fold this floating point operation if
1865 the result has overflowed and flag_trapping_math.  */
1866 if (flag_trapping_math
1867 && MODE_HAS_INFINITIES (mode)
1868 && REAL_VALUE_ISINF (result)
1869 && !REAL_VALUE_ISINF (d1)
1870 && !REAL_VALUE_ISINF (d2))
1873 /* Don't constant fold this floating point operation if the
1874 result may dependent upon the run-time rounding mode and
1875 flag_rounding_math is set, or if GCC's software emulation
1876 is unable to accurately represent the result.  */
1877 if ((flag_rounding_math
1878 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1879 && !flag_unsafe_math_optimizations))
1880 && (inexact || !real_identical (&result, &value)))
1883 t = build_real (type, result);
1885 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1889 if (TREE_CODE (arg1) == COMPLEX_CST)
1891 tree type = TREE_TYPE (arg1);
1892 tree r1 = TREE_REALPART (arg1);
1893 tree i1 = TREE_IMAGPART (arg1);
1894 tree r2 = TREE_REALPART (arg2);
1895 tree i2 = TREE_IMAGPART (arg2);
/* Componentwise case (add/sub; labels elided here).  */
1902 real = const_binop (code, r1, r2, notrunc);
1903 imag = const_binop (code, i1, i2, notrunc);
/* Complex multiplication: (r1 r2 - i1 i2) + (r1 i2 + i1 r2) i.  */
1907 real = const_binop (MINUS_EXPR,
1908 const_binop (MULT_EXPR, r1, r2, notrunc),
1909 const_binop (MULT_EXPR, i1, i2, notrunc),
1911 imag = const_binop (PLUS_EXPR,
1912 const_binop (MULT_EXPR, r1, i2, notrunc),
1913 const_binop (MULT_EXPR, i1, r2, notrunc),
/* Complex division: multiply by the conjugate and divide by
   |arg2|^2 = r2^2 + i2^2.  */
1920 = const_binop (PLUS_EXPR,
1921 const_binop (MULT_EXPR, r2, r2, notrunc),
1922 const_binop (MULT_EXPR, i2, i2, notrunc),
1925 = const_binop (PLUS_EXPR,
1926 const_binop (MULT_EXPR, r1, r2, notrunc),
1927 const_binop (MULT_EXPR, i1, i2, notrunc),
1930 = const_binop (MINUS_EXPR,
1931 const_binop (MULT_EXPR, i1, r2, notrunc),
1932 const_binop (MULT_EXPR, r1, i2, notrunc),
/* Integral complex types divide with truncation.  */
1935 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1936 code = TRUNC_DIV_EXPR;
1938 real = const_binop (code, t1, magsquared, notrunc);
1939 imag = const_binop (code, t2, magsquared, notrunc);
1948 return build_complex (type, real, imag);
1954 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1955 indicates which particular sizetype to create. */
/* Build an INTEGER_CST holding NUMBER in the sizetype selected by KIND
   (index into sizetype_tab).  */
1958 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1960 return build_int_cst (sizetype_tab[(int) kind], number);
1963 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1964 is a tree code. The type of the result is taken from the operands.
1965 Both must be equivalent integer types, ala int_binop_types_match_p.
1966 If the operands are constant, so is the result. */
/* Apply CODE to ARG0/ARG1 whose types must match per
   int_binop_types_match_p; fast-paths identity operations on constants
   (x+0, x-0, 1*x) before falling back to folding.  */
1969 size_binop (enum tree_code code, tree arg0, tree arg1)
1971 tree type = TREE_TYPE (arg0);
1973 if (arg0 == error_mark_node || arg1 == error_mark_node)
1974 return error_mark_node;
1976 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1979 /* Handle the special case of two integer constants faster.  */
1980 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1982 /* And some specific cases even faster than that.  */
1983 if (code == PLUS_EXPR)
/* 0 + x and x + 0: return the other operand (only when the zero
   carries no overflow flag that would be lost).  */
1985 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1987 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1990 else if (code == MINUS_EXPR)
1992 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1995 else if (code == MULT_EXPR)
1997 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2001 /* Handle general case of two integer constants.  */
2002 return int_const_binop (code, arg0, arg1, 0);
/* Non-constant operands: build a folded expression.  */
2005 return fold_build2 (code, type, arg0, arg1);
2008 /* Given two values, either both of sizetype or both of bitsizetype,
2009 compute the difference between the two values. Return the value
2010 in signed type corresponding to the type of the operands. */
/* Compute ARG0 - ARG1 for two sizetype (or bitsizetype) values,
   returning the result in the corresponding signed type.  Constants are
   ordered first so the unsigned subtraction can never wrap.  */
2013 size_diffop (tree arg0, tree arg1)
2015 tree type = TREE_TYPE (arg0);
2018 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2021 /* If the type is already signed, just do the simple thing.  */
2022 if (!TYPE_UNSIGNED (type))
2023 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart type.  */
2025 if (type == sizetype)
2027 else if (type == bitsizetype)
2028 ctype = sbitsizetype;
2030 ctype = signed_type_for (type);
2032 /* If either operand is not a constant, do the conversions to the signed
2033 type and subtract.  The hardware will do the right thing with any
2034 overflow in the subtraction.  */
2035 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2036 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2037 fold_convert (ctype, arg1));
2039 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2040 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2041 overflow) and negate (which can't either).  Special-case a result
2042 of zero while we're here.  */
2043 if (tree_int_cst_equal (arg0, arg1))
2044 return build_int_cst (ctype, 0);
2045 else if (tree_int_cst_lt (arg1, arg0))
2046 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2048 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2049 fold_convert (ctype, size_binop (MINUS_EXPR,
2053 /* A subroutine of fold_convert_const handling conversions of an
2054 INTEGER_CST to another integer type. */
/* Convert INTEGER_CST ARG1 to integer TYPE, sign-extending or
   truncating via force_fit_type_double.  Overflow is flagged when a
   negative value is converted to a "more unsigned" type, or was already
   flagged on ARG1 — but never for pointer sources.  */
2057 fold_convert_const_int_from_int (tree type, tree arg1)
2061 /* Given an integer constant, make new constant with new type,
2062 appropriately sign-extended or truncated.  */
2063 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2064 TREE_INT_CST_HIGH (arg1),
2065 /* Don't set the overflow when
2066 converting a pointer */
2067 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2068 (TREE_INT_CST_HIGH (arg1) < 0
2069 && (TYPE_UNSIGNED (type)
2070 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2071 | TREE_OVERFLOW (arg1));
2076 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2077 to an integer type. */
/* Convert REAL_CST ARG1 to integer TYPE under conversion CODE,
   saturating at TYPE's bounds and mapping NaN to zero (with overflow
   flagged) per the Java-style semantics described below.  */
2080 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
2085 /* The following code implements the floating point to integer
2086 conversion rules required by the Java Language Specification,
2087 that IEEE NaNs are mapped to zero and values that overflow
2088 the target precision saturate, i.e. values greater than
2089 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2090 are mapped to INT_MIN.  These semantics are allowed by the
2091 C and C++ standards that simply state that the behavior of
2092 FP-to-integer conversion is unspecified upon overflow.  */
2094 HOST_WIDE_INT high, low;
2096 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Apply the rounding implied by CODE (only truncation visible here;
   other cases elided).  */
2100 case FIX_TRUNC_EXPR:
2101 real_trunc (&r, VOIDmode, &x);
2108 /* If R is NaN, return zero and show we have an overflow.  */
2109 if (REAL_VALUE_ISNAN (r))
2116 /* See if R is less than the lower bound or greater than the
/* Below TYPE_MIN_VALUE: saturate to the minimum.  */
2121 tree lt = TYPE_MIN_VALUE (type);
2122 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2123 if (REAL_VALUES_LESS (r, l))
2126 high = TREE_INT_CST_HIGH (lt);
2127 low = TREE_INT_CST_LOW (lt);
/* Above TYPE_MAX_VALUE: saturate to the maximum.  */
2133 tree ut = TYPE_MAX_VALUE (type);
2136 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2137 if (REAL_VALUES_LESS (u, r))
2140 high = TREE_INT_CST_HIGH (ut);
2141 low = TREE_INT_CST_LOW (ut);
/* In range: do the actual conversion.  */
2147 REAL_VALUE_TO_INT (&low, &high, r);
2149 t = force_fit_type_double (type, low, high, -1,
2150 overflow | TREE_OVERFLOW (arg1));
2154 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2155 to another floating point type. */
/* Convert REAL_CST ARG1 to another floating-point TYPE by rounding to
   TYPE's mode; ARG1's overflow flag is carried over.  */
2158 fold_convert_const_real_from_real (tree type, tree arg1)
2160 REAL_VALUE_TYPE value;
2163 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2164 t = build_real (type, value);
2166 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2170 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2171 type TYPE. If no simplification can be done return NULL_TREE. */
/* Fold conversion CODE of constant ARG1 to TYPE, dispatching on the
   (target type, constant kind) pair; return NULL_TREE when no
   simplification applies.  */
2174 fold_convert_const (enum tree_code code, tree type, tree arg1)
2176 if (TREE_TYPE (arg1) == type)
2179 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2181 if (TREE_CODE (arg1) == INTEGER_CST)
2182 return fold_convert_const_int_from_int (type, arg1);
2183 else if (TREE_CODE (arg1) == REAL_CST)
2184 return fold_convert_const_int_from_real (code, type, arg1);
2186 else if (TREE_CODE (type) == REAL_TYPE)
2188 if (TREE_CODE (arg1) == INTEGER_CST)
2189 return build_real_from_int_cst (type, arg1);
2190 if (TREE_CODE (arg1) == REAL_CST)
2191 return fold_convert_const_real_from_real (type, arg1);
2196 /* Construct a vector of zero elements of vector type TYPE. */
/* Build a VECTOR_CST of vector TYPE whose elements are all zero: fold a
   zero of the element type, replicate it TYPE_VECTOR_SUBPARTS times
   into a TREE_LIST, and wrap it with build_vector.  */
2199 build_zero_vector (tree type)
2204 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2205 units = TYPE_VECTOR_SUBPARTS (type);
2208 for (i = 0; i < units; i++)
2209 list = tree_cons (NULL_TREE, elem, list);
2210 return build_vector (type, list);
2213 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
/* Return true when ARG can be converted to TYPE with a plain NOP_EXPR
   (same main variant, or compatible scalar/pointer/offset categories;
   vectors require matching size).  */
2216 fold_convertible_p (tree type, tree arg)
2218 tree orig = TREE_TYPE (arg);
2223 if (TREE_CODE (arg) == ERROR_MARK
2224 || TREE_CODE (type) == ERROR_MARK
2225 || TREE_CODE (orig) == ERROR_MARK)
2228 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2231 switch (TREE_CODE (type))
2233 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2234 case POINTER_TYPE: case REFERENCE_TYPE:
2236 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2237 || TREE_CODE (orig) == OFFSET_TYPE)
/* Vector target: only a same-size vector source converts.  */
2239 return (TREE_CODE (orig) == VECTOR_TYPE
2240 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
/* Default: same tree code on both sides.  */
2243 return TREE_CODE (type) == TREE_CODE (orig);
2247 /* Convert expression ARG to type TYPE. Used by the middle-end for
2248 simple conversions in preference to calling the front-end's convert. */
/* Convert ARG to TYPE, folding constants where possible; the
   middle-end's replacement for the front-end convert.  Dispatches on
   the target TYPE's tree code, then on the source type ORIG.  */
2251 fold_convert (tree type, tree arg)
2253 tree orig = TREE_TYPE (arg);
2259 if (TREE_CODE (arg) == ERROR_MARK
2260 || TREE_CODE (type) == ERROR_MARK
2261 || TREE_CODE (orig) == ERROR_MARK)
2262 return error_mark_node;
2264 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2265 return fold_build1 (NOP_EXPR, type, arg);
2267 switch (TREE_CODE (type))
2269 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2270 case POINTER_TYPE: case REFERENCE_TYPE:
/* Constant integer source: try to fold the conversion outright.  */
2272 if (TREE_CODE (arg) == INTEGER_CST)
2274 tem = fold_convert_const (NOP_EXPR, type, arg);
2275 if (tem != NULL_TREE)
2278 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2279 || TREE_CODE (orig) == OFFSET_TYPE)
2280 return fold_build1 (NOP_EXPR, type, arg);
/* Complex source: convert its real part.  */
2281 if (TREE_CODE (orig) == COMPLEX_TYPE)
2283 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2284 return fold_convert (type, tem);
2286 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2287 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2288 return fold_build1 (NOP_EXPR, type, arg);
/* REAL_TYPE target (case label elided): fold constant sources first.  */
2291 if (TREE_CODE (arg) == INTEGER_CST)
2293 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2294 if (tem != NULL_TREE)
2297 else if (TREE_CODE (arg) == REAL_CST)
2299 tem = fold_convert_const (NOP_EXPR, type, arg);
2300 if (tem != NULL_TREE)
2304 switch (TREE_CODE (orig))
2307 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2308 case POINTER_TYPE: case REFERENCE_TYPE:
2309 return fold_build1 (FLOAT_EXPR, type, arg);
2312 return fold_build1 (NOP_EXPR, type, arg);
/* Complex -> real: take the real part and convert that.  */
2315 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2316 return fold_convert (type, tem);
/* COMPLEX_TYPE target (case label elided).  */
2323 switch (TREE_CODE (orig))
2326 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2327 case POINTER_TYPE: case REFERENCE_TYPE:
/* Scalar -> complex: pair the converted value with a zero imaginary
   part.  */
2329 return build2 (COMPLEX_EXPR, type,
2330 fold_convert (TREE_TYPE (type), arg),
2331 fold_convert (TREE_TYPE (type), integer_zero_node));
2336 if (TREE_CODE (arg) == COMPLEX_EXPR)
2338 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2339 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2340 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* General complex -> complex: ARG may be used twice, so wrap it in a
   SAVE_EXPR before splitting into parts.  */
2343 arg = save_expr (arg);
2344 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2345 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2346 rpart = fold_convert (TREE_TYPE (type), rpart);
2347 ipart = fold_convert (TREE_TYPE (type), ipart);
2348 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* VECTOR_TYPE target (case label elided): same-size reinterpretation.  */
2356 if (integer_zerop (arg))
2357 return build_zero_vector (type);
2358 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2359 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2360 || TREE_CODE (orig) == VECTOR_TYPE);
2361 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
/* VOID_TYPE target: keep only side effects of ARG.  */
2364 tem = fold_ignored_result (arg);
2365 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2367 return fold_build1 (NOP_EXPR, type, tem);
2374 /* Return false if expr can be assumed not to be an lvalue, true
/* Return false only when X's tree code is known never to be an lvalue;
   codes not recognized here — including all front-end-specific codes —
   conservatively count as possible lvalues.  */
2378 maybe_lvalue_p (tree x)
2380 /* We only need to wrap lvalue tree codes.  */
2381 switch (TREE_CODE (x))
2392 case ALIGN_INDIRECT_REF:
2393 case MISALIGNED_INDIRECT_REF:
2395 case ARRAY_RANGE_REF:
2401 case PREINCREMENT_EXPR:
2402 case PREDECREMENT_EXPR:
2404 case TRY_CATCH_EXPR:
2405 case WITH_CLEANUP_EXPR:
2408 case GIMPLE_MODIFY_STMT:
2417 /* Assume the worst for front-end tree codes.  */
2418 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2426 /* Return an expr equal to X but certainly not valid as an lvalue.
   Wraps X in NON_LVALUE_EXPR unless maybe_lvalue_p already rules the
   code out as an lvalue.  */
2431 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2436 if (! maybe_lvalue_p (x))
2438 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2441 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2442 Zero means allow extended lvalues.  Consulted by pedantic_non_lvalue
   below; set by the front end.  */
2444 int pedantic_lvalues;
2446 /* When pedantic, return an expr equal to X but certainly not valid as a
2447 pedantic lvalue. Otherwise, return X. */
/* Like non_lvalue, but only strips lvalue-ness when pedantic_lvalues is
   set; otherwise X is returned unchanged.  */
2450 pedantic_non_lvalue (tree x)
2452 if (pedantic_lvalues)
2453 return non_lvalue (x);
2458 /* Given a tree comparison code, return the code that is the logical inverse
2459 of the given code. It is not safe to do this for floating-point
2460 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2461 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2464 invert_tree_comparison (enum tree_code code, bool honor_nans)
2466 if (honor_nans && flag_trapping_math)
2476 return honor_nans ? UNLE_EXPR : LE_EXPR;
2478 return honor_nans ? UNLT_EXPR : LT_EXPR;
2480 return honor_nans ? UNGE_EXPR : GE_EXPR;
2482 return honor_nans ? UNGT_EXPR : GT_EXPR;
2496 return UNORDERED_EXPR;
2497 case UNORDERED_EXPR:
2498 return ORDERED_EXPR;
2504 /* Similar, but return the comparison that results if the operands are
2505 swapped. This is safe for floating-point. */
/* Return the comparison that results from swapping the operands of
   CODE; safe for floating point (symmetric codes map to themselves).  */
2508 swap_tree_comparison (enum tree_code code)
2515 case UNORDERED_EXPR:
2541 /* Convert a comparison tree code from an enum tree_code representation
2542 into a compcode bit-based encoding. This function is the inverse of
2543 compcode_to_comparison. */
2545 static enum comparison_code
/* Map a comparison tree code to its COMPCODE_* bit encoding, so that
   conjunction/disjunction of comparisons becomes bitwise AND/OR (see
   combine_comparisons).  Inverse of compcode_to_comparison.  */
2546 comparison_to_compcode (enum tree_code code)
2563 return COMPCODE_ORD;
2564 case UNORDERED_EXPR:
2565 return COMPCODE_UNORD;
2567 return COMPCODE_UNLT;
2569 return COMPCODE_UNEQ;
2571 return COMPCODE_UNLE;
2573 return COMPCODE_UNGT;
2575 return COMPCODE_LTGT;
2577 return COMPCODE_UNGE;
2583 /* Convert a compcode bit-based encoding of a comparison operator back
2584 to GCC's enum tree_code representation. This function is the
2585 inverse of comparison_to_compcode. */
2587 static enum tree_code
/* Map a COMPCODE_* bit encoding back to the tree comparison code;
   inverse of comparison_to_compcode.  */
2588 compcode_to_comparison (enum comparison_code code)
2605 return ORDERED_EXPR;
2606 case COMPCODE_UNORD:
2607 return UNORDERED_EXPR;
2625 /* Return a tree for the comparison which is the combination of
2626 doing the AND or OR (depending on CODE) of the two operations LCODE
2627 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2628 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2629 if this makes the transformation invalid. */
/* Combine comparisons LCODE and RCODE, each on the same operands
   LL_ARG/LR_ARG, under the truth operation CODE, by AND/OR-ing their
   compcode bit encodings.  Returns NULL_TREE when NaN-trapping
   semantics would make the combined form invalid.  */
2632 combine_comparisons (enum tree_code code, enum tree_code lcode,
2633 enum tree_code rcode, tree truth_type,
2634 tree ll_arg, tree lr_arg)
2636 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2637 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2638 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2639 enum comparison_code compcode;
2643 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2644 compcode = lcompcode & rcompcode;
2647 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2648 compcode = lcompcode | rcompcode;
/* Without NaNs the UNORD bit is meaningless; drop it and simplify
   the codes that only differ in NaN handling.  */
2657 /* Eliminate unordered comparisons, as well as LTGT and ORD
2658 which are not used unless the mode has NaNs.  */
2659 compcode &= ~COMPCODE_UNORD;
2660 if (compcode == COMPCODE_LTGT)
2661 compcode = COMPCODE_NE;
2662 else if (compcode == COMPCODE_ORD)
2663 compcode = COMPCODE_TRUE;
2665 else if (flag_trapping_math)
2667 /* Check that the original operation and the optimized ones will trap
2668 under the same condition.  */
2669 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2670 && (lcompcode != COMPCODE_EQ)
2671 && (lcompcode != COMPCODE_ORD);
2672 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2673 && (rcompcode != COMPCODE_EQ)
2674 && (rcompcode != COMPCODE_ORD);
2675 bool trap = (compcode & COMPCODE_UNORD) == 0
2676 && (compcode != COMPCODE_EQ)
2677 && (compcode != COMPCODE_ORD);
2679 /* In a short-circuited boolean expression the LHS might be
2680 such that the RHS, if evaluated, will never trap.  For
2681 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2682 if neither x nor y is NaN.  (This is a mixed blessing: for
2683 example, the expression above will never trap, hence
2684 optimizing it to x < y would be invalid).  */
2685 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2686 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2689 /* If the comparison was short-circuited, and only the RHS
2690 trapped, we may now generate a spurious trap.  */
2692 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2695 /* If we changed the conditions that cause a trap, we lose.  */
2696 if ((ltrap || rtrap) != trap)
/* Degenerate results fold to boolean constants.  */
2700 if (compcode == COMPCODE_TRUE)
2701 return constant_boolean_node (true, truth_type);
2702 else if (compcode == COMPCODE_FALSE)
2703 return constant_boolean_node (false, truth_type);
2705 return fold_build2 (compcode_to_comparison (compcode),
2706 truth_type, ll_arg, lr_arg);
2709 /* Return nonzero if CODE is a tree code that represents a truth value. */
2712 truth_value_p (enum tree_code code)
/* Truth values are produced by any comparison-class code or by the
   logical TRUTH_* operators listed below.  */
2714 return (TREE_CODE_CLASS (code) == tcc_comparison
2715 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2716 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2717 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2720 /* Return nonzero if two operands (typically of the same tree node)
2721 are necessarily equal. If either argument has side-effects this
2722 function returns zero. FLAGS modifies behavior as follows:
2724 If OEP_ONLY_CONST is set, only return nonzero for constants.
2725 This function tests whether the operands are indistinguishable;
2726 it does not test whether they are equal using C's == operation.
2727 The distinction is important for IEEE floating point, because
2728 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2729 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2731 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2732 even though it may hold multiple values during a function.
2733 This is because a GCC tree node guarantees that nothing else is
2734 executed between the evaluation of its "operands" (which may often
2735 be evaluated in arbitrary order). Hence if the operands themselves
2736 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2737 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2738 unset means assuming isochronic (or instantaneous) tree equivalence.
2739 Unless comparing arbitrary expression trees, such as from different
2740 statements, this flag can usually be left unset.
2742 If OEP_PURE_SAME is set, then pure functions with identical arguments
2743 are considered the same. It is used when the caller has other ways
2744 to ensure that global memory is unchanged in between. */
2747 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2749 /* If either is ERROR_MARK, they aren't equal. */
2750 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2753 /* If both types don't have the same signedness, then we can't consider
2754 them equal. We must check this before the STRIP_NOPS calls
2755 because they may change the signedness of the arguments. */
2756 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2759 /* If both types don't have the same precision, then it is not safe
   to strip NOPs.  */
2761 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2767 /* In case both args are comparisons but with different comparison
2768 code, try to swap the comparison operands of one arg to produce
2769 a match and compare that variant. */
2770 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2771 && COMPARISON_CLASS_P (arg0)
2772 && COMPARISON_CLASS_P (arg1))
2774 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2776 if (TREE_CODE (arg0) == swap_code)
2777 return operand_equal_p (TREE_OPERAND (arg0, 0),
2778 TREE_OPERAND (arg1, 1), flags)
2779 && operand_equal_p (TREE_OPERAND (arg0, 1),
2780 TREE_OPERAND (arg1, 0), flags);
2783 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2784 /* This is needed for conversions and for COMPONENT_REF.
2785 Might as well play it safe and always test this. */
2786 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2787 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2788 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2791 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2792 We don't care about side effects in that case because the SAVE_EXPR
2793 takes care of that for us. In all other cases, two expressions are
2794 equal if they have no side effects. If we have two identical
2795 expressions with side effects that should be treated the same due
2796 to the only side effects being identical SAVE_EXPR's, that will
2797 be detected in the recursive calls below. */
2798 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2799 && (TREE_CODE (arg0) == SAVE_EXPR
2800 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2803 /* Next handle constant cases, those for which we can return 1 even
2804 if ONLY_CONST is set. */
2805 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2806 switch (TREE_CODE (arg0))
2809 return tree_int_cst_equal (arg0, arg1);
2812 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2813 TREE_REAL_CST (arg1)))
2817 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2819 /* If we do not distinguish between signed and unsigned zero,
2820 consider them equal. */
2821 if (real_zerop (arg0) && real_zerop (arg1))
/* Vector constants: walk the element lists pairwise.  */
2830 v1 = TREE_VECTOR_CST_ELTS (arg0);
2831 v2 = TREE_VECTOR_CST_ELTS (arg1);
2834 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2837 v1 = TREE_CHAIN (v1);
2838 v2 = TREE_CHAIN (v2);
2845 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2847 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2851 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2852 && ! memcmp (TREE_STRING_POINTER (arg0),
2853 TREE_STRING_POINTER (arg1),
2854 TREE_STRING_LENGTH (arg0)));
2857 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2863 if (flags & OEP_ONLY_CONST)
2866 /* Define macros to test an operand from arg0 and arg1 for equality and a
2867 variant that allows null and views null as being different from any
2868 non-null value. In the latter case, if either is null, then both
2869 must be; otherwise, do the normal comparison. */
2870 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2871 TREE_OPERAND (arg1, N), flags)
2873 #define OP_SAME_WITH_NULL(N) \
2874 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2875 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2877 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2880 /* Two conversions are equal only if signedness and modes match. */
2881 switch (TREE_CODE (arg0))
2885 case FIX_TRUNC_EXPR:
2886 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2887 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2897 case tcc_comparison:
2899 if (OP_SAME (0) && OP_SAME (1))
2902 /* For commutative ops, allow the other order. */
2903 return (commutative_tree_code (TREE_CODE (arg0))
2904 && operand_equal_p (TREE_OPERAND (arg0, 0),
2905 TREE_OPERAND (arg1, 1), flags)
2906 && operand_equal_p (TREE_OPERAND (arg0, 1),
2907 TREE_OPERAND (arg1, 0), flags));
2910 /* If either of the pointer (or reference) expressions we are
2911 dereferencing contain a side effect, these cannot be equal. */
2912 if (TREE_SIDE_EFFECTS (arg0)
2913 || TREE_SIDE_EFFECTS (arg1))
2916 switch (TREE_CODE (arg0))
2919 case ALIGN_INDIRECT_REF:
2920 case MISALIGNED_INDIRECT_REF:
2926 case ARRAY_RANGE_REF:
2927 /* Operands 2 and 3 may be null.
2928 Compare the array index by value if it is constant first as we
2929 may have different types but same value here. */
2931 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2932 TREE_OPERAND (arg1, 1))
2934 && OP_SAME_WITH_NULL (2)
2935 && OP_SAME_WITH_NULL (3));
2938 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2939 may be NULL when we're called to compare MEM_EXPRs. */
2940 return OP_SAME_WITH_NULL (0)
2942 && OP_SAME_WITH_NULL (2);
2945 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2951 case tcc_expression:
2952 switch (TREE_CODE (arg0))
2955 case TRUTH_NOT_EXPR:
2958 case TRUTH_ANDIF_EXPR:
2959 case TRUTH_ORIF_EXPR:
2960 return OP_SAME (0) && OP_SAME (1);
2962 case TRUTH_AND_EXPR:
2964 case TRUTH_XOR_EXPR:
2965 if (OP_SAME (0) && OP_SAME (1))
2968 /* Otherwise take into account this is a commutative operation. */
2969 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2970 TREE_OPERAND (arg1, 1), flags)
2971 && operand_equal_p (TREE_OPERAND (arg0, 1),
2972 TREE_OPERAND (arg1, 0), flags));
2979 switch (TREE_CODE (arg0))
2982 /* If the CALL_EXPRs call different functions, then they
2983 clearly can not be equal. */
2984 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2989 unsigned int cef = call_expr_flags (arg0);
2990 if (flags & OEP_PURE_SAME)
2991 cef &= ECF_CONST | ECF_PURE;
2998 /* Now see if all the arguments are the same. */
3000 call_expr_arg_iterator iter0, iter1;
3002 for (a0 = first_call_expr_arg (arg0, &iter0),
3003 a1 = first_call_expr_arg (arg1, &iter1);
3005 a0 = next_call_expr_arg (&iter0),
3006 a1 = next_call_expr_arg (&iter1))
3007 if (! operand_equal_p (a0, a1, flags))
3010 /* If we get here and both argument lists are exhausted
3011 then the CALL_EXPRs are equal. */
3012 return ! (a0 || a1);
3018 case tcc_declaration:
3019 /* Consider __builtin_sqrt equal to sqrt. */
3020 return (TREE_CODE (arg0) == FUNCTION_DECL
3021 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3022 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3023 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3030 #undef OP_SAME_WITH_NULL
3033 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3034 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3036 When in doubt, return 0. */
3039 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3041 int unsignedp1, unsignedpo;
3042 tree primarg0, primarg1, primother;
3043 unsigned int correct_width;
/* Exact tree equality is trivially sufficient.  */
3045 if (operand_equal_p (arg0, arg1, 0))
/* The shortening transformation below only applies to integral types.  */
3048 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3049 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3052 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3053 and see if the inner values are the same. This removes any
3054 signedness comparison, which doesn't matter here. */
3055 primarg0 = arg0, primarg1 = arg1;
3056 STRIP_NOPS (primarg0);
3057 STRIP_NOPS (primarg1);
3058 if (operand_equal_p (primarg0, primarg1, 0))
3061 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3062 actual comparison operand, ARG0.
3064 First throw away any conversions to wider types
3065 already present in the operands. */
3067 primarg1 = get_narrower (arg1, &unsignedp1);
3068 primother = get_narrower (other, &unsignedpo);
3070 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3071 if (unsignedp1 == unsignedpo
3072 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3073 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3075 tree type = TREE_TYPE (arg0);
3077 /* Make sure shorter operand is extended the right way
3078 to match the longer operand. */
3079 primarg1 = fold_convert (signed_or_unsigned_type_for
3080 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3082 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3089 /* See if ARG is an expression that is either a comparison or is performing
3090 arithmetic on comparisons. The comparisons must only be comparing
3091 two different values, which will be stored in *CVAL1 and *CVAL2; if
3092 they are nonzero it means that some operands have already been found.
3093 No variables may be used anywhere else in the expression except in the
3094 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3095 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3097 If this is true, return 1. Otherwise, return zero. */
3100 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3102 enum tree_code code = TREE_CODE (arg);
3103 enum tree_code_class class = TREE_CODE_CLASS (code);
3105 /* We can handle some of the tcc_expression cases here. */
3106 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3108 else if (class == tcc_expression
3109 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3110 || code == COMPOUND_EXPR))
3113 else if (class == tcc_expression && code == SAVE_EXPR
3114 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3116 /* If we've already found a CVAL1 or CVAL2, this expression is
3117 too complex to handle. */
3118 if (*cval1 || *cval2)
3128 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3131 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3132 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3133 cval1, cval2, save_p));
3138 case tcc_expression:
3139 if (code == COND_EXPR)
3140 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3141 cval1, cval2, save_p)
3142 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3143 cval1, cval2, save_p)
3144 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3145 cval1, cval2, save_p));
3148 case tcc_comparison:
3149 /* First see if we can handle the first operand, then the second. For
3150 the second operand, we know *CVAL1 can't be zero. It must be that
3151 one side of the comparison is each of the values; test for the
3152 case where this isn't true by failing if the two operands
   are the same.  */
3155 if (operand_equal_p (TREE_OPERAND (arg, 0),
3156 TREE_OPERAND (arg, 1), 0))
/* Record or match the first comparison operand against the values
   collected so far.  */
3160 *cval1 = TREE_OPERAND (arg, 0);
3161 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3163 else if (*cval2 == 0)
3164 *cval2 = TREE_OPERAND (arg, 0);
3165 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for the second comparison operand.  */
3170 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3172 else if (*cval2 == 0)
3173 *cval2 = TREE_OPERAND (arg, 1);
3174 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3186 /* ARG is a tree that is known to contain just arithmetic operations and
3187 comparisons. Evaluate the operations in the tree substituting NEW0 for
3188 any occurrence of OLD0 as an operand of a comparison and likewise for
   OLD1 with NEW1.  */
3192 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3194 tree type = TREE_TYPE (arg);
3195 enum tree_code code = TREE_CODE (arg);
3196 enum tree_code_class class = TREE_CODE_CLASS (code);
3198 /* We can handle some of the tcc_expression cases here. */
3199 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3201 else if (class == tcc_expression
3202 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary operations: recurse into the single operand.  */
3208 return fold_build1 (code, type,
3209 eval_subst (TREE_OPERAND (arg, 0),
3210 old0, new0, old1, new1));
/* Binary operations: recurse into both operands.  */
3213 return fold_build2 (code, type,
3214 eval_subst (TREE_OPERAND (arg, 0),
3215 old0, new0, old1, new1),
3216 eval_subst (TREE_OPERAND (arg, 1),
3217 old0, new0, old1, new1));
3219 case tcc_expression:
3223 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3226 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3229 return fold_build3 (code, type,
3230 eval_subst (TREE_OPERAND (arg, 0),
3231 old0, new0, old1, new1),
3232 eval_subst (TREE_OPERAND (arg, 1),
3233 old0, new0, old1, new1),
3234 eval_subst (TREE_OPERAND (arg, 2),
3235 old0, new0, old1, new1));
3239 /* Fall through - ??? */
3241 case tcc_comparison:
3243 tree arg0 = TREE_OPERAND (arg, 0);
3244 tree arg1 = TREE_OPERAND (arg, 1);
3246 /* We need to check both for exact equality and tree equality. The
3247 former will be true if the operand has a side-effect. In that
3248 case, we know the operand occurred exactly once. */
3250 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3252 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3255 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3257 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3260 return fold_build2 (code, type, arg0, arg1);
3268 /* Return a tree for the case when the result of an expression is RESULT
3269 converted to TYPE and OMITTED was previously an operand of the expression
3270 but is now not needed (e.g., we folded OMITTED * 0).
3272 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3273 the conversion of RESULT to TYPE. */
3276 omit_one_operand (tree type, tree result, tree omitted)
3278 tree t = fold_convert (type, result);
/* Sequence OMITTED before the result so its side effects still occur.  */
3280 if (TREE_SIDE_EFFECTS (omitted))
3281 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3283 return non_lvalue (t);
3286 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3289 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3291 tree t = fold_convert (type, result);
/* As in omit_one_operand: keep OMITTED's side effects via COMPOUND_EXPR.  */
3293 if (TREE_SIDE_EFFECTS (omitted))
3294 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3296 return pedantic_non_lvalue (t);
3299 /* Return a tree for the case when the result of an expression is RESULT
3300 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3301 of the expression but are now not needed.
3303 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3304 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3305 evaluated before OMITTED2. Otherwise, if neither has side effects,
3306 just do the conversion of RESULT to TYPE. */
3309 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3311 tree t = fold_convert (type, result);
/* Wrap innermost-first so that OMITTED1 ends up evaluated before
   OMITTED2, matching the original left-to-right order.  */
3313 if (TREE_SIDE_EFFECTS (omitted2))
3314 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3315 if (TREE_SIDE_EFFECTS (omitted1))
3316 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3318 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3322 /* Return a simplified tree node for the truth-negation of ARG. This
3323 never alters ARG itself. We assume that ARG is an operation that
3324 returns a truth value (0 or 1).
3326 FIXME: one would think we would fold the result, but it causes
3327 problems with the dominator optimizer. */
3330 fold_truth_not_expr (tree arg)
3332 tree type = TREE_TYPE (arg);
3333 enum tree_code code = TREE_CODE (arg);
3335 /* If this is a comparison, we can simply invert it, except for
3336 floating-point non-equality comparisons, in which case we just
3337 enclose a TRUTH_NOT_EXPR around what we have. */
3339 if (TREE_CODE_CLASS (code) == tcc_comparison)
3341 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3342 if (FLOAT_TYPE_P (op_type)
3343 && flag_trapping_math
3344 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3345 && code != NE_EXPR && code != EQ_EXPR)
3349 code = invert_tree_comparison (code,
3350 HONOR_NANS (TYPE_MODE (op_type)));
3351 if (code == ERROR_MARK)
3354 return build2 (code, type,
3355 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3362 return constant_boolean_node (integer_zerop (arg), type);
/* De Morgan: !(a && b) == !a || !b, and vice versa.  */
3364 case TRUTH_AND_EXPR:
3365 return build2 (TRUTH_OR_EXPR, type,
3366 invert_truthvalue (TREE_OPERAND (arg, 0)),
3367 invert_truthvalue (TREE_OPERAND (arg, 1)));
3370 return build2 (TRUTH_AND_EXPR, type,
3371 invert_truthvalue (TREE_OPERAND (arg, 0)),
3372 invert_truthvalue (TREE_OPERAND (arg, 1)));
3374 case TRUTH_XOR_EXPR:
3375 /* Here we can invert either operand. We invert the first operand
3376 unless the second operand is a TRUTH_NOT_EXPR in which case our
3377 result is the XOR of the first operand with the inside of the
3378 negation of the second operand. */
3380 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3381 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3382 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3384 return build2 (TRUTH_XOR_EXPR, type,
3385 invert_truthvalue (TREE_OPERAND (arg, 0)),
3386 TREE_OPERAND (arg, 1));
3388 case TRUTH_ANDIF_EXPR:
3389 return build2 (TRUTH_ORIF_EXPR, type,
3390 invert_truthvalue (TREE_OPERAND (arg, 0)),
3391 invert_truthvalue (TREE_OPERAND (arg, 1)));
3393 case TRUTH_ORIF_EXPR:
3394 return build2 (TRUTH_ANDIF_EXPR, type,
3395 invert_truthvalue (TREE_OPERAND (arg, 0)),
3396 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* Double negation cancels.  */
3398 case TRUTH_NOT_EXPR:
3399 return TREE_OPERAND (arg, 0);
3403 tree arg1 = TREE_OPERAND (arg, 1);
3404 tree arg2 = TREE_OPERAND (arg, 2);
3405 /* A COND_EXPR may have a throw as one operand, which
3406 then has void type. Just leave void operands as they are.  */
3408 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3409 VOID_TYPE_P (TREE_TYPE (arg1))
3410 ? arg1 : invert_truthvalue (arg1),
3411 VOID_TYPE_P (TREE_TYPE (arg2))
3412 ? arg2 : invert_truthvalue (arg2));
3416 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3417 invert_truthvalue (TREE_OPERAND (arg, 1)));
3419 case NON_LVALUE_EXPR:
3420 return invert_truthvalue (TREE_OPERAND (arg, 0));
3423 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3424 return build1 (TRUTH_NOT_EXPR, type, arg);
3428 return build1 (TREE_CODE (arg), type,
3429 invert_truthvalue (TREE_OPERAND (arg, 0)));
3432 if (!integer_onep (TREE_OPERAND (arg, 1)))
3434 return build2 (EQ_EXPR, type, arg,
3435 build_int_cst (type, 0));
3438 return build1 (TRUTH_NOT_EXPR, type, arg);
3440 case CLEANUP_POINT_EXPR:
3441 return build1 (CLEANUP_POINT_EXPR, type,
3442 invert_truthvalue (TREE_OPERAND (arg, 0)));
3451 /* Return a simplified tree node for the truth-negation of ARG. This
3452 never alters ARG itself. We assume that ARG is an operation that
3453 returns a truth value (0 or 1).
3455 FIXME: one would think we would fold the result, but it causes
3456 problems with the dominator optimizer. */
3459 invert_truthvalue (tree arg)
3463 if (TREE_CODE (arg) == ERROR_MARK)
3466 tem = fold_truth_not_expr (arg);
/* When fold_truth_not_expr cannot simplify, fall back to an explicit
   TRUTH_NOT_EXPR wrapper.  */
3468 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3473 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3474 operands are another bit-wise operation with a common input. If so,
3475 distribute the bit operations to save an operation and possibly two if
3476 constants are involved. For example, convert
3477 (A | B) & (A | C) into A | (B & C)
3478 Further simplification will occur if B and C are constants.
3480 If this optimization cannot be done, 0 will be returned. */
3483 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must use the same inner bit operation (AND or IOR),
   different from CODE itself.  */
3488 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3489 || TREE_CODE (arg0) == code
3490 || (TREE_CODE (arg0) != BIT_AND_EXPR
3491 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Try all four pairings of operands to find the common input.  */
3494 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3496 common = TREE_OPERAND (arg0, 0);
3497 left = TREE_OPERAND (arg0, 1);
3498 right = TREE_OPERAND (arg1, 1);
3500 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3502 common = TREE_OPERAND (arg0, 0);
3503 left = TREE_OPERAND (arg0, 1);
3504 right = TREE_OPERAND (arg1, 0);
3506 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3508 common = TREE_OPERAND (arg0, 1);
3509 left = TREE_OPERAND (arg0, 0);
3510 right = TREE_OPERAND (arg1, 1);
3512 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3514 common = TREE_OPERAND (arg0, 1);
3515 left = TREE_OPERAND (arg0, 0);
3516 right = TREE_OPERAND (arg1, 0);
3521 return fold_build2 (TREE_CODE (arg0), type, common,
3522 fold_build2 (code, type, left, right));
3525 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3526 with code CODE. This optimization is unsafe. */
3528 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3530 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3531 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3533 /* (A / C) +- (B / C) -> (A +- B) / C. */
3535 && operand_equal_p (TREE_OPERAND (arg0, 1),
3536 TREE_OPERAND (arg1, 1), 0))
3537 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3538 fold_build2 (code, type,
3539 TREE_OPERAND (arg0, 0),
3540 TREE_OPERAND (arg1, 0)),
3541 TREE_OPERAND (arg0, 1));
3543 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3544 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3545 TREE_OPERAND (arg1, 0), 0)
3546 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3547 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3549 REAL_VALUE_TYPE r0, r1;
3550 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3551 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
/* Fold the reciprocals and combine them at compile time; this is the
   FP-unsafe part noted in the header comment.  */
3553 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3555 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3556 real_arithmetic (&r0, code, &r0, &r1);
3557 return fold_build2 (MULT_EXPR, type,
3558 TREE_OPERAND (arg0, 0),
3559 build_real (type, r0));
3565 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3566 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3569 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
/* If the requested field covers the whole integral/pointer object, a
   plain conversion suffices — no BIT_FIELD_REF is needed.  */
3576 tree size = TYPE_SIZE (TREE_TYPE (inner));
3577 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3578 || POINTER_TYPE_P (TREE_TYPE (inner)))
3579 && host_integerp (size, 0)
3580 && tree_low_cst (size, 0) == bitsize)
3581 return fold_convert (type, inner);
3584 result = build3 (BIT_FIELD_REF, type, inner,
3585 size_int (bitsize), bitsize_int (bitpos));
3587 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3592 /* Optimize a bit-field compare.
3594 There are two cases: First is a compare against a constant and the
3595 second is a comparison of two items where the fields are at the same
3596 bit position relative to the start of a chunk (byte, halfword, word)
3597 large enough to contain it. In these cases we can avoid the shift
3598 implicit in bitfield extractions.
3600 For constants, we emit a compare of the shifted constant with the
3601 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3602 compared. For two fields at the same position, we do the ANDs with the
3603 similar mask and compare the result of the ANDs.
3605 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3606 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3607 are the left and right operands of the comparison, respectively.
3609 If the optimization described above can be done, we return the resulting
3610 tree. Otherwise we return zero. */
3613 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3616 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3617 tree type = TREE_TYPE (lhs);
3618 tree signed_type, unsigned_type;
3619 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3620 enum machine_mode lmode, rmode, nmode;
3621 int lunsignedp, runsignedp;
3622 int lvolatilep = 0, rvolatilep = 0;
3623 tree linner, rinner = NULL_TREE;
3627 /* Get all the information about the extractions being done. If the bit size
3628 is the same as the size of the underlying object, we aren't doing an
3629 extraction at all and so can do nothing. We also don't want to
3630 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3631 then will no longer be able to replace it. */
3632 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3633 &lunsignedp, &lvolatilep, false);
3634 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3635 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3640 /* If this is not a constant, we can only do something if bit positions,
3641 sizes, and signedness are the same. */
3642 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3643 &runsignedp, &rvolatilep, false);
3645 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3646 || lunsignedp != runsignedp || offset != 0
3647 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3651 /* See if we can find a mode to refer to this field. We should be able to,
3652 but fail if we can't. */
3653 nmode = get_best_mode (lbitsize, lbitpos,
3654 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3655 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3656 TYPE_ALIGN (TREE_TYPE (rinner))),
3657 word_mode, lvolatilep || rvolatilep)	;
3658 if (nmode == VOIDmode)
3661 /* Set signed and unsigned types of the precision of this mode for the
   shifts below.  */
3663 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3664 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3666 /* Compute the bit position and size for the new reference and our offset
3667 within it. If the new reference is the same size as the original, we
3668 won't optimize anything, so return zero. */
3669 nbitsize = GET_MODE_BITSIZE (nmode);
3670 nbitpos = lbitpos & ~ (nbitsize - 1);
3672 if (nbitsize == lbitsize)
3675 if (BYTES_BIG_ENDIAN)
3676 lbitpos = nbitsize - lbitsize - lbitpos;
3678 /* Make the mask to be used against the extracted field. */
3679 mask = build_int_cst_type (unsigned_type, -1)	;
3680 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3681 mask = const_binop (RSHIFT_EXPR, mask,
3682 size_int (nbitsize - lbitsize - lbitpos), 0);
3685 /* If not comparing with constant, just rework the comparison
   and return.  */
3687 return fold_build2 (code, compare_type,
3688 fold_build2 (BIT_AND_EXPR, unsigned_type,
3689 make_bit_field_ref (linner,
3694 fold_build2 (BIT_AND_EXPR, unsigned_type,
3695 make_bit_field_ref (rinner,
3701 /* Otherwise, we are handling the constant case. See if the constant is too
3702 big for the field. Warn and return a tree for 0 (false) if so. We do
3703 this not only for its own sake, but to avoid having to test for this
3704 error case below. If we didn't, we might generate wrong code.
3706 For unsigned fields, the constant shifted right by the field length should
3707 be all zero. For signed fields, the high-order bits should agree with
   the sign bit.  */
3712 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3713 fold_convert (unsigned_type, rhs),
3714 size_int (lbitsize), 0)))
3716 warning (0, "comparison is always %d due to width of bit-field",
3718 return constant_boolean_node (code == NE_EXPR, compare_type);
3723 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3724 size_int (lbitsize - 1), 0);
3725 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3727 warning (0, "comparison is always %d due to width of bit-field",
3729 return constant_boolean_node (code == NE_EXPR, compare_type);
3733 /* Single-bit compares should always be against zero. */
3734 if (lbitsize == 1 && ! integer_zerop (rhs))
3736 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3737 rhs = build_int_cst (type, 0);
3740 /* Make a new bitfield reference, shift the constant over the
3741 appropriate number of bits and mask it with the computed mask
3742 (in case this was a signed field). If we changed it, make a new one. */
3743 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3746 TREE_SIDE_EFFECTS (lhs) = 1;
3747 TREE_THIS_VOLATILE (lhs) = 1;
3750 rhs = const_binop (BIT_AND_EXPR,
3751 const_binop (LSHIFT_EXPR,
3752 fold_convert (unsigned_type, rhs),
3753 size_int (lbitpos), 0),
3756 return build2 (code, compare_type,
3757 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3761 /* Subroutine for fold_truthop: decode a field reference.
3763 If EXP is a comparison reference, we return the innermost reference.
3765 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3766 set to the starting bit number.
3768 If the innermost field can be completely contained in a mode-sized
3769 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3771 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3772 otherwise it is not changed.
3774 *PUNSIGNEDP is set to the signedness of the field.
3776 *PMASK is set to the mask used. This is either contained in a
3777 BIT_AND_EXPR or derived from the width of the field.
3779 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3781 Return 0 if this is not a component reference or is one that we can't
3782 do anything with. */
/* NOTE(review): sampled listing -- interleaved source lines (return type,
   returns, braces, some declarations such as and_mask/unsigned_type) are
   elided here; the visible control flow is incomplete.  */
/* Decode EXP as a bit-field reference on behalf of fold_truthop; results
   are delivered through the p* output pointers documented above.  */
3785 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3786 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3787 int *punsignedp, int *pvolatilep,
3788 tree *pmask, tree *pand_mask)
3790 tree outer_type = 0;
3792 tree mask, inner, offset;
3794 unsigned int precision;
3796 /* All the optimizations using this function assume integer fields.
3797 There are problems with FP fields since the type_for_size call
3798 below can fail for, e.g., XFmode. */
3799 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3802 /* We are interested in the bare arrangement of bits, so strip everything
3803 that doesn't affect the machine mode. However, record the type of the
3804 outermost expression if it may matter below. */
3805 if (TREE_CODE (exp) == NOP_EXPR
3806 || TREE_CODE (exp) == CONVERT_EXPR
3807 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3808 outer_type = TREE_TYPE (exp);
/* Peel off an outer BIT_AND_EXPR; its constant operand is remembered in
   AND_MASK so it can be folded into the field mask below.  */
3811 if (TREE_CODE (exp) == BIT_AND_EXPR)
3813 and_mask = TREE_OPERAND (exp, 1);
3814 exp = TREE_OPERAND (exp, 0);
3815 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3816 if (TREE_CODE (and_mask) != INTEGER_CST)
3820 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3821 punsignedp, pvolatilep, false);
/* Bail out (elided return) when there is no real field access, a variable
   offset, a negative size, or a PLACEHOLDER_EXPR we cannot reason about.  */
3822 if ((inner == exp && and_mask == 0)
3823 || *pbitsize < 0 || offset != 0
3824 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3827 /* If the number of bits in the reference is the same as the bitsize of
3828 the outer type, then the outer type gives the signedness. Otherwise
3829 (in case of a small bitfield) the signedness is unchanged. */
3830 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3831 *punsignedp = TYPE_UNSIGNED (outer_type);
3833 /* Compute the mask to access the bitfield. */
3834 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3835 precision = TYPE_PRECISION (unsigned_type);
3837 mask = build_int_cst_type (unsigned_type, -1);
/* Shift all-ones left then right by (precision - *pbitsize) to leave
   exactly *pbitsize low-order one bits.  */
3839 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3840 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3842 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3844 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3845 fold_convert (unsigned_type, and_mask), mask);
3848 *pand_mask = and_mask;
3852 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
/* NOTE(review): sampled listing -- the return type, tmask declaration,
   the `return` keyword and closing brace are elided.  */
3856 all_ones_mask_p (tree mask, int size)
3858 tree type = TREE_TYPE (mask);
3859 unsigned int precision = TYPE_PRECISION (type);
3862 tmask = build_int_cst_type (signed_type_for (type), -1);
/* Shifting all-ones left then right by (precision - size) produces a value
   with exactly SIZE low-order one bits; MASK matches iff it equals that.  */
3865 tree_int_cst_equal (mask,
3866 const_binop (RSHIFT_EXPR,
3867 const_binop (LSHIFT_EXPR, tmask,
3868 size_int (precision - size),
3870 size_int (precision - size), 0));
3873 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3874 represents the sign bit of EXP's type. If EXP represents a sign
3875 or zero extension, also test VAL against the unextended type.
3876 The return value is the (sub)expression whose sign bit is VAL,
3877 or NULL_TREE otherwise. */
/* NOTE(review): sampled listing -- the return type, several returns,
   the else branch joining the wide/narrow cases, and braces are elided.  */
/* Test whether VAL is exactly the sign-bit constant of EXP's type;
   see the comment block above for the full contract.  */
3880 sign_bit_p (tree exp, tree val)
3882 unsigned HOST_WIDE_INT mask_lo, lo;
3883 HOST_WIDE_INT mask_hi, hi;
3887 /* Tree EXP must have an integral type. */
3888 t = TREE_TYPE (exp);
3889 if (! INTEGRAL_TYPE_P (t))
3892 /* Tree VAL must be an integer constant. */
3893 if (TREE_CODE (val) != INTEGER_CST
3894 || TREE_OVERFLOW (val))
3897 width = TYPE_PRECISION (t);
/* Wide case: the sign bit lives in the high word of the two-word
   INTEGER_CST representation.  */
3898 if (width > HOST_BITS_PER_WIDE_INT)
3900 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3903 mask_hi = ((unsigned HOST_WIDE_INT) -1
3904 >> (2 * HOST_BITS_PER_WIDE_INT - width));
/* Narrow case (elided `else`): sign bit fits in the low word.  */
3910 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3913 mask_lo = ((unsigned HOST_WIDE_INT) -1
3914 >> (HOST_BITS_PER_WIDE_INT - width));
3917 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3918 treat VAL as if it were unsigned. */
3919 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3920 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3923 /* Handle extension from a narrower type. */
3924 if (TREE_CODE (exp) == NOP_EXPR
3925 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3926 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3931 /* Subroutine for fold_truthop: determine if an operand is simple enough
3932 to be evaluated unconditionally. */
/* NOTE(review): sampled listing -- return type, STRIP_NOPS call and a
   DECL_P-style guard between lines 3941 and 3943 are elided.  */
/* Return nonzero when EXP is cheap and side-effect-free enough to be
   evaluated unconditionally (used by fold_truthop).  */
3935 simple_operand_p (tree exp)
3937 /* Strip any conversions that don't change the machine mode. */
3940 return (CONSTANT_CLASS_P (exp)
3941 || TREE_CODE (exp) == SSA_NAME
/* The following conjuncts apply to the elided declaration case.  */
3943 && ! TREE_ADDRESSABLE (exp)
3944 && ! TREE_THIS_VOLATILE (exp)
3945 && ! DECL_NONLOCAL (exp)
3946 /* Don't regard global variables as simple. They may be
3947 allocated in ways unknown to the compiler (shared memory,
3948 #pragma weak, etc). */
3949 && ! TREE_PUBLIC (exp)
3950 && ! DECL_EXTERNAL (exp)
3951 /* Loading a static variable is unduly expensive, but global
3952 registers aren't expensive. */
3953 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3956 /* The following functions are subroutines to fold_range_test and allow it to
3957 try to change a logical combination of comparisons into a range test.
3960 X == 2 || X == 3 || X == 4 || X == 5
3964 (unsigned) (X - 2) <= 3
3966 We describe each set of comparisons as being either inside or outside
3967 a range, using a variable named like IN_P, and then describe the
3968 range with a lower and upper bound. If one of the bounds is omitted,
3969 it represents either the highest or lowest value of the type.
3971 In the comments below, we represent a range by two numbers in brackets
3972 preceded by a "+" to designate being inside that range, or a "-" to
3973 designate being outside that range, so the condition can be inverted by
3974 flipping the prefix. An omitted bound is represented by a "-". For
3975 example, "- [-, 10]" means being outside the range starting at the lowest
3976 possible value and ending at 10, in other words, being greater than 10.
3977 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3980 We set up things so that the missing bounds are handled in a consistent
3981 manner so neither a missing bound nor "true" and "false" need to be
3982 handled using a special case. */
3984 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3985 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3986 and UPPER1_P are nonzero if the respective argument is an upper bound
3987 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3988 must be specified for a comparison. ARG1 will be converted to ARG0's
3989 type if both are specified. */
/* NOTE(review): sampled listing -- the return type, local declarations
   (tem, sgn0, sgn1, result), the switch header, case labels and breaks
   around lines 4026-4041 are elided.  */
/* Apply CODE to ARG0/ARG1 treating a null argument as an infinite bound;
   see the comment block above for the full contract.  */
3992 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3993 tree arg1, int upper1_p)
3999 /* If neither arg represents infinity, do the normal operation.
4000 Else, if not a comparison, return infinity. Else handle the special
4001 comparison rules. Note that most of the cases below won't occur, but
4002 are handled for consistency. */
4004 if (arg0 != 0 && arg1 != 0)
4006 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4007 arg0, fold_convert (TREE_TYPE (arg0), arg1));
/* Only a constant fold result is usable; otherwise report "unknown".  */
4009 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4012 if (TREE_CODE_CLASS (code) != tcc_comparison)
4015 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4016 for neither. In real maths, we cannot assume open ended ranges are
4017 the same. But, this is computer arithmetic, where numbers are finite.
4018 We can therefore make the transformation of any unbounded range with
4019 the value Z, Z being greater than any representable number. This permits
4020 us to treat unbounded ranges as equal. */
4021 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4022 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Elided switch on CODE: each line is the body of one comparison case.  */
4026 result = sgn0 == sgn1;
4029 result = sgn0 != sgn1;
4032 result = sgn0 < sgn1;
4035 result = sgn0 <= sgn1;
4038 result = sgn0 > sgn1;
4041 result = sgn0 >= sgn1;
4047 return constant_boolean_node (result, type);
4050 /* Given EXP, a logical expression, set the range it is testing into
4051 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4052 actually being tested. *PLOW and *PHIGH will be made of the same
4053 type as the returned expression. If EXP is not a comparison, we
4054 will most likely not be returning a useful value and range. Set
4055 *STRICT_OVERFLOW_P to true if the return value is only valid
4056 because signed overflow is undefined; otherwise, do not change
4057 *STRICT_OVERFLOW_P. */
/* NOTE(review): sampled listing -- the return type, the outer while loop,
   the switch header, many braces, breaks/continues and several argument
   lines inside multi-line calls are elided; the visible control flow is
   incomplete and should not be read as contiguous.  */
/* Derive a range test (IN_P, LOW, HIGH) from the logical expression EXP;
   see the comment block above for the full contract.  */
4060 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4061 bool *strict_overflow_p)
4063 enum tree_code code;
4064 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4065 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4067 tree low, high, n_low, n_high;
4069 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4070 and see if we can refine the range. Some of the cases below may not
4071 happen, but it doesn't seem worth worrying about this. We "continue"
4072 the outer loop when we've changed something; otherwise we "break"
4073 the switch, which will "break" the while. */
4076 low = high = build_int_cst (TREE_TYPE (exp), 0);
4080 code = TREE_CODE (exp);
4081 exp_type = TREE_TYPE (exp);
/* Pick apart EXP's operands by tree-code class so the cases below can
   refer to arg0/arg1 uniformly.  */
4083 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4085 if (TREE_OPERAND_LENGTH (exp) > 0)
4086 arg0 = TREE_OPERAND (exp, 0);
4087 if (TREE_CODE_CLASS (code) == tcc_comparison
4088 || TREE_CODE_CLASS (code) == tcc_unary
4089 || TREE_CODE_CLASS (code) == tcc_binary)
4090 arg0_type = TREE_TYPE (arg0);
4091 if (TREE_CODE_CLASS (code) == tcc_binary
4092 || TREE_CODE_CLASS (code) == tcc_comparison
4093 || (TREE_CODE_CLASS (code) == tcc_expression
4094 && TREE_OPERAND_LENGTH (exp) > 1))
4095 arg1 = TREE_OPERAND (exp, 1);
/* Elided: switch (code) { ... -- cases below are its arms.  */
4100 case TRUTH_NOT_EXPR:
4101 in_p = ! in_p, exp = arg0;
4104 case EQ_EXPR: case NE_EXPR:
4105 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4106 /* We can only do something if the range is testing for zero
4107 and if the second operand is an integer constant. Note that
4108 saying something is "in" the range we make is done by
4109 complementing IN_P since it will set in the initial case of
4110 being not equal to zero; "out" is leaving it alone. */
4111 if (low == 0 || high == 0
4112 || ! integer_zerop (low) || ! integer_zerop (high)
4113 || TREE_CODE (arg1) != INTEGER_CST)
/* Inner switch on the comparison code, mapping each operator to a
   range in the bracket notation described above.  */
4118 case NE_EXPR: /* - [c, c] */
4121 case EQ_EXPR: /* + [c, c] */
4122 in_p = ! in_p, low = high = arg1;
4124 case GT_EXPR: /* - [-, c] */
4125 low = 0, high = arg1;
4127 case GE_EXPR: /* + [c, -] */
4128 in_p = ! in_p, low = arg1, high = 0;
4130 case LT_EXPR: /* - [c, -] */
4131 low = arg1, high = 0;
4133 case LE_EXPR: /* + [-, c] */
4134 in_p = ! in_p, low = 0, high = arg1;
4140 /* If this is an unsigned comparison, we also know that EXP is
4141 greater than or equal to zero. We base the range tests we make
4142 on that fact, so we record it here so we can parse existing
4143 range tests. We test arg0_type since often the return type
4144 of, e.g. EQ_EXPR, is boolean. */
4145 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4147 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4149 build_int_cst (arg0_type, 0),
4153 in_p = n_in_p, low = n_low, high = n_high;
4155 /* If the high bound is missing, but we have a nonzero low
4156 bound, reverse the range so it goes from zero to the low bound
4158 if (high == 0 && low && ! integer_zerop (low))
4161 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4162 integer_one_node, 0);
4163 low = build_int_cst (arg0_type, 0);
/* Elided: NEGATE_EXPR case header.  */
4171 /* (-x) IN [a,b] -> x in [-b, -a] */
4172 n_low = range_binop (MINUS_EXPR, exp_type,
4173 build_int_cst (exp_type, 0),
4175 n_high = range_binop (MINUS_EXPR, exp_type,
4176 build_int_cst (exp_type, 0),
4178 low = n_low, high = n_high;
/* Elided: BIT_NOT_EXPR case -- rewrite ~X as -X - 1 and retry.  */
4184 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4185 build_int_cst (exp_type, 1));
4188 case PLUS_EXPR: case MINUS_EXPR:
4189 if (TREE_CODE (arg1) != INTEGER_CST)
4192 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4193 move a constant to the other side. */
4194 if (!TYPE_UNSIGNED (arg0_type)
4195 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4198 /* If EXP is signed, any overflow in the computation is undefined,
4199 so we don't worry about it so long as our computations on
4200 the bounds don't overflow. For unsigned, overflow is defined
4201 and this is exactly the right thing. */
4202 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4203 arg0_type, low, 0, arg1, 0);
4204 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4205 arg0_type, high, 1, arg1, 0);
4206 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4207 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4210 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4211 *strict_overflow_p = true;
4213 /* Check for an unsigned range which has wrapped around the maximum
4214 value thus making n_high < n_low, and normalize it. */
4215 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4217 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4218 integer_one_node, 0);
4219 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4220 integer_one_node, 0);
4222 /* If the range is of the form +/- [ x+1, x ], we won't
4223 be able to normalize it. But then, it represents the
4224 whole range or the empty set, so make it
4226 if (tree_int_cst_equal (n_low, low)
4227 && tree_int_cst_equal (n_high, high))
4233 low = n_low, high = n_high;
4238 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4239 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4242 if (! INTEGRAL_TYPE_P (arg0_type)
4243 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4244 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4247 n_low = low, n_high = high;
4250 n_low = fold_convert (arg0_type, n_low);
4253 n_high = fold_convert (arg0_type, n_high);
4256 /* If we're converting arg0 from an unsigned type, to exp,
4257 a signed type, we will be doing the comparison as unsigned.
4258 The tests above have already verified that LOW and HIGH
4261 So we have to ensure that we will handle large unsigned
4262 values the same way that the current signed bounds treat
4265 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4268 tree equiv_type = lang_hooks.types.type_for_mode
4269 (TYPE_MODE (arg0_type), 1);
4271 /* A range without an upper bound is, naturally, unbounded.
4272 Since convert would have cropped a very large value, use
4273 the max value for the destination type. */
4275 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4276 : TYPE_MAX_VALUE (arg0_type);
4278 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4279 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4280 fold_convert (arg0_type,
4282 build_int_cst (arg0_type, 1));
4284 /* If the low bound is specified, "and" the range with the
4285 range for which the original unsigned value will be
4289 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4290 1, n_low, n_high, 1,
4291 fold_convert (arg0_type,
4296 in_p = (n_in_p == in_p);
4300 /* Otherwise, "or" the range with the range of the input
4301 that will be interpreted as negative. */
4302 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4303 0, n_low, n_high, 1,
4304 fold_convert (arg0_type,
4309 in_p = (in_p != n_in_p);
4314 low = n_low, high = n_high;
4324 /* If EXP is a constant, we can evaluate whether this is true or false. */
4325 if (TREE_CODE (exp) == INTEGER_CST)
4327 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4329 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4335 *pin_p = in_p, *plow = low, *phigh = high;
4339 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4340 type, TYPE, return an expression to test if EXP is in (or out of, depending
4341 on IN_P) the range. Return 0 if the test couldn't be created. */
/* NOTE(review): sampled listing -- the return type, several returns,
   `else` joins, `break`s, case labels and braces are elided; the visible
   flow is not contiguous.  */
/* Build an expression of type TYPE testing whether EXP lies in (or out
   of, per IN_P) the range [LOW, HIGH]; 0 bounds mean "unbounded".  */
4344 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4346 tree etype = TREE_TYPE (exp);
4349 #ifdef HAVE_canonicalize_funcptr_for_compare
4350 /* Disable this optimization for function pointer expressions
4351 on targets that require function pointer canonicalization. */
4352 if (HAVE_canonicalize_funcptr_for_compare
4353 && TREE_CODE (etype) == POINTER_TYPE
4354 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* Out-of-range test (elided `if (! in_p)` guard): build the inclusion
   test and invert it.  */
4360 value = build_range_check (type, exp, 1, low, high);
4362 return invert_truthvalue (value);
4367 if (low == 0 && high == 0)
4368 return build_int_cst (type, 1);
/* Elided guards: low-only / high-only cases reduce to one comparison.  */
4371 return fold_build2 (LE_EXPR, type, exp,
4372 fold_convert (etype, high));
4375 return fold_build2 (GE_EXPR, type, exp,
4376 fold_convert (etype, low));
4378 if (operand_equal_p (low, high, 0))
4379 return fold_build2 (EQ_EXPR, type, exp,
4380 fold_convert (etype, low));
4382 if (integer_zerop (low))
4384 if (! TYPE_UNSIGNED (etype))
4386 etype = unsigned_type_for (etype);
4387 high = fold_convert (etype, high);
4388 exp = fold_convert (etype, exp);
4390 return build_range_check (type, exp, 1, 0, high);
4393 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4394 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4396 unsigned HOST_WIDE_INT lo;
4400 prec = TYPE_PRECISION (etype);
/* Compute the signed-max constant (2^(prec-1) - 1) split across the
   two-word lo/hi representation of INTEGER_CST.  */
4401 if (prec <= HOST_BITS_PER_WIDE_INT)
4404 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4408 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4409 lo = (unsigned HOST_WIDE_INT) -1;
4412 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4414 if (TYPE_UNSIGNED (etype))
4416 etype = signed_type_for (etype);
4417 exp = fold_convert (etype, exp);
4419 return fold_build2 (GT_EXPR, type, exp,
4420 build_int_cst (etype, 0));
4424 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4425 This requires wrap-around arithmetics for the type of the expression. */
4426 switch (TREE_CODE (etype))
4429 /* There is no requirement that LOW be within the range of ETYPE
4430 if the latter is a subtype. It must, however, be within the base
4431 type of ETYPE. So be sure we do the subtraction in that type. */
4432 if (TREE_TYPE (etype))
4433 etype = TREE_TYPE (etype);
/* Elided: ENUMERAL_TYPE/BOOLEAN_TYPE case falls back to an integer type
   of the same precision.  */
4438 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4439 TYPE_UNSIGNED (etype));
4446 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4447 if (TREE_CODE (etype) == INTEGER_TYPE
4448 && !TYPE_OVERFLOW_WRAPS (etype))
4450 tree utype, minv, maxv;
4452 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4453 for the type in question, as we rely on this here. */
4454 utype = unsigned_type_for (etype);
4455 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4456 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4457 integer_one_node, 1);
4458 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4460 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4467 high = fold_convert (etype, high);
4468 low = fold_convert (etype, low);
4469 exp = fold_convert (etype, exp);
4471 value = const_binop (MINUS_EXPR, high, low, 0);
/* Pointers must be offset with POINTER_PLUS_EXPR and a sizetype offset,
   hence the negate-and-add form below.  */
4474 if (POINTER_TYPE_P (etype))
4476 if (value != 0 && !TREE_OVERFLOW (value))
4478 low = fold_convert (sizetype, low);
4479 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4480 return build_range_check (type,
4481 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4482 1, build_int_cst (etype, 0), value);
4487 if (value != 0 && !TREE_OVERFLOW (value))
4488 return build_range_check (type,
4489 fold_build2 (MINUS_EXPR, etype, exp, low),
4490 1, build_int_cst (etype, 0), value);
4495 /* Return the predecessor of VAL in its type, handling the infinite case. */
/* NOTE(review): sampled listing -- the return type, braces and the
   `return 0` for the minimum-value case are elided.  */
4498 range_predecessor (tree val)
4500 tree type = TREE_TYPE (val);
/* TYPE_MIN_VALUE has no predecessor: the elided branch returns 0.  */
4502 if (INTEGRAL_TYPE_P (type)
4503 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4506 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4509 /* Return the successor of VAL in its type, handling the infinite case. */
/* NOTE(review): sampled listing -- the return type, braces and the
   `return 0` for the maximum-value case are elided.  */
4512 range_successor (tree val)
4514 tree type = TREE_TYPE (val);
/* TYPE_MAX_VALUE has no successor: the elided branch returns 0.  */
4516 if (INTEGRAL_TYPE_P (type)
4517 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4520 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4523 /* Given two ranges, see if we can merge them into one. Return 1 if we
4524 can, 0 if we can't. Set the output range into the specified parameters. */
/* NOTE(review): sampled listing -- the return type, local declarations
   (in_p, low, high, tem, temp, no_overlap, subset), several `if`/`else`
   guards, case labels, breaks and braces are elided; the visible flow is
   not contiguous.  */
/* Merge ranges (IN0_P, LOW0, HIGH0) and (IN1_P, LOW1, HIGH1) into a single
   range if possible; outputs go through the p* pointers.  */
4527 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4528 tree high0, int in1_p, tree low1, tree high1)
4536 int lowequal = ((low0 == 0 && low1 == 0)
4537 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4538 low0, 0, low1, 0)));
4539 int highequal = ((high0 == 0 && high1 == 0)
4540 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4541 high0, 1, high1, 1)));
4543 /* Make range 0 be the range that starts first, or ends last if they
4544 start at the same value. Swap them if it isn't. */
4545 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4548 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4549 high1, 1, high0, 1))))
4551 temp = in0_p, in0_p = in1_p, in1_p = temp;
4552 tem = low0, low0 = low1, low1 = tem;
4553 tem = high0, high0 = high1, high1 = tem;
4556 /* Now flag two cases, whether the ranges are disjoint or whether the
4557 second range is totally subsumed in the first. Note that the tests
4558 below are simplified by the ones above. */
4559 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4560 high0, 1, low1, 0));
4561 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4562 high1, 1, high0, 1));
4564 /* We now have four cases, depending on whether we are including or
4565 excluding the two ranges. */
/* Case 1 (elided guard `if (in0_p && in1_p)`): intersecting two
   inclusive ranges.  */
4568 /* If they don't overlap, the result is false. If the second range
4569 is a subset it is the result. Otherwise, the range is from the start
4570 of the second to the end of the first. */
4572 in_p = 0, low = high = 0;
4574 in_p = 1, low = low1, high = high1;
4576 in_p = 1, low = low1, high = high0;
4579 else if (in0_p && ! in1_p)
4581 /* If they don't overlap, the result is the first range. If they are
4582 equal, the result is false. If the second range is a subset of the
4583 first, and the ranges begin at the same place, we go from just after
4584 the end of the second range to the end of the first. If the second
4585 range is not a subset of the first, or if it is a subset and both
4586 ranges end at the same place, the range starts at the start of the
4587 first range and ends just before the second range.
4588 Otherwise, we can't describe this as a single range. */
4590 in_p = 1, low = low0, high = high0;
4591 else if (lowequal && highequal)
4592 in_p = 0, low = high = 0;
4593 else if (subset && lowequal)
4595 low = range_successor (high1);
4600 /* We are in the weird situation where high0 > high1 but
4601 high1 has no successor. Punt. */
4605 else if (! subset || highequal)
4608 high = range_predecessor (low1);
4612 /* low0 < low1 but low1 has no predecessor. Punt. */
4620 else if (! in0_p && in1_p)
4622 /* If they don't overlap, the result is the second range. If the second
4623 is a subset of the first, the result is false. Otherwise,
4624 the range starts just after the first range and ends at the
4625 end of the second. */
4627 in_p = 1, low = low1, high = high1;
4628 else if (subset || highequal)
4629 in_p = 0, low = high = 0;
4632 low = range_successor (high0);
4637 /* high1 > high0 but high0 has no successor. Punt. */
/* Case 4 (elided `else`): both ranges exclusive.  */
4645 /* The case where we are excluding both ranges. Here the complex case
4646 is if they don't overlap. In that case, the only time we have a
4647 range is if they are adjacent. If the second is a subset of the
4648 first, the result is the first. Otherwise, the range to exclude
4649 starts at the beginning of the first range and ends at the end of the
4653 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4654 range_successor (high0),
4656 in_p = 0, low = low0, high = high1;
4659 /* Canonicalize - [min, x] into - [-, x]. */
4660 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4661 switch (TREE_CODE (TREE_TYPE (low0)))
4664 if (TYPE_PRECISION (TREE_TYPE (low0))
4665 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4669 if (tree_int_cst_equal (low0,
4670 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4674 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4675 && integer_zerop (low0))
4682 /* Canonicalize - [x, max] into - [x, -]. */
4683 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4684 switch (TREE_CODE (TREE_TYPE (high1)))
4687 if (TYPE_PRECISION (TREE_TYPE (high1))
4688 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4692 if (tree_int_cst_equal (high1,
4693 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4697 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4698 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4700 integer_one_node, 1)))
4707 /* The ranges might be also adjacent between the maximum and
4708 minimum values of the given type. For
4709 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4710 return + [x + 1, y - 1]. */
4711 if (low0 == 0 && high1 == 0)
4713 low = range_successor (high0);
4714 high = range_predecessor (low1);
4715 if (low == 0 || high == 0)
4725 in_p = 0, low = low0, high = high0;
4727 in_p = 0, low = low0, high = high1;
4730 *pin_p = in_p, *plow = low, *phigh = high;
4735 /* Subroutine of fold, looking inside expressions of the form
4736 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4737 of the COND_EXPR. This function is being used also to optimize
4738 A op B ? C : A, by reversing the comparison first.
4740 Return a folded expression whose code is not a COND_EXPR
4741 anymore, or NULL_TREE if no folding opportunity is found. */
/* NOTE(review): sampled listing -- the return type, the switch headers on
   comp_code, many case labels, guards, braces and some argument lines of
   multi-line calls are elided; the visible flow is not contiguous.  */
/* Fold A op B ? A : C (callers pre-swap arms for A op B ? C : A);
   see the comment block above for the full contract.  */
4744 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4746 enum tree_code comp_code = TREE_CODE (arg0);
4747 tree arg00 = TREE_OPERAND (arg0, 0);
4748 tree arg01 = TREE_OPERAND (arg0, 1);
4749 tree arg1_type = TREE_TYPE (arg1);
4755 /* If we have A op 0 ? A : -A, consider applying the following
4758 A == 0? A : -A same as -A
4759 A != 0? A : -A same as A
4760 A >= 0? A : -A same as abs (A)
4761 A > 0? A : -A same as abs (A)
4762 A <= 0? A : -A same as -abs (A)
4763 A < 0? A : -A same as -abs (A)
4765 None of these transformations work for modes with signed
4766 zeros. If A is +/-0, the first two transformations will
4767 change the sign of the result (from +0 to -0, or vice
4768 versa). The last four will fix the sign of the result,
4769 even though the original expressions could be positive or
4770 negative, depending on the sign of A.
4772 Note that all these transformations are correct if A is
4773 NaN, since the two alternatives (A and -A) are also NaNs. */
4774 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4775 ? real_zerop (arg01)
4776 : integer_zerop (arg01))
4777 && ((TREE_CODE (arg2) == NEGATE_EXPR
4778 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4779 /* In the case that A is of the form X-Y, '-A' (arg2) may
4780 have already been folded to Y-X, check for that. */
4781 || (TREE_CODE (arg1) == MINUS_EXPR
4782 && TREE_CODE (arg2) == MINUS_EXPR
4783 && operand_equal_p (TREE_OPERAND (arg1, 0),
4784 TREE_OPERAND (arg2, 1), 0)
4785 && operand_equal_p (TREE_OPERAND (arg1, 1),
4786 TREE_OPERAND (arg2, 0), 0))))
/* Elided: switch (comp_code) { case EQ_EXPR: ... -- the lines below are
   the bodies of its cases (EQ, NE, GE/GT, LE/LT).  */
4791 tem = fold_convert (arg1_type, arg1);
4792 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4795 return pedantic_non_lvalue (fold_convert (type, arg1));
4798 if (flag_trapping_math)
4803 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4804 arg1 = fold_convert (signed_type_for
4805 (TREE_TYPE (arg1)), arg1);
4806 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4807 return pedantic_non_lvalue (fold_convert (type, tem));
4810 if (flag_trapping_math)
4814 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4815 arg1 = fold_convert (signed_type_for
4816 (TREE_TYPE (arg1)), arg1);
4817 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4818 return negate_expr (fold_convert (type, tem));
4820 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4824 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4825 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4826 both transformations are correct when A is NaN: A != 0
4827 is then true, and A == 0 is false. */
4829 if (integer_zerop (arg01) && integer_zerop (arg2))
4831 if (comp_code == NE_EXPR)
4832 return pedantic_non_lvalue (fold_convert (type, arg1));
4833 else if (comp_code == EQ_EXPR)
4834 return build_int_cst (type, 0);
4837 /* Try some transformations of A op B ? A : B.
4839 A == B? A : B same as B
4840 A != B? A : B same as A
4841 A >= B? A : B same as max (A, B)
4842 A > B? A : B same as max (B, A)
4843 A <= B? A : B same as min (A, B)
4844 A < B? A : B same as min (B, A)
4846 As above, these transformations don't work in the presence
4847 of signed zeros. For example, if A and B are zeros of
4848 opposite sign, the first two transformations will change
4849 the sign of the result. In the last four, the original
4850 expressions give different results for (A=+0, B=-0) and
4851 (A=-0, B=+0), but the transformed expressions do not.
4853 The first two transformations are correct if either A or B
4854 is a NaN. In the first transformation, the condition will
4855 be false, and B will indeed be chosen. In the case of the
4856 second transformation, the condition A != B will be true,
4857 and A will be chosen.
4859 The conversions to max() and min() are not correct if B is
4860 a number and A is not. The conditions in the original
4861 expressions will be false, so all four give B. The min()
4862 and max() versions would give a NaN instead. */
4863 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4864 /* Avoid these transformations if the COND_EXPR may be used
4865 as an lvalue in the C++ front-end. PR c++/19199. */
4867 || (strcmp (lang_hooks.name, "GNU C++") != 0
4868 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4869 || ! maybe_lvalue_p (arg1)
4870 || ! maybe_lvalue_p (arg2)))
4872 tree comp_op0 = arg00;
4873 tree comp_op1 = arg01;
4874 tree comp_type = TREE_TYPE (comp_op0);
4876 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4877 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
/* Elided: inner switch on comp_code; EQ/NE arms return an arm directly.  */
4887 return pedantic_non_lvalue (fold_convert (type, arg2));
4889 return pedantic_non_lvalue (fold_convert (type, arg1));
4894 /* In C++ a ?: expression can be an lvalue, so put the
4895 operand which will be used if they are equal first
4896 so that we can convert this back to the
4897 corresponding COND_EXPR. */
4898 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4900 comp_op0 = fold_convert (comp_type, comp_op0);
4901 comp_op1 = fold_convert (comp_type, comp_op1);
4902 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4903 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4904 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4905 return pedantic_non_lvalue (fold_convert (type, tem));
4912 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4914 comp_op0 = fold_convert (comp_type, comp_op0);
4915 comp_op1 = fold_convert (comp_type, comp_op1);
4916 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4917 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4918 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4919 return pedantic_non_lvalue (fold_convert (type, tem));
4923 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4924 return pedantic_non_lvalue (fold_convert (type, arg2));
4927 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4928 return pedantic_non_lvalue (fold_convert (type, arg1));
4931 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4936 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4937 we might still be able to simplify this. For example,
4938 if C1 is one less or one more than C2, this might have started
4939 out as a MIN or MAX and been transformed by this function.
4940 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4942 if (INTEGRAL_TYPE_P (type)
4943 && TREE_CODE (arg01) == INTEGER_CST
4944 && TREE_CODE (arg2) == INTEGER_CST)
/* Elided: switch (comp_code) { case EQ_EXPR: ... -- the comparisons
   below map C1/C2 adjacency onto MIN_EXPR/MAX_EXPR rewrites.  */
4948 /* We can replace A with C1 in this case. */
4949 arg1 = fold_convert (type, arg01);
4950 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4953 /* If C1 is C2 + 1, this is min(A, C2). */
4954 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4956 && operand_equal_p (arg01,
4957 const_binop (PLUS_EXPR, arg2,
4958 build_int_cst (type, 1), 0),
4960 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4962 fold_convert (type, arg1),
4967 /* If C1 is C2 - 1, this is min(A, C2). */
4968 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4970 && operand_equal_p (arg01,
4971 const_binop (MINUS_EXPR, arg2,
4972 build_int_cst (type, 1), 0),
4974 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4976 fold_convert (type, arg1),
4981 /* If C1 is C2 - 1, this is max(A, C2). */
4982 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4984 && operand_equal_p (arg01,
4985 const_binop (MINUS_EXPR, arg2,
4986 build_int_cst (type, 1), 0),
4988 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4990 fold_convert (type, arg1),
4995 /* If C1 is C2 + 1, this is max(A, C2). */
4996 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4998 && operand_equal_p (arg01,
4999 const_binop (PLUS_EXPR, arg2,
5000 build_int_cst (type, 1), 0),
5002 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5004 fold_convert (type, arg1),
/* Default policy knob: nonzero means it is profitable to turn a
   short-circuit TRUTH_ANDIF/ORIF into its non-short-circuit form,
   keyed off the target's BRANCH_COST.  A target may pre-define it
   to override this default.
   NOTE(review): this extract drops lines (the embedded original line
   numbers jump); the matching #endif is not visible here.  */
5018 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5019 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
/* NOTE(review): this extract is incomplete — the embedded original line
   numbers jump (e.g. 5034→5036), so statements, braces and the function's
   return type line are missing.  Code below is reproduced verbatim.  */
5022 /* EXP is some logical combination of boolean tests.  See if we can
5023 merge it into some range test.  Return the new tree if so.  */
5026 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
/* or_op: nonzero when CODE is one of the OR forms; used to decide whether
   to invert the per-operand ranges (De Morgan) before merging.  */
5028 int or_op = (code == TRUTH_ORIF_EXPR
5029 || code == TRUTH_OR_EXPR)
5030 int in0_p, in1_p, in_p;
5031 tree low0, low1, low, high0, high1, high;
5032 bool strict_overflow_p = false;
/* Decompose each operand into an "in range [low,high]" test (in*_p says
   whether the test is inclusion or exclusion).  make_range also records
   whether the decomposition relied on undefined signed overflow.  */
5033 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5034 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5036 const char * const warnmsg = G_("assuming signed overflow does not occur "
5037 "when simplifying range test");
5039 /* If this is an OR operation, invert both sides; we will invert
5040 again at the end. */
5042 in0_p = ! in0_p, in1_p = ! in1_p;
5044 /* If both expressions are the same, if we can merge the ranges, and we
5045 can build the range test, return it or it inverted. If one of the
5046 ranges is always true or always false, consider it to be the same
5047 expression as the other. */
5048 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5049 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5051 && 0 != (tem = (build_range_check (type,
5053 : rhs != 0 ? rhs : integer_zero_node,
/* Only warn about the overflow assumption if we actually simplified.  */
5056 if (strict_overflow_p)
5057 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
/* Undo the earlier De Morgan inversion for the OR forms.  */
5058 return or_op ? invert_truthvalue (tem) : tem;
5061 /* On machines where the branch cost is expensive, if this is a
5062 short-circuited branch and the underlying object on both sides
5063 is the same, make a non-short-circuit operation. */
5064 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5065 && lhs != 0 && rhs != 0
5066 && (code == TRUTH_ANDIF_EXPR
5067 || code == TRUTH_ORIF_EXPR)
5068 && operand_equal_p (lhs, rhs, 0))
5070 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5071 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5072 which cases we can't do this. */
5073 if (simple_operand_p (lhs))
5074 return build2 (code == TRUTH_ANDIF_EXPR
5075 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
/* Not simple: share the common operand via SAVE_EXPR, but only inside a
   function body (global_bindings_p == 0) and never across PLACEHOLDERs.  */
5078 else if (lang_hooks.decls.global_bindings_p () == 0
5079 && ! CONTAINS_PLACEHOLDER_P (lhs))
5081 tree common = save_expr (lhs);
5083 if (0 != (lhs = build_range_check (type, common,
5084 or_op ? ! in0_p : in0_p,
5086 && (0 != (rhs = build_range_check (type, common,
5087 or_op ? ! in1_p : in1_p,
5090 if (strict_overflow_p)
5091 fold_overflow_warning (warnmsg,
5092 WARN_STRICT_OVERFLOW_COMPARISON);
5093 return build2 (code == TRUTH_ANDIF_EXPR
5094 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
/* NOTE(review): extract is missing lines (original numbering jumps, e.g.
   5112→5115); the return-type line and some statements are absent.  */
5103 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5104 bit value. Arrange things so the extra bits will be set to zero if and
5105 only if C is signed-extended to its full width. If MASK is nonzero,
5106 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5109 unextend (tree c, int p, int unsignedp, tree mask)
5111 tree type = TREE_TYPE (c);
5112 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Full-width or unsigned constants need no sign-bit surgery.  */
5115 if (p == modesize || unsignedp)
5118 /* We work by getting just the sign bit into the low-order bit, then
5119 into the high-order bit, then sign-extend. We then XOR that value
/* Isolate bit P-1 (the sign bit of the P-bit value) in the low bit.  */
5121 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5122 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5124 /* We must use a signed type in order to get an arithmetic right shift.
5125 However, we must also avoid introducing accidental overflows, so that
5126 a subsequent call to integer_zerop will work. Hence we must
5127 do the type conversion here. At this point, the constant is either
5128 zero or one, and the conversion to a signed type can never overflow.
5129 We could get an overflow if this conversion is done anywhere else. */
5130 if (TYPE_UNSIGNED (type))
5131 temp = fold_convert (signed_type_for (type), temp);
/* Move the bit to the top, then arithmetic-shift it back down so the
   upper (modesize - p) bits replicate the sign bit.  */
5133 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5134 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5136 temp = const_binop (BIT_AND_EXPR, temp,
5137 fold_convert (TREE_TYPE (c), mask), 0);
5138 /* If necessary, convert the type back to match the type of C. */
5139 if (TYPE_UNSIGNED (type))
5140 temp = fold_convert (type, temp);
/* XOR flips exactly the replicated-sign bits of C, per the contract in
   the header comment.  */
5142 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
/* NOTE(review): this extract drops many lines (original numbering jumps
   throughout); braces, early `return 0`s and some conditions are missing.
   Surviving code is reproduced verbatim; comments added only.  */
5145 /* Find ways of folding logical expressions of LHS and RHS:
5146 Try to merge two comparisons to the same innermost item.
5147 Look for range tests like "ch >= '0' && ch <= '9'".
5148 Look for combinations of simple terms on machines with expensive branches
5149 and evaluate the RHS unconditionally.
5151 For example, if we have p->a == 2 && p->b == 4 and we can make an
5152 object large enough to span both A and B, we can do this with a comparison
5153 against the object ANDed with the a mask.
5155 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5156 operations to do this with one comparison.
5158 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5159 function and the one above.
5161 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5162 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5164 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5167 We return the simplified tree or 0 if no optimization is possible. */
5170 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5172 /* If this is the "or" of two comparisons, we can do something if
5173 the comparisons are NE_EXPR. If this is the "and", we can do something
5174 if the comparisons are EQ_EXPR. I.e.,
5175 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5177 WANTED_CODE is this operation code. For single bit fields, we can
5178 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5179 comparison for one-bit fields. */
/* Naming convention below: first letter l/r = left/right comparison,
   second letter l/r = left/right operand of that comparison.  */
5181 enum tree_code wanted_code;
5182 enum tree_code lcode, rcode;
5183 tree ll_arg, lr_arg, rl_arg, rr_arg;
5184 tree ll_inner, lr_inner, rl_inner, rr_inner;
5185 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5186 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5187 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5188 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5189 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5190 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5191 enum machine_mode lnmode, rnmode;
5192 tree ll_mask, lr_mask, rl_mask, rr_mask;
5193 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5194 tree l_const, r_const;
5195 tree lntype, rntype, result;
5196 int first_bit, end_bit;
/* Originals kept so we can tell later whether anything changed.  */
5198 tree orig_lhs = lhs, orig_rhs = rhs;
5199 enum tree_code orig_code = code;
5201 /* Start by getting the comparison codes. Fail if anything is volatile.
5202 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5203 it were surrounded with a NE_EXPR. */
5205 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5208 lcode = TREE_CODE (lhs);
5209 rcode = TREE_CODE (rhs);
/* Canonicalize (x & 1) to (x & 1) != 0 on either side.  */
5211 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5213 lhs = build2 (NE_EXPR, truth_type, lhs,
5214 build_int_cst (TREE_TYPE (lhs), 0));
5218 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5220 rhs = build2 (NE_EXPR, truth_type, rhs,
5221 build_int_cst (TREE_TYPE (rhs), 0));
5225 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5226 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5229 ll_arg = TREE_OPERAND (lhs, 0);
5230 lr_arg = TREE_OPERAND (lhs, 1);
5231 rl_arg = TREE_OPERAND (rhs, 0);
5232 rr_arg = TREE_OPERAND (rhs, 1);
5234 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5235 if (simple_operand_p (ll_arg)
5236 && simple_operand_p (lr_arg))
/* Same operands in the same order: combine the two comparison codes.  */
5239 if (operand_equal_p (ll_arg, rl_arg, 0)
5240 && operand_equal_p (lr_arg, rr_arg, 0))
5242 result = combine_comparisons (code, lcode, rcode,
5243 truth_type, ll_arg, lr_arg);
/* Same operands but swapped on the right comparison: swap its code
   first, then combine.  */
5247 else if (operand_equal_p (ll_arg, rr_arg, 0)
5248 && operand_equal_p (lr_arg, rl_arg, 0))
5250 result = combine_comparisons (code, lcode,
5251 swap_tree_comparison (rcode),
5252 truth_type, ll_arg, lr_arg);
/* From here on, treat the short-circuit codes as their plain forms.  */
5258 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5259 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5261 /* If the RHS can be evaluated unconditionally and its operands are
5262 simple, it wins to evaluate the RHS unconditionally on machines
5263 with expensive branches. In this case, this isn't a comparison
5264 that can be merged. Avoid doing this if the RHS is a floating-point
5265 comparison since those can trap. */
5267 if (BRANCH_COST >= 2
5268 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5269 && simple_operand_p (rl_arg)
5270 && simple_operand_p (rr_arg))
5272 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5273 if (code == TRUTH_OR_EXPR
5274 && lcode == NE_EXPR && integer_zerop (lr_arg)
5275 && rcode == NE_EXPR && integer_zerop (rr_arg)
5276 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5277 return build2 (NE_EXPR, truth_type,
5278 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5280 build_int_cst (TREE_TYPE (ll_arg), 0));
5282 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5283 if (code == TRUTH_AND_EXPR
5284 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5285 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5286 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5287 return build2 (EQ_EXPR, truth_type,
5288 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5290 build_int_cst (TREE_TYPE (ll_arg), 0));
5292 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
/* Only rebuild if something (code/lhs/rhs) actually changed, to avoid
   returning a tree identical to the input.  */
5294 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5295 return build2 (code, truth_type, lhs, rhs);
5300 /* See if the comparisons can be merged. Then get all the parameters for
5303 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5304 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decompose all four comparison operands into field references:
   inner object, bit size/position, mode, signedness and masks.  */
5308 ll_inner = decode_field_reference (ll_arg,
5309 &ll_bitsize, &ll_bitpos, &ll_mode,
5310 &ll_unsignedp, &volatilep, &ll_mask,
5312 lr_inner = decode_field_reference (lr_arg,
5313 &lr_bitsize, &lr_bitpos, &lr_mode,
5314 &lr_unsignedp, &volatilep, &lr_mask,
5316 rl_inner = decode_field_reference (rl_arg,
5317 &rl_bitsize, &rl_bitpos, &rl_mode,
5318 &rl_unsignedp, &volatilep, &rl_mask,
5320 rr_inner = decode_field_reference (rr_arg,
5321 &rr_bitsize, &rr_bitpos, &rr_mode,
5322 &rr_unsignedp, &volatilep, &rr_mask,
5325 /* It must be true that the inner operation on the lhs of each
5326 comparison must be the same if we are to be able to do anything.
5327 Then see if we have constants. If not, the same must be true for
5329 if (volatilep || ll_inner == 0 || rl_inner == 0
5330 || ! operand_equal_p (ll_inner, rl_inner, 0))
5333 if (TREE_CODE (lr_arg) == INTEGER_CST
5334 && TREE_CODE (rr_arg) == INTEGER_CST)
5335 l_const = lr_arg, r_const = rr_arg;
5336 else if (lr_inner == 0 || rr_inner == 0
5337 || ! operand_equal_p (lr_inner, rr_inner, 0))
5340 l_const = r_const = 0;
5342 /* If either comparison code is not correct for our logical operation,
5343 fail. However, we can convert a one-bit comparison against zero into
5344 the opposite comparison against that bit being set in the field. */
5346 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5347 if (lcode != wanted_code)
5349 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5351 /* Make the left operand unsigned, since we are only interested
5352 in the value of one bit. Otherwise we are doing the wrong
5361 /* This is analogous to the code for l_const above. */
5362 if (rcode != wanted_code)
5364 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5373 /* See if we can find a mode that contains both fields being compared on
5374 the left. If we can't, fail. Otherwise, update all constants and masks
5375 to be relative to a field of that size. */
5376 first_bit = MIN (ll_bitpos, rl_bitpos);
5377 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5378 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5379 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5381 if (lnmode == VOIDmode)
5384 lnbitsize = GET_MODE_BITSIZE (lnmode);
/* Align the combined field's start down to a mode-size boundary.  */
5385 lnbitpos = first_bit & ~ (lnbitsize - 1);
5386 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
/* x*_bitpos are positions relative to the start of the combined field.  */
5387 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5389 if (BYTES_BIG_ENDIAN)
5391 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5392 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5395 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5396 size_int (xll_bitpos), 0);
5397 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5398 size_int (xrl_bitpos), 0);
/* Shift the constants into field position; if any constant bit falls
   outside its mask the comparison is decidable at compile time.  */
5402 l_const = fold_convert (lntype, l_const);
5403 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5404 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5405 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5406 fold_build1 (BIT_NOT_EXPR,
5410 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5412 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5417 r_const = fold_convert (lntype, r_const);
5418 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5419 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5420 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5421 fold_build1 (BIT_NOT_EXPR,
5425 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5427 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5431 /* If the right sides are not constant, do the same for it. Also,
5432 disallow this optimization if a size or signedness mismatch occurs
5433 between the left and right sides. */
5436 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5437 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5438 /* Make sure the two fields on the right
5439 correspond to the left without being swapped. */
5440 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
/* Mirror of the left-hand computation above for the right-hand fields.  */
5443 first_bit = MIN (lr_bitpos, rr_bitpos);
5444 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5445 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5446 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5448 if (rnmode == VOIDmode)
5451 rnbitsize = GET_MODE_BITSIZE (rnmode);
5452 rnbitpos = first_bit & ~ (rnbitsize - 1);
5453 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5454 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5456 if (BYTES_BIG_ENDIAN)
5458 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5459 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5462 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5463 size_int (xlr_bitpos), 0);
5464 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5465 size_int (xrr_bitpos), 0);
5467 /* Make a mask that corresponds to both fields being compared.
5468 Do this for both items being compared. If the operands are the
5469 same size and the bits being compared are in the same position
5470 then we can do this by masking both and comparing the masked
5472 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5473 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5474 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5476 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5477 ll_unsignedp || rl_unsignedp);
/* Skip the AND when the mask already covers the whole field.  */
5478 if (! all_ones_mask_p (ll_mask, lnbitsize))
5479 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5481 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5482 lr_unsignedp || rr_unsignedp);
5483 if (! all_ones_mask_p (lr_mask, rnbitsize))
5484 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5486 return build2 (wanted_code, truth_type, lhs, rhs);
5489 /* There is still another way we can do something: If both pairs of
5490 fields being compared are adjacent, we may be able to make a wider
5491 field containing them both.
5493 Note that we still must mask the lhs/rhs expressions. Furthermore,
5494 the mask must be shifted to account for the shift done by
5495 make_bit_field_ref. */
5496 if ((ll_bitsize + ll_bitpos == rl_bitpos
5497 && lr_bitsize + lr_bitpos == rr_bitpos)
5498 || (ll_bitpos == rl_bitpos + rl_bitsize
5499 && lr_bitpos == rr_bitpos + rr_bitsize))
5503 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5504 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5505 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5506 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5508 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5509 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5510 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5511 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5513 /* Convert to the smaller type before masking out unwanted bits. */
5515 if (lntype != rntype)
5517 if (lnbitsize > rnbitsize)
5519 lhs = fold_convert (rntype, lhs);
5520 ll_mask = fold_convert (rntype, ll_mask);
5523 else if (lnbitsize < rnbitsize)
5525 rhs = fold_convert (lntype, rhs);
5526 lr_mask = fold_convert (lntype, lr_mask);
5531 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5532 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5534 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5535 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5537 return build2 (wanted_code, truth_type, lhs, rhs);
5543 /* Handle the case of comparisons with constants. If there is something in
5544 common between the masks, those bits of the constants must be the same.
5545 If not, the condition is always false. Test for this to avoid generating
5546 incorrect code below. */
5547 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5548 if (! integer_zerop (result)
5549 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5550 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5552 if (wanted_code == NE_EXPR)
5554 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5555 return constant_boolean_node (true, truth_type);
5559 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5560 return constant_boolean_node (false, truth_type);
5564 /* Construct the expression we will return. First get the component
5565 reference we will make. Unless the mask is all ones the width of
5566 that field, perform the mask operation. Then compare with the
5568 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5569 ll_unsignedp || rl_unsignedp);
5571 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5572 if (! all_ones_mask_p (ll_mask, lnbitsize))
5573 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
/* Final merged form: (field & mask) CMP (l_const | r_const).  */
5575 return build2 (wanted_code, truth_type, result,
5576 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
/* NOTE(review): extract is missing lines (original numbering jumps, e.g.
   5587→5589); declarations such as arg0/inner/minmax_const and some case
   labels/braces are not visible here.  */
5579 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5583 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5586 enum tree_code op_code;
5587 tree comp_const = op1;
5589 int consts_equal, consts_lt;
5592 STRIP_SIGN_NOPS (arg0);
5594 op_code = TREE_CODE (arg0);
/* minmax_const is the constant inside MIN/MAX; comp_const is the one
   being compared against.  Record their ordering once up front.  */
5595 minmax_const = TREE_OPERAND (arg0, 1);
5596 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5597 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5598 inner = TREE_OPERAND (arg0, 0);
5600 /* If something does not permit us to optimize, return the original tree. */
5601 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5602 || TREE_CODE (comp_const) != INTEGER_CST
5603 || TREE_OVERFLOW (comp_const)
5604 || TREE_CODE (minmax_const) != INTEGER_CST
5605 || TREE_OVERFLOW (minmax_const))
5608 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5609 and GT_EXPR, doing the rest with recursive calls using logical
/* NE/LT/LE: fold the inverted comparison, then invert the result.  */
5613 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5615 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5618 return invert_truthvalue (tem);
/* GE: expressed as (== comp_const) || (> comp_const).  */
5624 fold_build2 (TRUTH_ORIF_EXPR, type,
5625 optimize_minmax_comparison
5626 (EQ_EXPR, type, arg0, comp_const),
5627 optimize_minmax_comparison
5628 (GT_EXPR, type, arg0, comp_const));
/* EQ cases — examples below use MIN/MAX (X, 0) for illustration.  */
5631 if (op_code == MAX_EXPR && consts_equal)
5632 /* MAX (X, 0) == 0 -> X <= 0 */
5633 return fold_build2 (LE_EXPR, type, inner, comp_const);
5635 else if (op_code == MAX_EXPR && consts_lt)
5636 /* MAX (X, 0) == 5 -> X == 5 */
5637 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5639 else if (op_code == MAX_EXPR)
5640 /* MAX (X, 0) == -1 -> false */
5641 return omit_one_operand (type, integer_zero_node, inner);
5643 else if (consts_equal)
5644 /* MIN (X, 0) == 0 -> X >= 0 */
5645 return fold_build2 (GE_EXPR, type, inner, comp_const);
5648 /* MIN (X, 0) == 5 -> false */
5649 return omit_one_operand (type, integer_zero_node, inner);
5652 /* MIN (X, 0) == -1 -> X == -1 */
5653 return fold_build2 (EQ_EXPR, type, inner, comp_const);
/* GT cases.  */
5656 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5657 /* MAX (X, 0) > 0 -> X > 0
5658 MAX (X, 0) > 5 -> X > 5 */
5659 return fold_build2 (GT_EXPR, type, inner, comp_const);
5661 else if (op_code == MAX_EXPR)
5662 /* MAX (X, 0) > -1 -> true */
5663 return omit_one_operand (type, integer_one_node, inner);
5665 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5666 /* MIN (X, 0) > 0 -> false
5667 MIN (X, 0) > 5 -> false */
5668 return omit_one_operand (type, integer_zero_node, inner);
5671 /* MIN (X, 0) > -1 -> X > -1 */
5672 return fold_build2 (GT_EXPR, type, inner, comp_const);
/* NOTE(review): extract is incomplete — the recursion-depth guard
   (original lines 5709-5716) and the function's closing lines are missing;
   only the header comment, the signature and the delegating call to
   extract_muldiv_1 survive.  */
5679 /* T is an integer expression that is being multiplied, divided, or taken a
5680 modulus (CODE says which and what kind of divide or modulus) by a
5681 constant C. See if we can eliminate that operation by folding it with
5682 other operations already in T. WIDE_TYPE, if non-null, is a type that
5683 should be used for the computation if wider than our type.
5685 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5686 (X * 2) + (Y * 4). We must, however, be assured that either the original
5687 expression would not overflow or that overflow is undefined for the type
5688 in the language in question.
5690 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5691 the machine has a multiply-accumulate insn or that this is part of an
5692 addressing calculation.
5694 If we return a non-null expression, it is an equivalent form of the
5695 original computation, but need not be in the original type.
5697 We set *STRICT_OVERFLOW_P to true if the return values depends on
5698 signed overflow being undefined. Otherwise we do not change
5699 *STRICT_OVERFLOW_P. */
5702 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5703 bool *strict_overflow_p)
5705 /* To avoid exponential search depth, refuse to allow recursion past
5706 three levels. Beyond that (1) it's highly unlikely that we'll find
5707 something interesting and (2) we've probably processed it before
5708 when we built the inner expression. */
/* Wrapper: the real work happens in extract_muldiv_1; this level exists
   to bound the mutual recursion per the comment above.  */
5717 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5724 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5725 bool *strict_overflow_p)
5727 tree type = TREE_TYPE (t);
5728 enum tree_code tcode = TREE_CODE (t);
5729 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5730 > GET_MODE_SIZE (TYPE_MODE (type)))
5731 ? wide_type : type);
5733 int same_p = tcode == code;
5734 tree op0 = NULL_TREE, op1 = NULL_TREE;
5735 bool sub_strict_overflow_p;
5737 /* Don't deal with constants of zero here; they confuse the code below. */
5738 if (integer_zerop (c))
5741 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5742 op0 = TREE_OPERAND (t, 0);
5744 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5745 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5747 /* Note that we need not handle conditional operations here since fold
5748 already handles those cases. So just do arithmetic here. */
5752 /* For a constant, we can always simplify if we are a multiply
5753 or (for divide and modulus) if it is a multiple of our constant. */
5754 if (code == MULT_EXPR
5755 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5756 return const_binop (code, fold_convert (ctype, t),
5757 fold_convert (ctype, c), 0);
5760 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5761 /* If op0 is an expression ... */
5762 if ((COMPARISON_CLASS_P (op0)
5763 || UNARY_CLASS_P (op0)
5764 || BINARY_CLASS_P (op0)
5765 || VL_EXP_CLASS_P (op0)
5766 || EXPRESSION_CLASS_P (op0))
5767 /* ... and is unsigned, and its type is smaller than ctype,
5768 then we cannot pass through as widening. */
5769 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5770 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5771 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5772 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5773 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5774 /* ... or this is a truncation (t is narrower than op0),
5775 then we cannot pass through this narrowing. */
5776 || (GET_MODE_SIZE (TYPE_MODE (type))
5777 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5778 /* ... or signedness changes for division or modulus,
5779 then we cannot pass through this conversion. */
5780 || (code != MULT_EXPR
5781 && (TYPE_UNSIGNED (ctype)
5782 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5785 /* Pass the constant down and see if we can make a simplification. If
5786 we can, replace this expression with the inner simplification for
5787 possible later conversion to our or some other type. */
5788 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5789 && TREE_CODE (t2) == INTEGER_CST
5790 && !TREE_OVERFLOW (t2)
5791 && (0 != (t1 = extract_muldiv (op0, t2, code,
5793 ? ctype : NULL_TREE,
5794 strict_overflow_p))))
5799 /* If widening the type changes it from signed to unsigned, then we
5800 must avoid building ABS_EXPR itself as unsigned. */
5801 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5803 tree cstype = (*signed_type_for) (ctype);
5804 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5807 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5808 return fold_convert (ctype, t1);
5814 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5816 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5819 case MIN_EXPR: case MAX_EXPR:
5820 /* If widening the type changes the signedness, then we can't perform
5821 this optimization as that changes the result. */
5822 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5825 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5826 sub_strict_overflow_p = false;
5827 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5828 &sub_strict_overflow_p)) != 0
5829 && (t2 = extract_muldiv (op1, c, code, wide_type,
5830 &sub_strict_overflow_p)) != 0)
5832 if (tree_int_cst_sgn (c) < 0)
5833 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5834 if (sub_strict_overflow_p)
5835 *strict_overflow_p = true;
5836 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5837 fold_convert (ctype, t2));
5841 case LSHIFT_EXPR: case RSHIFT_EXPR:
5842 /* If the second operand is constant, this is a multiplication
5843 or floor division, by a power of two, so we can treat it that
5844 way unless the multiplier or divisor overflows. Signed
5845 left-shift overflow is implementation-defined rather than
5846 undefined in C90, so do not convert signed left shift into
5848 if (TREE_CODE (op1) == INTEGER_CST
5849 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5850 /* const_binop may not detect overflow correctly,
5851 so check for it explicitly here. */
5852 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5853 && TREE_INT_CST_HIGH (op1) == 0
5854 && 0 != (t1 = fold_convert (ctype,
5855 const_binop (LSHIFT_EXPR,
5858 && !TREE_OVERFLOW (t1))
5859 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5860 ? MULT_EXPR : FLOOR_DIV_EXPR,
5861 ctype, fold_convert (ctype, op0), t1),
5862 c, code, wide_type, strict_overflow_p);
5865 case PLUS_EXPR: case MINUS_EXPR:
5866 /* See if we can eliminate the operation on both sides. If we can, we
5867 can return a new PLUS or MINUS. If we can't, the only remaining
5868 cases where we can do anything are if the second operand is a
5870 sub_strict_overflow_p = false;
5871 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5872 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5873 if (t1 != 0 && t2 != 0
5874 && (code == MULT_EXPR
5875 /* If not multiplication, we can only do this if both operands
5876 are divisible by c. */
5877 || (multiple_of_p (ctype, op0, c)
5878 && multiple_of_p (ctype, op1, c))))
5880 if (sub_strict_overflow_p)
5881 *strict_overflow_p = true;
5882 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5883 fold_convert (ctype, t2));
5886 /* If this was a subtraction, negate OP1 and set it to be an addition.
5887 This simplifies the logic below. */
5888 if (tcode == MINUS_EXPR)
5889 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5891 if (TREE_CODE (op1) != INTEGER_CST)
5894 /* If either OP1 or C are negative, this optimization is not safe for
5895 some of the division and remainder types while for others we need
5896 to change the code. */
5897 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5899 if (code == CEIL_DIV_EXPR)
5900 code = FLOOR_DIV_EXPR;
5901 else if (code == FLOOR_DIV_EXPR)
5902 code = CEIL_DIV_EXPR;
5903 else if (code != MULT_EXPR
5904 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5908 /* If it's a multiply or a division/modulus operation of a multiple
5909 of our constant, do the operation and verify it doesn't overflow. */
5910 if (code == MULT_EXPR
5911 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5913 op1 = const_binop (code, fold_convert (ctype, op1),
5914 fold_convert (ctype, c), 0);
5915 /* We allow the constant to overflow with wrapping semantics. */
5917 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5923 /* If we have an unsigned type is not a sizetype, we cannot widen
5924 the operation since it will change the result if the original
5925 computation overflowed. */
5926 if (TYPE_UNSIGNED (ctype)
5927 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5931 /* If we were able to eliminate our operation from the first side,
5932 apply our operation to the second side and reform the PLUS. */
5933 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5934 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5936 /* The last case is if we are a multiply. In that case, we can
5937 apply the distributive law to commute the multiply and addition
5938 if the multiplication of the constants doesn't overflow. */
5939 if (code == MULT_EXPR)
5940 return fold_build2 (tcode, ctype,
5941 fold_build2 (code, ctype,
5942 fold_convert (ctype, op0),
5943 fold_convert (ctype, c)),
5949 /* We have a special case here if we are doing something like
5950 (C * 8) % 4 since we know that's zero. */
5951 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5952 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5953 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5954 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5955 return omit_one_operand (type, integer_zero_node, op0);
5957 /* ... fall through ... */
5959 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5960 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5961 /* If we can extract our operation from the LHS, do so and return a
5962 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5963 do something only if the second operand is a constant. */
5965 && (t1 = extract_muldiv (op0, c, code, wide_type,
5966 strict_overflow_p)) != 0)
5967 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5968 fold_convert (ctype, op1));
5969 else if (tcode == MULT_EXPR && code == MULT_EXPR
5970 && (t1 = extract_muldiv (op1, c, code, wide_type,
5971 strict_overflow_p)) != 0)
5972 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5973 fold_convert (ctype, t1));
5974 else if (TREE_CODE (op1) != INTEGER_CST)
5977 /* If these are the same operation types, we can associate them
5978 assuming no overflow. */
5980 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5981 fold_convert (ctype, c), 0))
5982 && !TREE_OVERFLOW (t1))
5983 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5985 /* If these operations "cancel" each other, we have the main
5986 optimizations of this pass, which occur when either constant is a
5987 multiple of the other, in which case we replace this with either an
5988 operation or CODE or TCODE.
5990 If we have an unsigned type that is not a sizetype, we cannot do
5991 this since it will change the result if the original computation
5993 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5994 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5995 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5996 || (tcode == MULT_EXPR
5997 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5998 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
6000 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6002 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6003 *strict_overflow_p = true;
6004 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6005 fold_convert (ctype,
6006 const_binop (TRUNC_DIV_EXPR,
6009 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6011 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6012 *strict_overflow_p = true;
6013 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6014 fold_convert (ctype,
6015 const_binop (TRUNC_DIV_EXPR,
6028 /* Return a node which has the indicated constant VALUE (either 0 or
6029 1), and is of the indicated TYPE. */
6032 constant_boolean_node (int value, tree type)
6034 if (type == integer_type_node)
6035 return value ? integer_one_node : integer_zero_node;
6036 else if (type == boolean_type_node)
6037 return value ? boolean_true_node : boolean_false_node;
6039 return build_int_cst (type, value);
/* NOTE(review): this excerpt is a numbered listing with dropped lines
   (the embedded numbering skips, e.g. 6049-6050, 6055, 6062, 6064-6065,
   6069-6072, 6088-6090, 6094-6099, 6103-6112), so condition bodies,
   braces and several return statements are missing below.  Comments
   only were added; the surviving code text is untouched.  */
6043 /* Return true if expr looks like an ARRAY_REF and set base and
6044 offset to the appropriate trees. If there is no offset,
6045 offset is set to NULL_TREE. Base will be canonicalized to
6046 something you can get the element type from using
6047 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
6048 in bytes to the base in sizetype. */
6051 extract_array_ref (tree expr, tree *base, tree *offset)
6053 /* One canonical form is a PLUS_EXPR with the first
6054 argument being an ADDR_EXPR with a possible NOP_EXPR
6056 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
6058 tree op0 = TREE_OPERAND (expr, 0);
6059 tree inner_base, dummy1;
6060 /* Strip NOP_EXPRs here because the C frontends and/or
6061 folders present us (int *)&x.a p+ 4 possibly. */
/* Recursive case: decompose the pointer operand, then fold this
   POINTER_PLUS_EXPR's byte offset (converted to sizetype) into any
   offset the inner reference already had.  */
6063 if (extract_array_ref (op0, &inner_base, &dummy1))
6066 *offset = fold_convert (sizetype, TREE_OPERAND (expr, 1));
6067 if (dummy1 != NULL_TREE)
6068 *offset = fold_build2 (PLUS_EXPR, sizetype,
6073 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
6074 which we transform into an ADDR_EXPR with appropriate
6075 offset. For other arguments to the ADDR_EXPR we assume
6076 zero offset and as such do not care about the ADDR_EXPR
6077 type and strip possible nops from it. */
6078 else if (TREE_CODE (expr) == ADDR_EXPR)
6080 tree op0 = TREE_OPERAND (expr, 0);
6081 if (TREE_CODE (op0) == ARRAY_REF)
6083 tree idx = TREE_OPERAND (op0, 1);
6084 *base = TREE_OPERAND (op0, 0);
/* Byte offset = index * element size, normalized to sizetype.  */
6085 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
6086 array_ref_element_size (op0));
6087 *offset = fold_convert (sizetype, *offset);
6091 /* Handle array-to-pointer decay as &a. */
6092 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
6093 *base = TREE_OPERAND (expr, 0);
6096 *offset = NULL_TREE;
6100 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
6101 else if (SSA_VAR_P (expr)
6102 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
6105 *offset = NULL_TREE;
/* NOTE(review): numbered listing with dropped lines (e.g. 6120-6122,
   6126, 6137-6138, 6146, 6148, 6150-6153, 6158-6159, 6161-6162, 6164,
   6166, 6168-6170) -- early returns, braces and the COND_FIRST_P
   if/else framing around the fold_build2 pairs are missing here.
   Comments only were added.  */
6113 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6114 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6115 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6116 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6117 COND is the first argument to CODE; otherwise (as in the example
6118 given here), it is the second argument. TYPE is the type of the
6119 original expression. Return NULL_TREE if no simplification is
6123 fold_binary_op_with_conditional_arg (enum tree_code code,
6124 tree type, tree op0, tree op1,
6125 tree cond, tree arg, int cond_first_p)
6127 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6128 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6129 tree test, true_value, false_value;
6130 tree lhs = NULL_TREE;
6131 tree rhs = NULL_TREE;
6133 /* This transformation is only worthwhile if we don't have to wrap
6134 arg in a SAVE_EXPR, and the operation can be simplified on at least
6135 one of the branches once it's pushed inside the COND_EXPR. */
6136 if (!TREE_CONSTANT (arg))
6139 if (TREE_CODE (cond) == COND_EXPR)
6141 test = TREE_OPERAND (cond, 0);
6142 true_value = TREE_OPERAND (cond, 1);
6143 false_value = TREE_OPERAND (cond, 2);
6144 /* If this operand throws an expression, then it does not make
6145 sense to try to perform a logical or arithmetic operation
6147 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6149 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* Otherwise COND is a comparison (e.g. `x < y'): treat it as a
   two-way choice between constant true/false of its own type.  */
6154 tree testtype = TREE_TYPE (cond);
6156 true_value = constant_boolean_node (true, testtype);
6157 false_value = constant_boolean_node (false, testtype);
6160 arg = fold_convert (arg_type, arg);
6163 true_value = fold_convert (cond_type, true_value);
/* Operand order below depends on COND_FIRST_P (framing ifs dropped
   from this listing).  */
6165 lhs = fold_build2 (code, type, true_value, arg);
6167 lhs = fold_build2 (code, type, arg, true_value);
6171 false_value = fold_convert (cond_type, false_value);
6173 rhs = fold_build2 (code, type, false_value, arg);
6175 rhs = fold_build2 (code, type, arg, false_value);
6178 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6179 return fold_convert (type, test);
6183 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6185 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6186 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6187 ADDEND is the same as X.
6189 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6190 and finite. The problematic cases are when X is zero, and its mode
6191 has signed zeros. In the case of rounding towards -infinity,
6192 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6193 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6196 fold_real_zero_addition_p (tree type, tree addend, int negate)
6198 if (!real_zerop (addend))
6201 /* Don't allow the fold with -fsignaling-nans. */
6202 if (HONOR_SNANS (TYPE_MODE (type)))
6205 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6206 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6209 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6210 if (TREE_CODE (addend) == REAL_CST
6211 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6214 /* The mode has signed zeros, and we have to honor their sign.
6215 In this situation, there is only one case we can return true for.
6216 X - 0 is the same as X unless rounding towards -infinity is
6218 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
/* NOTE(review): numbered listing with dropped lines (e.g. 6235-6237,
   6239, 6242, 6245, 6260-6262, 6265, 6283-6285, 6311-6312, 6317,
   6320-6322, 6331, 6336, 6339-6346) -- braces, local declarations
   (c, c2) and some build_real arguments are missing here.  Comments
   only were added.  */
6221 /* Subroutine of fold() that checks comparisons of built-in math
6222 functions against real constants.
6224 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6225 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6226 is the type of the result and ARG0 and ARG1 are the operands of the
6227 comparison. ARG1 must be a TREE_REAL_CST.
6229 The function returns the constant folded tree if a simplification
6230 can be made, and NULL_TREE otherwise. */
6233 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6234 tree type, tree arg0, tree arg1)
6238 if (BUILTIN_SQRT_P (fcode))
6240 tree arg = CALL_EXPR_ARG (arg0, 0);
6241 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6243 c = TREE_REAL_CST (arg1);
6244 if (REAL_VALUE_NEGATIVE (c))
6246 /* sqrt(x) < y is always false, if y is negative. */
6247 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6248 return omit_one_operand (type, integer_zero_node, arg);
6250 /* sqrt(x) > y is always true, if y is negative and we
6251 don't care about NaNs, i.e. negative values of x. */
6252 if (code == NE_EXPR || !HONOR_NANS (mode))
6253 return omit_one_operand (type, integer_one_node, arg);
6255 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6256 return fold_build2 (GE_EXPR, type, arg,
6257 build_real (TREE_TYPE (arg), dconst0));
6259 else if (code == GT_EXPR || code == GE_EXPR)
/* Square the bound: c2 = c*c in the target mode, so sqrt(x) OP c
   can be rewritten as x OP c2.  */
6263 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6264 real_convert (&c2, mode, &c2);
6266 if (REAL_VALUE_ISINF (c2))
6268 /* sqrt(x) > y is x == +Inf, when y is very large. */
6269 if (HONOR_INFINITIES (mode))
6270 return fold_build2 (EQ_EXPR, type, arg,
6271 build_real (TREE_TYPE (arg), c2));
6273 /* sqrt(x) > y is always false, when y is very large
6274 and we don't care about infinities. */
6275 return omit_one_operand (type, integer_zero_node, arg);
6278 /* sqrt(x) > c is the same as x > c*c. */
6279 return fold_build2 (code, type, arg,
6280 build_real (TREE_TYPE (arg), c2));
6282 else if (code == LT_EXPR || code == LE_EXPR)
6286 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6287 real_convert (&c2, mode, &c2);
6289 if (REAL_VALUE_ISINF (c2))
6291 /* sqrt(x) < y is always true, when y is a very large
6292 value and we don't care about NaNs or Infinities. */
6293 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6294 return omit_one_operand (type, integer_one_node, arg);
6296 /* sqrt(x) < y is x != +Inf when y is very large and we
6297 don't care about NaNs. */
6298 if (! HONOR_NANS (mode))
6299 return fold_build2 (NE_EXPR, type, arg,
6300 build_real (TREE_TYPE (arg), c2));
6302 /* sqrt(x) < y is x >= 0 when y is very large and we
6303 don't care about Infinities. */
6304 if (! HONOR_INFINITIES (mode))
6305 return fold_build2 (GE_EXPR, type, arg,
6306 build_real (TREE_TYPE (arg), dconst0));
6308 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
/* ARG must be re-evaluable (no new bindings, no placeholders)
   before we can duplicate it under a SAVE_EXPR.  */
6309 if (lang_hooks.decls.global_bindings_p () != 0
6310 || CONTAINS_PLACEHOLDER_P (arg))
6313 arg = save_expr (arg);
6314 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6315 fold_build2 (GE_EXPR, type, arg,
6316 build_real (TREE_TYPE (arg),
6318 fold_build2 (NE_EXPR, type, arg,
6319 build_real (TREE_TYPE (arg),
6323 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6324 if (! HONOR_NANS (mode))
6325 return fold_build2 (code, type, arg,
6326 build_real (TREE_TYPE (arg), c2));
6328 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6329 if (lang_hooks.decls.global_bindings_p () == 0
6330 && ! CONTAINS_PLACEHOLDER_P (arg))
6332 arg = save_expr (arg);
6333 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6334 fold_build2 (GE_EXPR, type, arg,
6335 build_real (TREE_TYPE (arg),
6337 fold_build2 (code, type, arg,
6338 build_real (TREE_TYPE (arg),
/* NOTE(review): numbered listing with dropped lines (e.g. 6356-6357,
   6359, 6362-6364, 6369, 6371-6374, 6377, 6379-6380, 6384, 6388,
   6391-6395, 6400-6401, 6406-6407, 6413, 6417-6424) -- the switch on
   CODE and its case labels are missing; only the per-case bodies
   survive below.  Comments only were added.  */
6347 /* Subroutine of fold() that optimizes comparisons against Infinities,
6348 either +Inf or -Inf.
6350 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6351 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6352 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6354 The function returns the constant folded tree if a simplification
6355 can be made, and NULL_TREE otherwise. */
6358 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6360 enum machine_mode mode;
6361 REAL_VALUE_TYPE max;
6365 mode = TYPE_MODE (TREE_TYPE (arg0));
6367 /* For negative infinity swap the sense of the comparison. */
6368 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6370 code = swap_tree_comparison (code);
6375 /* x > +Inf is always false, if we can ignore sNaNs. */
6376 if (HONOR_SNANS (mode))
6378 return omit_one_operand (type, integer_zero_node, arg0);
6381 /* x <= +Inf is always true, if we don't care about NaNs. */
6382 if (! HONOR_NANS (mode))
6383 return omit_one_operand (type, integer_one_node, arg0);
6385 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6386 if (lang_hooks.decls.global_bindings_p () == 0
6387 && ! CONTAINS_PLACEHOLDER_P (arg0))
6389 arg0 = save_expr (arg0);
6390 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6396 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6397 real_maxval (&max, neg, mode);
6398 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6399 arg0, build_real (TREE_TYPE (arg0), max));
6402 /* x < +Inf is always equal to x <= DBL_MAX. */
6403 real_maxval (&max, neg, mode);
6404 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6405 arg0, build_real (TREE_TYPE (arg0), max));
6408 /* x != +Inf is always equal to !(x > DBL_MAX). */
6409 real_maxval (&max, neg, mode);
6410 if (! HONOR_NANS (mode))
6411 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6412 arg0, build_real (TREE_TYPE (arg0), max));
/* With NaNs, build the comparison then negate it, so a NaN operand
   still yields false for the != +Inf form.  */
6414 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6415 arg0, build_real (TREE_TYPE (arg0), max));
6416 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
/* NOTE(review): numbered listing with dropped lines (e.g. 6427-6428,
   6435-6436, 6438, 6445-6447, 6456, 6458-6460, 6463-6464, 6473, 6475,
   6479-6480, 6483-6486, 6488-6501, 6508-6509, 6511-6519, 6522-6532,
   6540-6541, 6549-6550, 6581-6589) -- the switch on CODE, its case
   labels, several case bodies and closing braces are missing.
   Comments only were added.  */
6425 /* Subroutine of fold() that optimizes comparisons of a division by
6426 a nonzero integer constant against an integer constant, i.e.
6429 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6430 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6431 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6433 The function returns the constant folded tree if a simplification
6434 can be made, and NULL_TREE otherwise. */
6437 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6439 tree prod, tmp, hi, lo;
6440 tree arg00 = TREE_OPERAND (arg0, 0);
6441 tree arg01 = TREE_OPERAND (arg0, 1);
6442 unsigned HOST_WIDE_INT lpart;
6443 HOST_WIDE_INT hpart;
6444 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6448 /* We have to do this the hard way to detect unsigned overflow.
6449 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6450 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6451 TREE_INT_CST_HIGH (arg01),
6452 TREE_INT_CST_LOW (arg1),
6453 TREE_INT_CST_HIGH (arg1),
6454 &lpart, &hpart, unsigned_p);
6455 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6457 neg_overflow = false;
/* Compute [lo, hi], the range of numerators whose truncating
   division by ARG01 yields ARG1.  TMP is |divisor| - 1.  */
6461 tmp = int_const_binop (MINUS_EXPR, arg01,
6462 build_int_cst (TREE_TYPE (arg01), 1), 0);
6465 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6466 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6467 TREE_INT_CST_HIGH (prod),
6468 TREE_INT_CST_LOW (tmp),
6469 TREE_INT_CST_HIGH (tmp),
6470 &lpart, &hpart, unsigned_p);
6471 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6472 -1, overflow | TREE_OVERFLOW (prod));
6474 else if (tree_int_cst_sgn (arg01) >= 0)
6476 tmp = int_const_binop (MINUS_EXPR, arg01,
6477 build_int_cst (TREE_TYPE (arg01), 1), 0);
6478 switch (tree_int_cst_sgn (arg1))
6481 neg_overflow = true;
6482 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6487 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6492 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6502 /* A negative divisor reverses the relational operators. */
6503 code = swap_tree_comparison (code);
6505 tmp = int_const_binop (PLUS_EXPR, arg01,
6506 build_int_cst (TREE_TYPE (arg01), 1), 0);
6507 switch (tree_int_cst_sgn (arg1))
6510 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6515 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6520 neg_overflow = true;
6521 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Below: per-CODE emission.  An overflowed bound means the range is
   open on that side, so the check degenerates to a single compare or
   a constant result.  */
6533 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6534 return omit_one_operand (type, integer_zero_node, arg00);
6535 if (TREE_OVERFLOW (hi))
6536 return fold_build2 (GE_EXPR, type, arg00, lo);
6537 if (TREE_OVERFLOW (lo))
6538 return fold_build2 (LE_EXPR, type, arg00, hi);
6539 return build_range_check (type, arg00, 1, lo, hi);
6542 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6543 return omit_one_operand (type, integer_one_node, arg00);
6544 if (TREE_OVERFLOW (hi))
6545 return fold_build2 (LT_EXPR, type, arg00, lo);
6546 if (TREE_OVERFLOW (lo))
6547 return fold_build2 (GT_EXPR, type, arg00, hi);
6548 return build_range_check (type, arg00, 0, lo, hi);
6551 if (TREE_OVERFLOW (lo))
6553 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6554 return omit_one_operand (type, tmp, arg00);
6556 return fold_build2 (LT_EXPR, type, arg00, lo);
6559 if (TREE_OVERFLOW (hi))
6561 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6562 return omit_one_operand (type, tmp, arg00);
6564 return fold_build2 (LE_EXPR, type, arg00, hi);
6567 if (TREE_OVERFLOW (hi))
6569 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6570 return omit_one_operand (type, tmp, arg00);
6572 return fold_build2 (GT_EXPR, type, arg00, hi);
6575 if (TREE_OVERFLOW (lo))
6577 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6578 return omit_one_operand (type, tmp, arg00);
6580 return fold_build2 (GE_EXPR, type, arg00, lo);
6590 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6591 equality/inequality test, then return a simplified form of the test
6592 using a sign testing. Otherwise return NULL. TYPE is the desired
6596 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6599 /* If this is testing a single bit, we can optimize the test. */
6600 if ((code == NE_EXPR || code == EQ_EXPR)
6601 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6602 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6604 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6605 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6606 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6608 if (arg00 != NULL_TREE
6609 /* This is only a win if casting to a signed type is cheap,
6610 i.e. when arg00's type is not a partial mode. */
6611 && TYPE_PRECISION (TREE_TYPE (arg00))
6612 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6614 tree stype = signed_type_for (TREE_TYPE (arg00));
6615 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6616 result_type, fold_convert (stype, arg00),
6617 build_int_cst (stype, 0));
/* NOTE(review): numbered listing with dropped lines (e.g. 6628-6629,
   6631-6632, 6637, 6642, 6644-6645, 6647, 6649-6652, 6656, 6665,
   6668-6669, 6676-6679, 6684-6685, 6688, 6690, 6693, 6696, 6699-6704)
   -- the result_type parameter line, braces, the #else arm of the
   LOAD_EXTEND_OP conditional and the final returns are missing.
   Comments only were added.  */
6624 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6625 equality/inequality test, then return a simplified form of
6626 the test using shifts and logical operations. Otherwise return
6627 NULL. TYPE is the desired result type. */
6630 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6633 /* If this is testing a single bit, we can optimize the test. */
6634 if ((code == NE_EXPR || code == EQ_EXPR)
6635 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6636 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6638 tree inner = TREE_OPERAND (arg0, 0);
6639 tree type = TREE_TYPE (arg0);
6640 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6641 enum machine_mode operand_mode = TYPE_MODE (type);
6643 tree signed_type, unsigned_type, intermediate_type;
6646 /* First, see if we can fold the single bit test into a sign-bit
6648 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6653 /* Otherwise we have (A & C) != 0 where C is a single bit,
6654 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6655 Similarly for (A & C) == 0. */
6657 /* If INNER is a right shift of a constant and it plus BITNUM does
6658 not overflow, adjust BITNUM and INNER. */
6659 if (TREE_CODE (inner) == RSHIFT_EXPR
6660 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6661 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6662 && bitnum < TYPE_PRECISION (type)
6663 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6664 bitnum - TYPE_PRECISION (type)))
6666 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6667 inner = TREE_OPERAND (inner, 0);
6670 /* If we are going to be able to omit the AND below, we must do our
6671 operations as unsigned. If we must use the AND, we have a choice.
6672 Normally unsigned is faster, but for some machines signed is. */
6673 #ifdef LOAD_EXTEND_OP
6674 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6675 && !flag_syntax_only) ? 0 : 1;
6680 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6681 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6682 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6683 inner = fold_convert (intermediate_type, inner);
6686 inner = build2 (RSHIFT_EXPR, intermediate_type,
6687 inner, size_int (bitnum));
6689 one = build_int_cst (intermediate_type, 1);
/* For the == 0 form, flip the extracted bit so the result is 1 when
   the bit was clear.  */
6691 if (code == EQ_EXPR)
6692 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6694 /* Put the AND last so it can combine with more things. */
6695 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6697 /* Make sure to return the proper type. */
6698 inner = fold_convert (result_type, inner);
6705 /* Check whether we are allowed to reorder operands arg0 and arg1,
6706 such that the evaluation of arg1 occurs before arg0. */
6709 reorder_operands_p (tree arg0, tree arg1)
6711 if (! flag_evaluation_order)
6713 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6715 return ! TREE_SIDE_EFFECTS (arg0)
6716 && ! TREE_SIDE_EFFECTS (arg1);
/* NOTE(review): numbered listing with dropped lines (e.g. 6723-6724,
   6726, 6729, 6731, 6733-6734, 6736, 6738-6739, 6741, 6743-6744, 6746,
   6748-6752, 6755-6756, 6764-6765, 6768, 6770-6771, 6773-6780) --
   every "return 0;"/"return 1;" line and the DECL_P tail of the
   function are missing.  Comments only were added.  */
6719 /* Test whether it is preferable to swap two operands, ARG0 and
6720 ARG1, for example because ARG0 is an integer constant and ARG1
6721 isn't. If REORDER is true, only recommend swapping if we can
6722 evaluate the operands in reverse order. */
6725 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6727 STRIP_SIGN_NOPS (arg0);
6728 STRIP_SIGN_NOPS (arg1);
/* Constants of any kind belong on the right (arg1 side); each pair
   below checks arg1 first so an already-canonical pair is not
   swapped.  */
6730 if (TREE_CODE (arg1) == INTEGER_CST)
6732 if (TREE_CODE (arg0) == INTEGER_CST)
6735 if (TREE_CODE (arg1) == REAL_CST)
6737 if (TREE_CODE (arg0) == REAL_CST)
6740 if (TREE_CODE (arg1) == COMPLEX_CST)
6742 if (TREE_CODE (arg0) == COMPLEX_CST)
6745 if (TREE_CONSTANT (arg1))
6747 if (TREE_CONSTANT (arg0))
6753 if (reorder && flag_evaluation_order
6754 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6757 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6758 for commutative and comparison operators. Ensuring a canonical
6759 form allows the optimizers to find additional redundancies without
6760 having to explicitly check for both orderings. */
6761 if (TREE_CODE (arg0) == SSA_NAME
6762 && TREE_CODE (arg1) == SSA_NAME
6763 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6766 /* Put SSA_NAMEs last. */
6767 if (TREE_CODE (arg1) == SSA_NAME)
6769 if (TREE_CODE (arg0) == SSA_NAME)
6772 /* Put variables last. */
/* NOTE(review): numbered listing with dropped lines (e.g. 6783-6784,
   6786, 6788, 6790-6792, 6794, 6796, 6803-6805, 6807-6808, 6810, 6821,
   6825-6826, 6832, 6834, 6836-6841, 6843-6846, 6848-6852, 6854,
   6856-6859, 6861, 6863-6870) -- local declarations, early returns,
   the switch on CODE with its case labels, and closing braces are
   missing.  Comments only were added.  */
6781 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6782 ARG0 is extended to a wider type. */
6785 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6787 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6789 tree shorter_type, outer_type;
6793 if (arg0_unw == arg0)
6795 shorter_type = TREE_TYPE (arg0_unw);
6797 #ifdef HAVE_canonicalize_funcptr_for_compare
6798 /* Disable this optimization if we're casting a function pointer
6799 type on targets that require function pointer canonicalization. */
6800 if (HAVE_canonicalize_funcptr_for_compare
6801 && TREE_CODE (shorter_type) == POINTER_TYPE
6802 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6806 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6809 arg1_unw = get_unwidened (arg1, shorter_type);
6811 /* If possible, express the comparison in the shorter mode. */
6812 if ((code == EQ_EXPR || code == NE_EXPR
6813 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6814 && (TREE_TYPE (arg1_unw) == shorter_type
6815 || (TREE_CODE (arg1_unw) == INTEGER_CST
6816 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6817 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6818 && int_fits_type_p (arg1_unw, shorter_type))))
6819 return fold_build2 (code, type, arg0_unw,
6820 fold_convert (shorter_type, arg1_unw));
6822 if (TREE_CODE (arg1_unw) != INTEGER_CST
6823 || TREE_CODE (shorter_type) != INTEGER_TYPE
6824 || !int_fits_type_p (arg1_unw, shorter_type))
6827 /* If we are comparing with the integer that does not fit into the range
6828 of the shorter type, the result is known. */
6829 outer_type = TREE_TYPE (arg1_unw);
6830 min = lower_bound_in_type (outer_type, shorter_type);
6831 max = upper_bound_in_type (outer_type, shorter_type);
/* above/below record whether the constant lies above shorter_type's
   maximum or below its minimum; the (dropped) switch on CODE then
   emits a constant 0/1 result for each comparison kind.  */
6833 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6835 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6842 return omit_one_operand (type, integer_zero_node, arg0);
6847 return omit_one_operand (type, integer_one_node, arg0);
6853 return omit_one_operand (type, integer_one_node, arg0);
6855 return omit_one_operand (type, integer_zero_node, arg0);
6860 return omit_one_operand (type, integer_zero_node, arg0);
6862 return omit_one_operand (type, integer_one_node, arg0);
/* NOTE(review): numbered listing with dropped lines (e.g. 6873-6874,
   6877-6878, 6880, 6883-6884, 6888, 6895-6897, 6899-6900, 6905-6906,
   6908-6911, 6916, 6918, 6920-6921) -- local declarations, early
   "return NULL_TREE;" lines, and the rest of the condition started at
   6907 (which continues past the unsignedness check) are missing.
   Comments only were added.  */
6871 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6872 ARG0 just the signedness is changed. */
6875 fold_sign_changed_comparison (enum tree_code code, tree type,
6876 tree arg0, tree arg1)
6879 tree inner_type, outer_type;
6881 if (TREE_CODE (arg0) != NOP_EXPR
6882 && TREE_CODE (arg0) != CONVERT_EXPR)
6885 outer_type = TREE_TYPE (arg0);
6886 arg0_inner = TREE_OPERAND (arg0, 0);
6887 inner_type = TREE_TYPE (arg0_inner);
6889 #ifdef HAVE_canonicalize_funcptr_for_compare
6890 /* Disable this optimization if we're casting a function pointer
6891 type on targets that require function pointer canonicalization. */
6892 if (HAVE_canonicalize_funcptr_for_compare
6893 && TREE_CODE (inner_type) == POINTER_TYPE
6894 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
/* The fold only applies when the conversion preserves precision.  */
6898 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6901 if (TREE_CODE (arg1) != INTEGER_CST
6902 && !((TREE_CODE (arg1) == NOP_EXPR
6903 || TREE_CODE (arg1) == CONVERT_EXPR)
6904 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6907 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
/* Re-express the constant in the inner type, preserving any
   recorded overflow, then compare in the inner type directly.  */
6912 if (TREE_CODE (arg1) == INTEGER_CST)
6913 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6914 TREE_INT_CST_HIGH (arg1), 0,
6915 TREE_OVERFLOW (arg1));
6917 arg1 = fold_convert (inner_type, arg1);
6919 return fold_build2 (code, type, arg0_inner, arg1);
/* NOTE(review): numbered listing with dropped lines (e.g. 6928-6929,
   6931, 6934-6937, 6939-6940, 6944, 6946-6949, 6951-6954, 6956-6962,
   6964-6969, 6971, 6973-6974, 6976, 6978, 6981-6982, 6984-6986,
   6989-6992, 6994-6997, 7000-7004, 7007-7010, 7015-7016, 7021,
   7024-7031, 7033-7035, 7038, 7041-7044, 7048-7049, 7054, 7056-7058)
   -- loop bodies, break/return statements and braces are missing.
   Comments only were added.  */
6922 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6923 step of the array. Reconstructs s and delta in the case of s * delta
6924 being an integer constant (and thus already folded).
6925 ADDR is the address. MULT is the multiplicative expression.
6926 If the function succeeds, the new address expression is returned. Otherwise
6927 NULL_TREE is returned. */
6930 try_move_mult_to_index (tree addr, tree op1)
6932 tree s, delta, step;
6933 tree ref = TREE_OPERAND (addr, 0), pref;
6938 /* Strip the nops that might be added when converting op1 to sizetype. */
6941 /* Canonicalize op1 into a possibly non-constant delta
6942 and an INTEGER_CST s. */
6943 if (TREE_CODE (op1) == MULT_EXPR)
6945 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6950 if (TREE_CODE (arg0) == INTEGER_CST)
6955 else if (TREE_CODE (arg1) == INTEGER_CST)
6963 else if (TREE_CODE (op1) == INTEGER_CST)
6970 /* Simulate we are delta * 1. */
6972 s = integer_one_node;
/* Walk inwards through the reference looking for an ARRAY_REF whose
   element size matches S (or divides DELTA).  */
6975 for (;; ref = TREE_OPERAND (ref, 0))
6977 if (TREE_CODE (ref) == ARRAY_REF)
6979 /* Remember if this was a multi-dimensional array. */
6980 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6983 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6987 step = array_ref_element_size (ref);
6988 if (TREE_CODE (step) != INTEGER_CST)
6993 if (! tree_int_cst_equal (step, s))
6998 /* Try if delta is a multiple of step. */
6999 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
7005 /* Only fold here if we can verify we do not overflow one
7006 dimension of a multi-dimensional array. */
7011 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7012 || !INTEGRAL_TYPE_P (itype)
7013 || !TYPE_MAX_VALUE (itype)
7014 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7017 tmp = fold_binary (PLUS_EXPR, itype,
7018 fold_convert (itype,
7019 TREE_OPERAND (ref, 1)),
7020 fold_convert (itype, delta))
7022 || TREE_CODE (tmp) != INTEGER_CST
7023 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7032 if (!handled_component_p (ref))
7036 /* We found the suitable array reference. So copy everything up to it,
7037 and replace the index. */
7039 pref = TREE_OPERAND (addr, 0);
7040 ret = copy_node (pref);
7045 pref = TREE_OPERAND (pref, 0);
7046 TREE_OPERAND (pos, 0) = copy_node (pref);
7047 pos = TREE_OPERAND (pos, 0);
7050 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7051 fold_convert (itype,
7052 TREE_OPERAND (pos, 1)),
7053 fold_convert (itype, delta));
7055 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
/* NOTE(review): numbered listing with dropped lines (e.g. 7062-7063,
   7065, 7067, 7072-7074, 7078-7079, 7081, 7084, 7086, 7089-7092,
   7094-7095, 7097, 7102-7103, 7105, 7107-7108, 7110-7111) -- the
   "return NULL_TREE;" failure paths, an "else" and closing braces are
   missing.  Comments only were added.  */
7059 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7060 means A >= Y && A != MAX, but in this case we know that
7061 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7064 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7066 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from BOUND, accepting either A < X or X > A.  */
7068 if (TREE_CODE (bound) == LT_EXPR)
7069 a = TREE_OPERAND (bound, 0);
7070 else if (TREE_CODE (bound) == GT_EXPR)
7071 a = TREE_OPERAND (bound, 1);
7075 typea = TREE_TYPE (a);
7076 if (!INTEGRAL_TYPE_P (typea)
7077 && !POINTER_TYPE_P (typea))
/* Extract A1 (expected to be A + 1) and Y from INEQ.  */
7080 if (TREE_CODE (ineq) == LT_EXPR)
7082 a1 = TREE_OPERAND (ineq, 1);
7083 y = TREE_OPERAND (ineq, 0);
7085 else if (TREE_CODE (ineq) == GT_EXPR)
7087 a1 = TREE_OPERAND (ineq, 0);
7088 y = TREE_OPERAND (ineq, 1);
7093 if (TREE_TYPE (a1) != typea)
7096 if (POINTER_TYPE_P (typea))
7098 /* Convert the pointer types into integer before taking the difference. */
7099 tree ta = fold_convert (ssizetype, a);
7100 tree ta1 = fold_convert (ssizetype, a1);
7101 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7104 diff = fold_binary (MINUS_EXPR, typea, a1, a);
/* The rewrite is only valid when A1 is exactly A + 1.  */
7106 if (!diff || !integer_onep (diff))
7109 return fold_build2 (GE_EXPR, type, a, y);
/* NOTE(review): numbered listing with dropped lines (e.g. 7114-7115,
   7117, 7120, 7126, 7128, 7131, 7133, 7135-7139, 7141, 7143, 7146,
   7148, 7150-7154, 7156-7158, 7167, 7173, 7175-7176, 7179, 7183,
   7186-7191, 7193, 7196-7199, 7201-7204, 7210-7213) -- braces, the
   INTEGER_CST/else assignments of arg00/arg10, the same/maybe_same
   bookkeeping and the final NULL_TREE return are missing.  Comments
   only were added.  */
7112 /* Fold a sum or difference of at least one multiplication.
7113 Returns the folded tree or NULL if no simplification could be made. */
7116 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7118 tree arg00, arg01, arg10, arg11;
7119 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7121 /* (A * C) +- (B * C) -> (A+-B) * C.
7122 (A * C) +- A -> A * (C+-1).
7123 We are most concerned about the case where C is a constant,
7124 but other combinations show up during loop reduction. Since
7125 it is not difficult, try all four possibilities. */
7127 if (TREE_CODE (arg0) == MULT_EXPR)
7129 arg00 = TREE_OPERAND (arg0, 0);
7130 arg01 = TREE_OPERAND (arg0, 1);
7132 else if (TREE_CODE (arg0) == INTEGER_CST)
7134 arg00 = build_one_cst (type);
7140 arg01 = build_one_cst (type);
7142 if (TREE_CODE (arg1) == MULT_EXPR)
7144 arg10 = TREE_OPERAND (arg1, 0);
7145 arg11 = TREE_OPERAND (arg1, 1);
7147 else if (TREE_CODE (arg1) == INTEGER_CST)
7149 arg10 = build_one_cst (type);
7155 arg11 = build_one_cst (type);
/* Look for a factor shared by the two products; SAME is the common
   factor, ALT0/ALT1 the remaining multiplicands.  */
7159 if (operand_equal_p (arg01, arg11, 0))
7160 same = arg01, alt0 = arg00, alt1 = arg10;
7161 else if (operand_equal_p (arg00, arg10, 0))
7162 same = arg00, alt0 = arg01, alt1 = arg11;
7163 else if (operand_equal_p (arg00, arg11, 0))
7164 same = arg00, alt0 = arg01, alt1 = arg10;
7165 else if (operand_equal_p (arg01, arg10, 0))
7166 same = arg01, alt0 = arg00, alt1 = arg11;
7168 /* No identical multiplicands; see if we can find a common
7169 power-of-two factor in non-power-of-two multiplies. This
7170 can help in multi-dimensional array access. */
7171 else if (host_integerp (arg01, 0)
7172 && host_integerp (arg11, 0))
7174 HOST_WIDE_INT int01, int11, tmp;
7177 int01 = TREE_INT_CST_LOW (arg01);
7178 int11 = TREE_INT_CST_LOW (arg11);
7180 /* Move min of absolute values to int11. */
7181 if ((int01 >= 0 ? int01 : -int01)
7182 < (int11 >= 0 ? int11 : -int11))
7184 tmp = int01, int01 = int11, int11 = tmp;
7185 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7192 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7194 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7195 build_int_cst (TREE_TYPE (arg00),
7200 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7205 return fold_build2 (MULT_EXPR, type,
7206 fold_build2 (code, type,
7207 fold_convert (type, alt0),
7208 fold_convert (type, alt1)),
7209 fold_convert (type, same));
7214 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7215    specified by EXPR into the buffer PTR of length LEN bytes.
7216    Return the number of bytes placed in the buffer, or zero
     upon failure (comment tail elided in this excerpt).  */
/* NOTE(review): excerpt elides the "return 0;" on overflow and the final
   "return total_bytes;".  Code lines kept byte-identical.  */
7220 native_encode_int (tree expr, unsigned char *ptr, int len)
7222   tree type = TREE_TYPE (expr);
7223   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7224   int byte, offset, word, words;
7225   unsigned char value;
     /* Fail if the constant does not fit in the caller's buffer.  */
7227   if (total_bytes > len)
7229   words = total_bytes / UNITS_PER_WORD;
7231   for (byte = 0; byte < total_bytes; byte++)
7233       int bitpos = byte * BITS_PER_UNIT;
      /* Pull each byte from the low or high HOST_WIDE_INT word of the
         two-word internal representation.  */
7234       if (bitpos < HOST_BITS_PER_WIDE_INT)
7235         value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7237         value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7238                                  >> (bitpos - HOST_BITS_PER_WIDE_INT));
      /* Map the logical byte index to the target's word/byte order.  */
7240       if (total_bytes > UNITS_PER_WORD)
7242           word = byte / UNITS_PER_WORD;
7243           if (WORDS_BIG_ENDIAN)
7244             word = (words - 1) - word;
7245           offset = word * UNITS_PER_WORD;
7246           if (BYTES_BIG_ENDIAN)
7247             offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7249             offset += byte % UNITS_PER_WORD;
7252         offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7253       ptr[offset] = value;
7259 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7260    specified by EXPR into the buffer PTR of length LEN bytes.
7261    Return the number of bytes placed in the buffer, or zero
     upon failure (comment tail elided in this excerpt).  */
/* NOTE(review): excerpt elides the "long tmp[6];" declaration, the
   failure "return 0;" and the final "return total_bytes;".  Code lines
   kept byte-identical.  */
7265 native_encode_real (tree expr, unsigned char *ptr, int len)
7267   tree type = TREE_TYPE (expr);
7268   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7269   int byte, offset, word, words, bitpos;
7270   unsigned char value;
7272   /* There are always 32 bits in each long, no matter the size of
7273      the hosts long.  We handle floating point representations with
     up to 192 bits (original comment continues; elided here).  */
7277   if (total_bytes > len)
7279   words = 32 / UNITS_PER_WORD;
     /* real_to_target fills tmp with 32-bit groups in target format.  */
7281   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7283   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7284        bitpos += BITS_PER_UNIT)
7286       byte = (bitpos / BITS_PER_UNIT) & 3;
7287       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
      /* Reorder bytes within each 32-bit group per target endianness.  */
7289       if (UNITS_PER_WORD < 4)
7291           word = byte / UNITS_PER_WORD;
7292           if (WORDS_BIG_ENDIAN)
7293             word = (words - 1) - word;
7294           offset = word * UNITS_PER_WORD;
7295           if (BYTES_BIG_ENDIAN)
7296             offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7298             offset += byte % UNITS_PER_WORD;
7301         offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7302       ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7307 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7308    specified by EXPR into the buffer PTR of length LEN bytes.
7309    Return the number of bytes placed in the buffer, or zero
     upon failure (comment tail elided in this excerpt).  */
/* NOTE(review): excerpt elides declarations of part/rsize/isize and the
   zero-size failure checks after each encode.  Code lines kept
   byte-identical.  */
7313 native_encode_complex (tree expr, unsigned char *ptr, int len)
     /* Encode real part first, then imaginary part immediately after it.  */
7318   part = TREE_REALPART (expr);
7319   rsize = native_encode_expr (part, ptr, len);
7322   part = TREE_IMAGPART (expr);
7323   isize = native_encode_expr (part, ptr+rsize, len-rsize);
7326   return rsize + isize;
7330 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7331    specified by EXPR into the buffer PTR of length LEN bytes.
7332    Return the number of bytes placed in the buffer, or zero
     upon failure (comment tail elided in this excerpt).  */
/* NOTE(review): excerpt elides the offset initialization/advance, the
   failure returns, and the final return of the total size.  Code lines
   kept byte-identical.  */
7336 native_encode_vector (tree expr, unsigned char *ptr, int len)
7338   int i, size, offset, count;
7339   tree itype, elem, elements;
7342   elements = TREE_VECTOR_CST_ELTS (expr);
7343   count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7344   itype = TREE_TYPE (TREE_TYPE (expr));
7345   size = GET_MODE_SIZE (TYPE_MODE (itype));
7346   for (i = 0; i < count; i++)
      /* Walk the TREE_LIST of element constants.  */
7350         elem = TREE_VALUE (elements);
7351         elements = TREE_CHAIN (elements);
      /* Each element must encode to exactly SIZE bytes.  */
7358           if (native_encode_expr (elem, ptr+offset, len-offset) != size)
      /* Missing trailing elements are encoded as zero bytes.  */
7363           if (offset + size > len)
7365           memset (ptr+offset, 0, size);
7373 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7374    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7375    buffer PTR of length LEN bytes.  Return the number of bytes
7376    placed in the buffer, or zero upon failure.  */
/* NOTE(review): excerpt elides the case labels and the default
   "return 0;".  Code lines kept byte-identical.  */
7379 native_encode_expr (tree expr, unsigned char *ptr, int len)
7381   switch (TREE_CODE (expr))
      /* INTEGER_CST: */
7384       return native_encode_int (expr, ptr, len);
      /* REAL_CST: */
7387       return native_encode_real (expr, ptr, len);
      /* COMPLEX_CST: */
7390       return native_encode_complex (expr, ptr, len);
      /* VECTOR_CST: */
7393       return native_encode_vector (expr, ptr, len);
7401 /* Subroutine of native_interpret_expr.  Interpret the contents of
7402    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7403    If the buffer cannot be interpreted, return NULL_TREE.  */
/* NOTE(review): excerpt elides the NULL_TREE failure returns.  Code
   lines kept byte-identical.  This is the exact inverse of
   native_encode_int's byte-order mapping.  */
7406 native_interpret_int (tree type, unsigned char *ptr, int len)
7408   int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7409   int byte, offset, word, words;
7410   unsigned char value;
7411   unsigned int HOST_WIDE_INT lo = 0;
7412   HOST_WIDE_INT hi = 0;
     /* Fail if the buffer is too short or the type is wider than the
        two-HOST_WIDE_INT internal representation.  */
7414   if (total_bytes > len)
7416   if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7418   words = total_bytes / UNITS_PER_WORD;
7420   for (byte = 0; byte < total_bytes; byte++)
7422       int bitpos = byte * BITS_PER_UNIT;
7423       if (total_bytes > UNITS_PER_WORD)
7425           word = byte / UNITS_PER_WORD;
7426           if (WORDS_BIG_ENDIAN)
7427             word = (words - 1) - word;
7428           offset = word * UNITS_PER_WORD;
7429           if (BYTES_BIG_ENDIAN)
7430             offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7432             offset += byte % UNITS_PER_WORD;
7435         offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7436       value = ptr[offset];
      /* Accumulate into the low word first, then the high word.  */
7438       if (bitpos < HOST_BITS_PER_WIDE_INT)
7439         lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7441         hi |= (unsigned HOST_WIDE_INT) value
7442               << (bitpos - HOST_BITS_PER_WIDE_INT);
7445   return build_int_cst_wide_type (type, lo, hi);
7449 /* Subroutine of native_interpret_expr.  Interpret the contents of
7450    the buffer PTR of length LEN as a REAL_CST of type TYPE.
7451    If the buffer cannot be interpreted, return NULL_TREE.  */
/* NOTE(review): excerpt elides the "long tmp[6];" and REAL_VALUE_TYPE r
   declarations and the NULL_TREE failure return.  Code lines kept
   byte-identical.  Inverse of native_encode_real.  */
7454 native_interpret_real (tree type, unsigned char *ptr, int len)
7456   enum machine_mode mode = TYPE_MODE (type);
7457   int total_bytes = GET_MODE_SIZE (mode);
7458   int byte, offset, word, words, bitpos;
7459   unsigned char value;
7460   /* There are always 32 bits in each long, no matter the size of
7461      the hosts long.  We handle floating point representations with
     up to 192 bits (original comment continues; elided here).  */
7466   total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7467   if (total_bytes > len || total_bytes > 24)
7469   words = 32 / UNITS_PER_WORD;
7471   memset (tmp, 0, sizeof (tmp));
7472   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7473        bitpos += BITS_PER_UNIT)
7475       byte = (bitpos / BITS_PER_UNIT) & 3;
      /* Undo the per-32-bit-group byte reordering done when encoding.  */
7476       if (UNITS_PER_WORD < 4)
7478           word = byte / UNITS_PER_WORD;
7479           if (WORDS_BIG_ENDIAN)
7480             word = (words - 1) - word;
7481           offset = word * UNITS_PER_WORD;
7482           if (BYTES_BIG_ENDIAN)
7483             offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7485             offset += byte % UNITS_PER_WORD;
7488         offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7489       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7491       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7494   real_from_target (&r, tmp, mode);
7495   return build_real (type, r);
7499 /* Subroutine of native_interpret_expr.  Interpret the contents of
7500    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7501    If the buffer cannot be interpreted, return NULL_TREE.  */
/* NOTE(review): excerpt elides the length check (2*size vs LEN) and the
   NULL_TREE returns after each interpret call.  Code lines kept
   byte-identical.  */
7504 native_interpret_complex (tree type, unsigned char *ptr, int len)
7506   tree etype, rpart, ipart;
7509   etype = TREE_TYPE (type);
7510   size = GET_MODE_SIZE (TYPE_MODE (etype));
     /* Real part occupies the first SIZE bytes, imaginary the next.  */
7513   rpart = native_interpret_expr (etype, ptr, size);
7516   ipart = native_interpret_expr (etype, ptr+size, size);
7519   return build_complex (type, rpart, ipart);
7523 /* Subroutine of native_interpret_expr.  Interpret the contents of
7524    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7525    If the buffer cannot be interpreted, return NULL_TREE.  */
/* NOTE(review): excerpt elides the int i/size/count declarations and the
   NULL_TREE failure returns.  Code lines kept byte-identical.  */
7528 native_interpret_vector (tree type, unsigned char *ptr, int len)
7530   tree etype, elem, elements;
7533   etype = TREE_TYPE (type);
7534   size = GET_MODE_SIZE (TYPE_MODE (etype));
7535   count = TYPE_VECTOR_SUBPARTS (type);
7536   if (size * count > len)
7539   elements = NULL_TREE;
     /* Build the element list back-to-front so it ends up in order.  */
7540   for (i = count - 1; i >= 0; i--)
7542       elem = native_interpret_expr (etype, ptr+(i*size), size);
7545       elements = tree_cons (NULL_TREE, elem, elements);
7547   return build_vector (type, elements);
7551 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7552    the buffer PTR of length LEN as a constant of type TYPE.  For
7553    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7554    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7555    return NULL_TREE.  */
/* NOTE(review): excerpt elides the case labels (INTEGER_TYPE and
   friends, REAL_TYPE, COMPLEX_TYPE, VECTOR_TYPE) and the default
   "return NULL_TREE;".  Code lines kept byte-identical.  */
7558 native_interpret_expr (tree type, unsigned char *ptr, int len)
7560   switch (TREE_CODE (type))
7565       return native_interpret_int (type, ptr, len);
7568       return native_interpret_real (type, ptr, len);
7571       return native_interpret_complex (type, ptr, len);
7574       return native_interpret_vector (type, ptr, len);
7582 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7583    TYPE at compile-time.  If we're unable to perform the conversion
7584    return NULL_TREE.  */
/* NOTE(review): excerpt elides the "int len;" declaration and the
   NULL_TREE returns on sanity/encode failure.  Code lines kept
   byte-identical.  */
7587 fold_view_convert_expr (tree type, tree expr)
7589   /* We support up to 512-bit values (for V8DFmode).  */
7590   unsigned char buffer[64];
7593   /* Check that the host and target are sane.  */
7594   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
     /* Round-trip: serialize EXPR to target bytes, reinterpret as TYPE.  */
7597   len = native_encode_expr (expr, buffer, sizeof (buffer));
7601   return native_interpret_expr (type, buffer, len);
7604 /* Build an expression for the address of T.  Folds away INDIRECT_REF
7605    to avoid confusing the gimplify process.  When IN_FOLD is true
7606    avoid modifications of T.  */
/* NOTE(review): excerpt elides the return statements, the "else if
   (!in_fold)" branch structure around the TREE_ADDRESSABLE marking, and
   the "tree base" declaration.  Code lines kept byte-identical.  */
7609 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7611   /* The size of the object is not relevant when talking about its address.  */
7612   if (TREE_CODE (t) == WITH_SIZE_EXPR)
7613     t = TREE_OPERAND (t, 0);
7615   /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7616   if (TREE_CODE (t) == INDIRECT_REF
7617       || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
     /* &*p folds to p, with a NOP cast if the pointer type differs.  */
7619       t = TREE_OPERAND (t, 0);
7621       if (TREE_TYPE (t) != ptrtype)
7622         t = build1 (NOP_EXPR, ptrtype, t);
     /* When mutation is allowed, mark the innermost base object
        addressable before taking its address.  */
7628       while (handled_component_p (base))
7629         base = TREE_OPERAND (base, 0);
7632         TREE_ADDRESSABLE (base) = 1;
7634       t = build1 (ADDR_EXPR, ptrtype, t);
7637     t = build1 (ADDR_EXPR, ptrtype, t);
7642 /* Build an expression for the address of T with type PTRTYPE.  This
7643    function modifies the input parameter 'T' by sometimes setting the
7644    TREE_ADDRESSABLE flag.  */
/* Thin wrapper: in_fold == false, so T may be marked addressable.  */
7647 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7649   return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7652 /* Build an expression for the address of T.  This function modifies
7653    the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7654    flag.  When called from fold functions, use fold_addr_expr instead.  */
/* NOTE(review): the trailing "false);" argument line is elided in this
   excerpt.  Builds the pointer type from T's own type.  */
7657 build_fold_addr_expr (tree t)
7659   return build_fold_addr_expr_with_type_1 (t,
7660                                            build_pointer_type (TREE_TYPE (t)),
7664 /* Same as build_fold_addr_expr, builds an expression for the address
7665    of T, but avoids touching the input node 't'.  Fold functions
7666    should use this version.  */
/* in_fold == true: no TREE_ADDRESSABLE side effect on T.  */
7669 fold_addr_expr (tree t)
7671   tree ptrtype = build_pointer_type (TREE_TYPE (t));
7673   return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7676 /* Fold a unary expression of code CODE and type TYPE with operand
7677    OP0.  Return the folded expression if folding is successful.
7678    Otherwise, return NULL_TREE.  */
/* NOTE(review): this excerpt elides a large number of original lines
   throughout fold_unary -- case labels (NOP_EXPR, CONVERT_EXPR,
   NEGATE_EXPR, ABS_EXPR, BIT_NOT_EXPR, REALPART_EXPR, IMAGPART_EXPR,
   etc.), braces, many "return"/"break" statements and declarations
   (tem, arg0).  All code lines below are kept byte-identical; only
   comments are added.  Do not treat this as a complete function body.  */
7681 fold_unary (enum tree_code code, tree type, tree op0)
7685   enum tree_code_class kind = TREE_CODE_CLASS (code);
7687   gcc_assert (IS_EXPR_CODE_CLASS (kind)
7688               && TREE_CODE_LENGTH (code) == 1);
     /* Strip conversions off the operand; how much to strip depends on
        whether CODE cares about signedness.  */
7693       if (code == NOP_EXPR || code == CONVERT_EXPR
7694           || code == FLOAT_EXPR || code == ABS_EXPR)
7696           /* Don't use STRIP_NOPS, because signedness of argument type
     matters (rest of original comment elided).  */
7698           STRIP_SIGN_NOPS (arg0);
7702           /* Strip any conversions that don't change the mode.  This
7703              is safe for every expression, except for a comparison
7704              expression because its signedness is derived from its
     operands (original comment continues; elided).
7707              Note that this is done as an internal manipulation within
7708              the constant folder, in order to find the simplest
7709              representation of the arguments so that their form can be
7710              studied.  In any cases, the appropriate type conversions
7711              should be put back in the tree that will get out of the
     constant folder.  */
     /* Distribute the unary op into COMPOUND_EXPR / COND_EXPR operands.  */
7717   if (TREE_CODE_CLASS (code) == tcc_unary)
7719       if (TREE_CODE (arg0) == COMPOUND_EXPR)
7720         return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7721                        fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7722       else if (TREE_CODE (arg0) == COND_EXPR)
7724           tree arg01 = TREE_OPERAND (arg0, 1);
7725           tree arg02 = TREE_OPERAND (arg0, 2);
7726           if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7727             arg01 = fold_build1 (code, type, arg01);
7728           if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7729             arg02 = fold_build1 (code, type, arg02);
7730           tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7733           /* If this was a conversion, and all we did was to move into
7734              inside the COND_EXPR, bring it back out.  But leave it if
7735              it is a conversion from integer to integer and the
7736              result precision is no wider than a word since such a
7737              conversion is cheap and may be optimized away by combine,
7738              while it couldn't if it were outside the COND_EXPR.  Then return
7739              so we don't get into an infinite recursion loop taking the
7740              conversion out and then back in.  */
7742           if ((code == NOP_EXPR || code == CONVERT_EXPR
7743                || code == NON_LVALUE_EXPR)
7744               && TREE_CODE (tem) == COND_EXPR
7745               && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7746               && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7747               && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7748               && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7749               && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7750                   == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7751               && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7753                       (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7754                      && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7755                   || flag_syntax_only))
7756             tem = build1 (code, type,
7758                                   TREE_TYPE (TREE_OPERAND
7759                                              (TREE_OPERAND (tem, 1), 0)),
7760                                   TREE_OPERAND (tem, 0),
7761                                   TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7762                                   TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
      /* A comparison can be narrowed/retyped directly for boolean or
         built via COND_EXPR for non-integer result types.  */
7765       else if (COMPARISON_CLASS_P (arg0))
7767           if (TREE_CODE (type) == BOOLEAN_TYPE)
7769               arg0 = copy_node (arg0);
7770               TREE_TYPE (arg0) = type;
7773           else if (TREE_CODE (type) != INTEGER_TYPE)
7774             return fold_build3 (COND_EXPR, type, arg0,
7775                                 fold_build1 (code, type,
7777                                 fold_build1 (code, type,
7778                                              integer_zero_node));
     /* Conversion cases (NOP/CONVERT/FLOAT/FIX_TRUNC share this arm).  */
7787     case FIX_TRUNC_EXPR:
7788       if (TREE_TYPE (op0) == type)
7791       /* If we have (type) (a CMP b) and type is an integral type, return
7792          new expression involving the new type.  */
7793       if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7794         return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7795                             TREE_OPERAND (op0, 1));
7797       /* Handle cases of two conversions in a row.  */
7798       if (TREE_CODE (op0) == NOP_EXPR
7799           || TREE_CODE (op0) == CONVERT_EXPR)
7801           tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7802           tree inter_type = TREE_TYPE (op0);
7803           int inside_int = INTEGRAL_TYPE_P (inside_type);
7804           int inside_ptr = POINTER_TYPE_P (inside_type);
7805           int inside_float = FLOAT_TYPE_P (inside_type);
7806           int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7807           unsigned int inside_prec = TYPE_PRECISION (inside_type);
7808           int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7809           int inter_int = INTEGRAL_TYPE_P (inter_type);
7810           int inter_ptr = POINTER_TYPE_P (inter_type);
7811           int inter_float = FLOAT_TYPE_P (inter_type);
7812           int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7813           unsigned int inter_prec = TYPE_PRECISION (inter_type);
7814           int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7815           int final_int = INTEGRAL_TYPE_P (type);
7816           int final_ptr = POINTER_TYPE_P (type);
7817           int final_float = FLOAT_TYPE_P (type);
7818           int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7819           unsigned int final_prec = TYPE_PRECISION (type);
7820           int final_unsignedp = TYPE_UNSIGNED (type);
7822           /* In addition to the cases of two conversions in a row
7823              handled below, if we are converting something to its own
7824              type via an object of identical or wider precision, neither
7825              conversion is needed.  */
7826           if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7827               && (((inter_int || inter_ptr) && final_int)
7828                   || (inter_float && final_float))
7829               && inter_prec >= final_prec)
7830             return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7832           /* Likewise, if the intermediate and final types are either both
7833              float or both integer, we don't need the middle conversion if
7834              it is wider than the final type and doesn't change the signedness
7835              (for integers).  Avoid this if the final type is a pointer
7836              since then we sometimes need the inner conversion.  Likewise if
7837              the outer has a precision not equal to the size of its mode.  */
7838           if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7839                || (inter_float && inside_float)
7840                || (inter_vec && inside_vec))
7841               && inter_prec >= inside_prec
7842               && (inter_float || inter_vec
7843                   || inter_unsignedp == inside_unsignedp)
7844               && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7845                     && TYPE_MODE (type) == TYPE_MODE (inter_type))
7847               && (! final_vec || inter_prec == inside_prec))
7848             return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7850           /* If we have a sign-extension of a zero-extended value, we can
7851              replace that by a single zero-extension.  */
7852           if (inside_int && inter_int && final_int
7853               && inside_prec < inter_prec && inter_prec < final_prec
7854               && inside_unsignedp && !inter_unsignedp)
7855             return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7857           /* Two conversions in a row are not needed unless:
7858              - some conversion is floating-point (overstrict for now), or
7859              - some conversion is a vector (overstrict for now), or
7860              - the intermediate type is narrower than both initial and
     final (original list item continues; elided), or
7862              - the intermediate type and innermost type differ in signedness,
7863                and the outermost type is wider than the intermediate, or
7864              - the initial type is a pointer type and the precisions of the
7865                intermediate and final types differ, or
7866              - the final type is a pointer type and the precisions of the
7867                initial and intermediate types differ.
7868              - the final type is a pointer type and the initial type not
7869              - the initial type is a pointer to an array and the final type
     not (original comment tail elided).  */
7871           if (! inside_float && ! inter_float && ! final_float
7872               && ! inside_vec && ! inter_vec && ! final_vec
7873               && (inter_prec >= inside_prec || inter_prec >= final_prec)
7874               && ! (inside_int && inter_int
7875                     && inter_unsignedp != inside_unsignedp
7876                     && inter_prec < final_prec)
7877               && ((inter_unsignedp && inter_prec > inside_prec)
7878                   == (final_unsignedp && final_prec > inter_prec))
7879               && ! (inside_ptr && inter_prec != final_prec)
7880               && ! (final_ptr && inside_prec != inter_prec)
7881               && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7882                     && TYPE_MODE (type) == TYPE_MODE (inter_type))
7883               && final_ptr == inside_ptr
7885                     && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7886                     && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7887             return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7890       /* Handle (T *)&A.B.C for A being of type T and B and C
7891          living at offset zero.  This occurs frequently in
7892          C++ upcasting and then accessing the base.  */
7893       if (TREE_CODE (op0) == ADDR_EXPR
7894           && POINTER_TYPE_P (type)
7895           && handled_component_p (TREE_OPERAND (op0, 0)))
7897           HOST_WIDE_INT bitsize, bitpos;
7899           enum machine_mode mode;
7900           int unsignedp, volatilep;
7901           tree base = TREE_OPERAND (op0, 0);
7902           base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7903                                       &mode, &unsignedp, &volatilep, false);
7904           /* If the reference was to a (constant) zero offset, we can use
7905              the address of the base if it has the same base type
7906              as the result type.  */
7907           if (! offset && bitpos == 0
7908               && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7909                  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7910             return fold_convert (type, fold_addr_expr (base));
      /* Pull a constant assignment out of a conversion:
         (T)(x = c) -> (x = c, (T)c), unless assigning a bitfield.  */
7913       if ((TREE_CODE (op0) == MODIFY_EXPR
7914            || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7915           && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7916           /* Detect assigning a bitfield.  */
7917           && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7919                  (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7921           /* Don't leave an assignment inside a conversion
7922              unless assigning a bitfield.  */
7923           tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7924           /* First do the assignment, then return converted constant.  */
7925           tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7926           TREE_NO_WARNING (tem) = 1;
7927           TREE_USED (tem) = 1;
7931       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7932          constants (if x has signed type, the sign bit cannot be set
7933          in c).  This folds extension into the BIT_AND_EXPR.  */
7934       if (INTEGRAL_TYPE_P (type)
7935           && TREE_CODE (type) != BOOLEAN_TYPE
7936           && TREE_CODE (op0) == BIT_AND_EXPR
7937           && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7940           tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
      /* Always safe when unsigned or narrowing; otherwise require the
         sign bit of the mask to be clear.  */
7943           if (TYPE_UNSIGNED (TREE_TYPE (and))
7944               || (TYPE_PRECISION (type)
7945                   <= TYPE_PRECISION (TREE_TYPE (and))))
7947           else if (TYPE_PRECISION (TREE_TYPE (and1))
7948                    <= HOST_BITS_PER_WIDE_INT
7949                    && host_integerp (and1, 1))
7951               unsigned HOST_WIDE_INT cst;
7953               cst = tree_low_cst (and1, 1);
7954               cst &= (HOST_WIDE_INT) -1
7955                      << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7956               change = (cst == 0);
7957 #ifdef LOAD_EXTEND_OP
7959                   && !flag_syntax_only
7960                   && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
      /* == ZERO_EXTEND (elided): redo the AND in unsigned to match the
         target's load-extension behavior.  */
7963                   tree uns = unsigned_type_for (TREE_TYPE (and0));
7964                   and0 = fold_convert (uns, and0);
7965                   and1 = fold_convert (uns, and1);
7971               tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7972                                            TREE_INT_CST_HIGH (and1), 0,
7973                                            TREE_OVERFLOW (and1));
7974               return fold_build2 (BIT_AND_EXPR, type,
7975                                   fold_convert (type, and0), tem);
7979       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7980          when one of the new casts will fold away. Conservatively we assume
7981          that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
7982       if (POINTER_TYPE_P (type)
7983           && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7984           && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7985               || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7986               || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7988           tree arg00 = TREE_OPERAND (arg0, 0);
7989           tree arg01 = TREE_OPERAND (arg0, 1);
7991           return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7992                               fold_convert (sizetype, arg01));
7995       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7996          of the same precision, and X is an integer type not narrower than
7997          types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7998       if (INTEGRAL_TYPE_P (type)
7999           && TREE_CODE (op0) == BIT_NOT_EXPR
8000           && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8001           && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
8002               || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
8003           && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8005           tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8006           if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8007               && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8008             return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
      /* Last resort for conversions: constant folding.  */
8011       tem = fold_convert_const (code, type, op0);
8012       return tem ? tem : NULL_TREE;
8014     case VIEW_CONVERT_EXPR:
8015       if (TREE_TYPE (op0) == type)
8017       if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8018         return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8019       return fold_view_convert_expr (type, op0);
     /* NEGATE_EXPR (case label elided).  */
8022       tem = fold_negate_expr (arg0);
8024         return fold_convert (type, tem);
     /* ABS_EXPR (case label elided).  */
8028       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8029         return fold_abs_const (arg0, type);
8030       else if (TREE_CODE (arg0) == NEGATE_EXPR)
8031         return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8032       /* Convert fabs((double)float) into (double)fabsf(float).  */
8033       else if (TREE_CODE (arg0) == NOP_EXPR
8034                && TREE_CODE (type) == REAL_TYPE)
8036           tree targ0 = strip_float_extensions (arg0);
8038             return fold_convert (type, fold_build1 (ABS_EXPR,
8042       /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
8043       else if (TREE_CODE (arg0) == ABS_EXPR)
8045       else if (tree_expr_nonnegative_p (arg0))
8048       /* Strip sign ops from argument.  */
8049       if (TREE_CODE (type) == REAL_TYPE)
8051           tem = fold_strip_sign_ops (arg0);
8053             return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
     /* CONJ_EXPR (case label elided).  */
8058       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8059         return fold_convert (type, arg0);
8060       if (TREE_CODE (arg0) == COMPLEX_EXPR)
8062           tree itype = TREE_TYPE (type);
8063           tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8064           tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8065           return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8067       if (TREE_CODE (arg0) == COMPLEX_CST)
8069           tree itype = TREE_TYPE (type);
8070           tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8071           tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8072           return build_complex (type, rpart, negate_expr (ipart));
8074       if (TREE_CODE (arg0) == CONJ_EXPR)
8075         return fold_convert (type, TREE_OPERAND (arg0, 0));
     /* BIT_NOT_EXPR (case label elided).  */
8079       if (TREE_CODE (arg0) == INTEGER_CST)
8080         return fold_not_const (arg0, type);
8081       else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8082         return TREE_OPERAND (arg0, 0);
8083       /* Convert ~ (-A) to A - 1.  */
8084       else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8085         return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
8086                             build_int_cst (type, 1));
8087       /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
8088       else if (INTEGRAL_TYPE_P (type)
8089                && ((TREE_CODE (arg0) == MINUS_EXPR
8090                     && integer_onep (TREE_OPERAND (arg0, 1)))
8091                    || (TREE_CODE (arg0) == PLUS_EXPR
8092                        && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8093         return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8094       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
8095       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8096                && (tem = fold_unary (BIT_NOT_EXPR, type,
8098                                        TREE_OPERAND (arg0, 0)))))
8099         return fold_build2 (BIT_XOR_EXPR, type, tem,
8100                             fold_convert (type, TREE_OPERAND (arg0, 1)));
8101       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8102                && (tem = fold_unary (BIT_NOT_EXPR, type,
8104                                        TREE_OPERAND (arg0, 1)))))
8105         return fold_build2 (BIT_XOR_EXPR, type,
8106                             fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8110     case TRUTH_NOT_EXPR:
8111       /* The argument to invert_truthvalue must have Boolean type.  */
8112       if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8113           arg0 = fold_convert (boolean_type_node, arg0);
8115       /* Note that the operand of this must be an int
8116          and its values must be 0 or 1.
8117          ("true" is a fixed value perhaps depending on the language,
8118          but we don't handle values other than 1 correctly yet.)  */
8119       tem = fold_truth_not_expr (arg0);
8122       return fold_convert (type, tem);
     /* REALPART_EXPR (case label elided).  */
8125       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8126         return fold_convert (type, arg0);
8127       if (TREE_CODE (arg0) == COMPLEX_EXPR)
8128         return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8129                                  TREE_OPERAND (arg0, 1));
8130       if (TREE_CODE (arg0) == COMPLEX_CST)
8131         return fold_convert (type, TREE_REALPART (arg0));
8132       if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8134           tree itype = TREE_TYPE (TREE_TYPE (arg0));
8135           tem = fold_build2 (TREE_CODE (arg0), itype,
8136                              fold_build1 (REALPART_EXPR, itype,
8137                                           TREE_OPERAND (arg0, 0)),
8138                              fold_build1 (REALPART_EXPR, itype,
8139                                           TREE_OPERAND (arg0, 1)));
8140           return fold_convert (type, tem);
8142       if (TREE_CODE (arg0) == CONJ_EXPR)
8144           tree itype = TREE_TYPE (TREE_TYPE (arg0));
8145           tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8146           return fold_convert (type, tem);
8148       if (TREE_CODE (arg0) == CALL_EXPR)
8150           tree fn = get_callee_fndecl (arg0);
8151           if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8152             switch (DECL_FUNCTION_CODE (fn))
      /* REALPART (cexpi (x)) -> cos (x).  */
8154               CASE_FLT_FN (BUILT_IN_CEXPI):
8155                 fn = mathfn_built_in (type, BUILT_IN_COS);
8157                   return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
     /* IMAGPART_EXPR (case label elided).  */
8167       if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8168         return fold_convert (type, integer_zero_node);
8169       if (TREE_CODE (arg0) == COMPLEX_EXPR)
8170         return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8171                                  TREE_OPERAND (arg0, 0));
8172       if (TREE_CODE (arg0) == COMPLEX_CST)
8173         return fold_convert (type, TREE_IMAGPART (arg0));
8174       if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8176           tree itype = TREE_TYPE (TREE_TYPE (arg0));
8177           tem = fold_build2 (TREE_CODE (arg0), itype,
8178                              fold_build1 (IMAGPART_EXPR, itype,
8179                                           TREE_OPERAND (arg0, 0)),
8180                              fold_build1 (IMAGPART_EXPR, itype,
8181                                           TREE_OPERAND (arg0, 1)));
8182           return fold_convert (type, tem);
8184       if (TREE_CODE (arg0) == CONJ_EXPR)
8186           tree itype = TREE_TYPE (TREE_TYPE (arg0));
8187           tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8188           return fold_convert (type, negate_expr (tem));
8190       if (TREE_CODE (arg0) == CALL_EXPR)
8192           tree fn = get_callee_fndecl (arg0);
8193           if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8194             switch (DECL_FUNCTION_CODE (fn))
      /* IMAGPART (cexpi (x)) -> sin (x).  */
8196               CASE_FLT_FN (BUILT_IN_CEXPI):
8197                 fn = mathfn_built_in (type, BUILT_IN_SIN);
8199                   return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8210     } /* switch (code) */
8213 /* Fold a binary expression of code CODE and type TYPE with operands
8214    OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8215    Return the folded expression if folding is successful.  Otherwise,
8216    return NULL_TREE.  */
/* NOTE(review): excerpt elides the "else return NULL_TREE;" after the
   compl_code selection and the final "return NULL_TREE;".  Code lines
   kept byte-identical.  */
8219 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8221   enum tree_code compl_code;
     /* COMPL_CODE is the dual of CODE; only MIN/MAX are handled.  */
8223   if (code == MIN_EXPR)
8224     compl_code = MAX_EXPR;
8225   else if (code == MAX_EXPR)
8226     compl_code = MIN_EXPR;
8230   /* MIN (MAX (a, b), b) == b.  */
8231   if (TREE_CODE (op0) == compl_code
8232       && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8233     return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8235   /* MIN (MAX (b, a), b) == b.  */
8236   if (TREE_CODE (op0) == compl_code
8237       && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8238       && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8239     return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8241   /* MIN (a, MAX (a, b)) == a.  */
8242   if (TREE_CODE (op1) == compl_code
8243       && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8244       && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8245     return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8247   /* MIN (a, MAX (b, a)) == a.  */
8248   if (TREE_CODE (op1) == compl_code
8249       && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8250       && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8251     return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8256 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8257 by changing CODE to reduce the magnitude of constants involved in
8258 ARG0 of the comparison.
8259 Returns a canonicalized comparison tree if a simplification was
8260 possible, otherwise returns NULL_TREE.
8261 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8262 valid if signed overflow is undefined. */
/* NOTE(review): several original lines are missing from this listing —
   the return-type line, the declaration of SGN0, the early
   "return NULL_TREE;" statements after the guards, and the "code = ...;"
   assignments inside each branch of the two if/else-if ladders.  The
   visible conditions only select which transformation applies.  */
8265 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8266 tree arg0, tree arg1,
8267 bool *strict_overflow_p)
8269 enum tree_code code0 = TREE_CODE (arg0);
8270 tree t, cst0 = NULL_TREE;
8274 /* Match A +- CST code arg1 and CST code arg1. */
8275 if (!(((code0 == MINUS_EXPR
8276 || code0 == PLUS_EXPR)
8277 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8278 || code0 == INTEGER_CST))
8281 /* Identify the constant in arg0 and its sign. */
8282 if (code0 == INTEGER_CST)
8285 cst0 = TREE_OPERAND (arg0, 1);
8286 sgn0 = tree_int_cst_sgn (cst0);
8288 /* Overflowed constants and zero will cause problems. */
8289 if (integer_zerop (cst0)
8290 || TREE_OVERFLOW (cst0))
8293 /* See if we can reduce the magnitude of the constant in
8294 arg0 by changing the comparison code. */
8295 if (code0 == INTEGER_CST)
8297 /* CST <= arg1 -> CST-1 < arg1. */
8298 if (code == LE_EXPR && sgn0 == 1)
8300 /* -CST < arg1 -> -CST-1 <= arg1. */
8301 else if (code == LT_EXPR && sgn0 == -1)
8303 /* CST > arg1 -> CST-1 >= arg1. */
8304 else if (code == GT_EXPR && sgn0 == 1)
8306 /* -CST >= arg1 -> -CST-1 > arg1. */
8307 else if (code == GE_EXPR && sgn0 == -1)
8311 /* arg1 code' CST' might be more canonical. */
8316 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8318 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8320 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8321 else if (code == GT_EXPR
8322 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8324 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8325 else if (code == LE_EXPR
8326 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8328 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8329 else if (code == GE_EXPR
8330 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
      /* Record that the transformation is only valid when signed overflow
	 is undefined; callers emit -Wstrict-overflow diagnostics.  */
8334 *strict_overflow_p = true;
8337 /* Now build the constant reduced in magnitude. */
8338 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8339 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8340 if (code0 != INTEGER_CST)
8341 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8343 /* If swapping might yield to a more canonical form, do so. */
8345 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8347 return fold_build2 (code, type, t, arg1);
8350 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8351 overflow further. Try to decrease the magnitude of constants involved
8352 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8353 and put sole constants at the second argument position.
8354 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
/* NOTE(review): this listing is missing the return-type line, the local
   declaration of T, the early "return NULL_TREE;"/"return t;" statements
   and the final "return t;" with closing brace.  Only the two
   canonicalization attempts are visible.  */
8357 maybe_canonicalize_comparison (enum tree_code code, tree type,
8358 tree arg0, tree arg1)
8361 bool strict_overflow_p;
8362 const char * const warnmsg = G_("assuming signed overflow does not occur "
8363 "when reducing constant in comparison");
8365 /* In principle pointers also have undefined overflow behavior,
8366 but that causes problems elsewhere. */
8367 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8368 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8371 /* Try canonicalization by simplifying arg0. */
8372 strict_overflow_p = false;
8373 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8374 &strict_overflow_p);
      /* Only warn when the helper actually relied on undefined signed
	 overflow for the simplification it produced.  */
8377 if (strict_overflow_p)
8378 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8382 /* Try canonicalization by simplifying arg1 using the swapped
      /* Second attempt: swap the comparison so the helper can work on the
	 other operand.  */
8384 code = swap_tree_comparison (code);
8385 strict_overflow_p = false;
8386 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8387 &strict_overflow_p);
8388 if (t && strict_overflow_p)
8389 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8393 /* Subroutine of fold_binary. This routine performs all of the
8394 transformations that are common to the equality/inequality
8395 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8396 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8397 fold_binary should call fold_binary. Fold a comparison with
8398 tree code CODE and type TYPE with operands OP0 and OP1. Return
8399 the folded comparison or NULL_TREE. */
/* NOTE(review): this listing has gaps — many original lines (braces,
   local declarations such as LHS/CONST2/CST, several switch case labels,
   and some intermediate returns) are missing.  The comments below
   annotate only the code that is visible; treat control flow with care
   against the complete source.  */
8402 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8404 tree arg0, arg1, tem;
      /* Work on copies of the operands with sign-preserving conversions
	 stripped; OP0/OP1 keep the caller's original trees.  */
8409 STRIP_SIGN_NOPS (arg0);
8410 STRIP_SIGN_NOPS (arg1);
8412 tem = fold_relational_const (code, type, arg0, arg1);
8413 if (tem != NULL_TREE)
8416 /* If one arg is a real or integer constant, put it last. */
8417 if (tree_swap_operands_p (arg0, arg1, true))
8418 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8420 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8421 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8422 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8423 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8424 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8425 && (TREE_CODE (arg1) == INTEGER_CST
8426 && !TREE_OVERFLOW (arg1)))
8428 tree const1 = TREE_OPERAND (arg0, 1);
8430 tree variable = TREE_OPERAND (arg0, 0);
      /* LHS_ADD selects the inverse operation so the constant moves to the
	 right-hand side of the comparison.  */
8433 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8435 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8436 TREE_TYPE (arg1), const2, const1);
8438 /* If the constant operation overflowed this can be
8439 simplified as a comparison against INT_MAX/INT_MIN. */
8440 if (TREE_CODE (lhs) == INTEGER_CST
8441 && TREE_OVERFLOW (lhs))
8443 int const1_sgn = tree_int_cst_sgn (const1);
8444 enum tree_code code2 = code;
8446 /* Get the sign of the constant on the lhs if the
8447 operation were VARIABLE + CONST1. */
8448 if (TREE_CODE (arg0) == MINUS_EXPR)
8449 const1_sgn = -const1_sgn;
8451 /* The sign of the constant determines if we overflowed
8452 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8453 Canonicalize to the INT_MIN overflow by swapping the comparison
8455 if (const1_sgn == -1)
8456 code2 = swap_tree_comparison (code);
8458 /* We now can look at the canonicalized case
8459 VARIABLE + 1 CODE2 INT_MIN
8460 and decide on the result. */
8461 if (code2 == LT_EXPR
8463 || code2 == EQ_EXPR)
8464 return omit_one_operand (type, boolean_false_node, variable);
8465 else if (code2 == NE_EXPR
8467 || code2 == GT_EXPR)
8468 return omit_one_operand (type, boolean_true_node, variable);
8471 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8472 && (TREE_CODE (lhs) != INTEGER_CST
8473 || !TREE_OVERFLOW (lhs)))
8475 fold_overflow_warning (("assuming signed overflow does not occur "
8476 "when changing X +- C1 cmp C2 to "
8478 WARN_STRICT_OVERFLOW_COMPARISON);
8479 return fold_build2 (code, type, variable, lhs);
8483 /* For comparisons of pointers we can decompose it to a compile time
8484 comparison of the base objects and the offsets into the object.
8485 This requires at least one operand being an ADDR_EXPR to do more
8486 than the operand_equal_p test below. */
8487 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8488 && (TREE_CODE (arg0) == ADDR_EXPR
8489 || TREE_CODE (arg1) == ADDR_EXPR))
8491 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8492 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8493 enum machine_mode mode;
8494 int volatilep, unsignedp;
8495 bool indirect_base0 = false;
8497 /* Get base and offset for the access. Strip ADDR_EXPR for
8498 get_inner_reference, but put it back by stripping INDIRECT_REF
8499 off the base object if possible. */
8501 if (TREE_CODE (arg0) == ADDR_EXPR)
8503 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8504 &bitsize, &bitpos0, &offset0, &mode,
8505 &unsignedp, &volatilep, false);
8506 if (TREE_CODE (base0) == INDIRECT_REF)
8507 base0 = TREE_OPERAND (base0, 0);
8509 indirect_base0 = true;
8513 if (TREE_CODE (arg1) == ADDR_EXPR)
8515 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8516 &bitsize, &bitpos1, &offset1, &mode,
8517 &unsignedp, &volatilep, false);
8518 /* We have to make sure to have an indirect/non-indirect base1
8519 just the same as we did for base0. */
8520 if (TREE_CODE (base1) == INDIRECT_REF
8522 base1 = TREE_OPERAND (base1, 0);
8523 else if (!indirect_base0)
8526 else if (indirect_base0)
8529 /* If we have equivalent bases we might be able to simplify. */
8531 && operand_equal_p (base0, base1, 0))
8533 /* We can fold this expression to a constant if the non-constant
8534 offset parts are equal. */
8535 if (offset0 == offset1
8536 || (offset0 && offset1
8537 && operand_equal_p (offset0, offset1, 0)))
      /* NOTE(review): the switch on CODE and its case labels are elided
	 here; each visible return handles one comparison code by
	 comparing the constant bit positions.  */
8542 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8544 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8546 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8548 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8550 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8552 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8556 /* We can simplify the comparison to a comparison of the variable
8557 offset parts if the constant offset parts are equal.
8558 Be careful to use signed size type here because otherwise we
8559 mess with array offsets in the wrong way. This is possible
8560 because pointer arithmetic is restricted to retain within an
8561 object and overflow on pointer differences is undefined as of
8562 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8563 else if (bitpos0 == bitpos1)
8565 tree signed_size_type_node;
8566 signed_size_type_node = signed_type_for (size_type_node);
8568 /* By converting to signed size type we cover middle-end pointer
8569 arithmetic which operates on unsigned pointer types of size
8570 type size and ARRAY_REF offsets which are properly sign or
8571 zero extended from their type in case it is narrower than
8573 if (offset0 == NULL_TREE)
8574 offset0 = build_int_cst (signed_size_type_node, 0);
8576 offset0 = fold_convert (signed_size_type_node, offset0);
8577 if (offset1 == NULL_TREE)
8578 offset1 = build_int_cst (signed_size_type_node, 0);
8580 offset1 = fold_convert (signed_size_type_node, offset1);
8582 return fold_build2 (code, type, offset0, offset1);
8587 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8588 same object, then we can fold this to a comparison of the two offsets in
8589 signed size type. This is possible because pointer arithmetic is
8590 restricted to retain within an object and overflow on pointer differences
8591 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8593 We check flag_wrapv directly because pointers types are unsigned,
8594 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8595 normally what we want to avoid certain odd overflow cases, but
8597 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8599 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8601 tree base0, offset0, base1, offset1;
8603 if (extract_array_ref (arg0, &base0, &offset0)
8604 && extract_array_ref (arg1, &base1, &offset1)
8605 && operand_equal_p (base0, base1, 0))
8607 tree signed_size_type_node;
8608 signed_size_type_node = signed_type_for (size_type_node);
8610 /* By converting to signed size type we cover middle-end pointer
8611 arithmetic which operates on unsigned pointer types of size
8612 type size and ARRAY_REF offsets which are properly sign or
8613 zero extended from their type in case it is narrower than
8615 if (offset0 == NULL_TREE)
8616 offset0 = build_int_cst (signed_size_type_node, 0);
8618 offset0 = fold_convert (signed_size_type_node, offset0);
8619 if (offset1 == NULL_TREE)
8620 offset1 = build_int_cst (signed_size_type_node, 0);
8622 offset1 = fold_convert (signed_size_type_node, offset1);
8624 return fold_build2 (code, type, offset0, offset1);
8628 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8629 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8630 the resulting offset is smaller in absolute value than the
8632 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8633 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8634 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8635 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8636 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8637 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8638 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8640 tree const1 = TREE_OPERAND (arg0, 1);
8641 tree const2 = TREE_OPERAND (arg1, 1);
8642 tree variable1 = TREE_OPERAND (arg0, 0);
8643 tree variable2 = TREE_OPERAND (arg1, 0);
8645 const char * const warnmsg = G_("assuming signed overflow does not "
8646 "occur when combining constants around "
8649 /* Put the constant on the side where it doesn't overflow and is
8650 of lower absolute value than before. */
8651 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8652 ? MINUS_EXPR : PLUS_EXPR,
8654 if (!TREE_OVERFLOW (cst)
8655 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8657 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8658 return fold_build2 (code, type,
8660 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
      /* Otherwise try moving the combined constant to the other side.  */
8664 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8665 ? MINUS_EXPR : PLUS_EXPR,
8667 if (!TREE_OVERFLOW (cst)
8668 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8670 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8671 return fold_build2 (code, type,
8672 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8678 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8679 signed arithmetic case. That form is created by the compiler
8680 often enough for folding it to be of value. One example is in
8681 computing loop trip counts after Operator Strength Reduction. */
8682 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8683 && TREE_CODE (arg0) == MULT_EXPR
8684 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8685 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8686 && integer_zerop (arg1))
8688 tree const1 = TREE_OPERAND (arg0, 1);
8689 tree const2 = arg1; /* zero */
8690 tree variable1 = TREE_OPERAND (arg0, 0);
8691 enum tree_code cmp_code = code;
8693 gcc_assert (!integer_zerop (const1));
8695 fold_overflow_warning (("assuming signed overflow does not occur when "
8696 "eliminating multiplication in comparison "
8698 WARN_STRICT_OVERFLOW_COMPARISON);
8700 /* If const1 is negative we swap the sense of the comparison. */
8701 if (tree_int_cst_sgn (const1) < 0)
8702 cmp_code = swap_tree_comparison (cmp_code);
8704 return fold_build2 (cmp_code, type, variable1, const2);
8707 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8711 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8713 tree targ0 = strip_float_extensions (arg0);
8714 tree targ1 = strip_float_extensions (arg1);
8715 tree newtype = TREE_TYPE (targ0);
8717 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8718 newtype = TREE_TYPE (targ1);
8720 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8721 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8722 return fold_build2 (code, type, fold_convert (newtype, targ0),
8723 fold_convert (newtype, targ1));
8725 /* (-a) CMP (-b) -> b CMP a */
8726 if (TREE_CODE (arg0) == NEGATE_EXPR
8727 && TREE_CODE (arg1) == NEGATE_EXPR)
8728 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8729 TREE_OPERAND (arg0, 0));
8731 if (TREE_CODE (arg1) == REAL_CST)
8733 REAL_VALUE_TYPE cst;
8734 cst = TREE_REAL_CST (arg1);
8736 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8737 if (TREE_CODE (arg0) == NEGATE_EXPR)
8738 return fold_build2 (swap_tree_comparison (code), type,
8739 TREE_OPERAND (arg0, 0),
8740 build_real (TREE_TYPE (arg1),
8741 REAL_VALUE_NEGATE (cst)));
8743 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8744 /* a CMP (-0) -> a CMP 0 */
8745 if (REAL_VALUE_MINUS_ZERO (cst))
8746 return fold_build2 (code, type, arg0,
8747 build_real (TREE_TYPE (arg1), dconst0));
8749 /* x != NaN is always true, other ops are always false. */
8750 if (REAL_VALUE_ISNAN (cst)
8751 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8753 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8754 return omit_one_operand (type, tem, arg0);
8757 /* Fold comparisons against infinity. */
8758 if (REAL_VALUE_ISINF (cst))
8760 tem = fold_inf_compare (code, type, arg0, arg1);
8761 if (tem != NULL_TREE)
8766 /* If this is a comparison of a real constant with a PLUS_EXPR
8767 or a MINUS_EXPR of a real constant, we can convert it into a
8768 comparison with a revised real constant as long as no overflow
8769 occurs when unsafe_math_optimizations are enabled. */
8770 if (flag_unsafe_math_optimizations
8771 && TREE_CODE (arg1) == REAL_CST
8772 && (TREE_CODE (arg0) == PLUS_EXPR
8773 || TREE_CODE (arg0) == MINUS_EXPR)
8774 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8775 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8776 ? MINUS_EXPR : PLUS_EXPR,
8777 arg1, TREE_OPERAND (arg0, 1), 0))
8778 && !TREE_OVERFLOW (tem))
8779 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8781 /* Likewise, we can simplify a comparison of a real constant with
8782 a MINUS_EXPR whose first operand is also a real constant, i.e.
8783 (c1 - x) < c2 becomes x > c1-c2. */
8784 if (flag_unsafe_math_optimizations
8785 && TREE_CODE (arg1) == REAL_CST
8786 && TREE_CODE (arg0) == MINUS_EXPR
8787 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8788 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8790 && !TREE_OVERFLOW (tem))
8791 return fold_build2 (swap_tree_comparison (code), type,
8792 TREE_OPERAND (arg0, 1), tem);
8794 /* Fold comparisons against built-in math functions. */
8795 if (TREE_CODE (arg1) == REAL_CST
8796 && flag_unsafe_math_optimizations
8797 && ! flag_errno_math)
8799 enum built_in_function fcode = builtin_mathfn_code (arg0);
8801 if (fcode != END_BUILTINS)
8803 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8804 if (tem != NULL_TREE)
8810 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8811 && (TREE_CODE (arg0) == NOP_EXPR
8812 || TREE_CODE (arg0) == CONVERT_EXPR))
8814 /* If we are widening one operand of an integer comparison,
8815 see if the other operand is similarly being widened. Perhaps we
8816 can do the comparison in the narrower type. */
8817 tem = fold_widened_comparison (code, type, arg0, arg1);
8821 /* Or if we are changing signedness. */
8822 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8827 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8828 constant, we can simplify it. */
8829 if (TREE_CODE (arg1) == INTEGER_CST
8830 && (TREE_CODE (arg0) == MIN_EXPR
8831 || TREE_CODE (arg0) == MAX_EXPR)
8832 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8834 tem = optimize_minmax_comparison (code, type, op0, op1);
8839 /* Simplify comparison of something with itself. (For IEEE
8840 floating-point, we can only do some of these simplifications.) */
8841 if (operand_equal_p (arg0, arg1, 0))
      /* NOTE(review): the switch on CODE and its case labels are elided
	 in this listing; the guarded returns below belong to the
	 EQ/ordered/NE cases respectively.  */
8846 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8847 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8848 return constant_boolean_node (1, type);
8853 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8854 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8855 return constant_boolean_node (1, type);
8856 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8859 /* For NE, we can only do this simplification if integer
8860 or we don't honor IEEE floating point NaNs. */
8861 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8862 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8864 /* ... fall through ... */
8867 return constant_boolean_node (0, type);
8873 /* If we are comparing an expression that just has comparisons
8874 of two integer values, arithmetic expressions of those comparisons,
8875 and constants, we can simplify it. There are only three cases
8876 to check: the two values can either be equal, the first can be
8877 greater, or the second can be greater. Fold the expression for
8878 those three values. Since each value must be 0 or 1, we have
8879 eight possibilities, each of which corresponds to the constant 0
8880 or 1 or one of the six possible comparisons.
8882 This handles common cases like (a > b) == 0 but also handles
8883 expressions like ((x > y) - (y > x)) > 0, which supposedly
8884 occur in macroized code. */
8886 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8888 tree cval1 = 0, cval2 = 0;
8891 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8892 /* Don't handle degenerate cases here; they should already
8893 have been handled anyway. */
8894 && cval1 != 0 && cval2 != 0
8895 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8896 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8897 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8898 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8899 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8900 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8901 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8903 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8904 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8906 /* We can't just pass T to eval_subst in case cval1 or cval2
8907 was the same as ARG1. */
      /* NOTE(review): the HIGH_RESULT/EQUAL_RESULT/LOW_RESULT declarations
	 and the trailing eval_subst arguments are elided in this
	 listing.  */
8910 = fold_build2 (code, type,
8911 eval_subst (arg0, cval1, maxval,
8915 = fold_build2 (code, type,
8916 eval_subst (arg0, cval1, maxval,
8920 = fold_build2 (code, type,
8921 eval_subst (arg0, cval1, minval,
8925 /* All three of these results should be 0 or 1. Confirm they are.
8926 Then use those values to select the proper code to use. */
8928 if (TREE_CODE (high_result) == INTEGER_CST
8929 && TREE_CODE (equal_result) == INTEGER_CST
8930 && TREE_CODE (low_result) == INTEGER_CST)
8932 /* Make a 3-bit mask with the high-order bit being the
8933 value for `>', the next for '=', and the low for '<'. */
8934 switch ((integer_onep (high_result) * 4)
8935 + (integer_onep (equal_result) * 2)
8936 + integer_onep (low_result))
      /* NOTE(review): the individual case labels (0..7) mapping the mask
	 to a comparison code are elided in this listing.  */
8940 return omit_one_operand (type, integer_zero_node, arg0);
8961 return omit_one_operand (type, integer_one_node, arg0);
8965 return save_expr (build2 (code, type, cval1, cval2));
8966 return fold_build2 (code, type, cval1, cval2);
8971 /* Fold a comparison of the address of COMPONENT_REFs with the same
8972 type and component to a comparison of the address of the base
8973 object. In short, &x->a OP &y->a to x OP y and
8974 &x->a OP &y.a to x OP &y */
8975 if (TREE_CODE (arg0) == ADDR_EXPR
8976 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8977 && TREE_CODE (arg1) == ADDR_EXPR
8978 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8980 tree cref0 = TREE_OPERAND (arg0, 0);
8981 tree cref1 = TREE_OPERAND (arg1, 0);
8982 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8984 tree op0 = TREE_OPERAND (cref0, 0);
8985 tree op1 = TREE_OPERAND (cref1, 0);
8986 return fold_build2 (code, type,
8987 fold_addr_expr (op0),
8988 fold_addr_expr (op1));
8992 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8993 into a single range test. */
8994 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8995 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8996 && TREE_CODE (arg1) == INTEGER_CST
8997 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8998 && !integer_zerop (TREE_OPERAND (arg0, 1))
8999 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9000 && !TREE_OVERFLOW (arg1))
9002 tem = fold_div_compare (code, type, arg0, arg1);
9003 if (tem != NULL_TREE)
9007 /* Fold ~X op ~Y as Y op X. */
9008 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9009 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9011 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9012 return fold_build2 (code, type,
9013 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9014 TREE_OPERAND (arg0, 0));
9017 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9018 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9019 && TREE_CODE (arg1) == INTEGER_CST)
9021 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9022 return fold_build2 (swap_tree_comparison (code), type,
9023 TREE_OPERAND (arg0, 0),
9024 fold_build1 (BIT_NOT_EXPR, cmp_type,
9025 fold_convert (cmp_type, arg1)));
9032 /* Subroutine of fold_binary. Optimize complex multiplications of the
9033 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9034 argument EXPR represents the expression "z" of type TYPE. */
/* NOTE(review): the return-type line and the braces of the if/else-if/else
   ladder are elided in this listing; the three visible branches extract
   the real and imaginary parts from a COMPLEX_EXPR, a COMPLEX_CST, or —
   in the fallback — via REALPART_EXPR/IMAGPART_EXPR on a saved EXPR.  */
9037 fold_mult_zconjz (tree type, tree expr)
      /* ITYPE is the scalar element type of the complex TYPE.  */
9039 tree itype = TREE_TYPE (type);
9040 tree rpart, ipart, tem;
9042 if (TREE_CODE (expr) == COMPLEX_EXPR)
9044 rpart = TREE_OPERAND (expr, 0);
9045 ipart = TREE_OPERAND (expr, 1);
9047 else if (TREE_CODE (expr) == COMPLEX_CST)
9049 rpart = TREE_REALPART (expr);
9050 ipart = TREE_IMAGPART (expr);
9054 expr = save_expr (expr);
9055 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9056 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
      /* Wrap in save_expr: each part is used twice in the squares below.  */
9059 rpart = save_expr (rpart);
9060 ipart = save_expr (ipart);
9061 tem = fold_build2 (PLUS_EXPR, itype,
9062 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9063 fold_build2 (MULT_EXPR, itype, ipart, ipart));
      /* z * conj(z) is purely real, so the imaginary part is zero.  */
9064 return fold_build2 (COMPLEX_EXPR, type, tem,
9065 fold_convert (itype, integer_zero_node));
9069 /* Fold a binary expression of code CODE and type TYPE with operands
9070 OP0 and OP1. Return the folded expression if folding is
9071 successful. Otherwise, return NULL_TREE. */
9074 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9076 enum tree_code_class kind = TREE_CODE_CLASS (code);
9077 tree arg0, arg1, tem;
9078 tree t1 = NULL_TREE;
9079 bool strict_overflow_p;
9081 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9082 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9083 && TREE_CODE_LENGTH (code) == 2
9085 && op1 != NULL_TREE);
9090 /* Strip any conversions that don't change the mode. This is
9091 safe for every expression, except for a comparison expression
9092 because its signedness is derived from its operands. So, in
9093 the latter case, only strip conversions that don't change the
9096 Note that this is done as an internal manipulation within the
9097 constant folder, in order to find the simplest representation
9098 of the arguments so that their form can be studied. In any
9099 cases, the appropriate type conversions should be put back in
9100 the tree that will get out of the constant folder. */
9102 if (kind == tcc_comparison)
9104 STRIP_SIGN_NOPS (arg0);
9105 STRIP_SIGN_NOPS (arg1);
9113 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9114 constant but we can't do arithmetic on them. */
9115 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9116 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9117 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9118 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9120 if (kind == tcc_binary)
9121 tem = const_binop (code, arg0, arg1, 0);
9122 else if (kind == tcc_comparison)
9123 tem = fold_relational_const (code, type, arg0, arg1);
9127 if (tem != NULL_TREE)
9129 if (TREE_TYPE (tem) != type)
9130 tem = fold_convert (type, tem);
9135 /* If this is a commutative operation, and ARG0 is a constant, move it
9136 to ARG1 to reduce the number of tests below. */
9137 if (commutative_tree_code (code)
9138 && tree_swap_operands_p (arg0, arg1, true))
9139 return fold_build2 (code, type, op1, op0);
9141 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9143 First check for cases where an arithmetic operation is applied to a
9144 compound, conditional, or comparison operation. Push the arithmetic
9145 operation inside the compound or conditional to see if any folding
9146 can then be done. Convert comparison to conditional for this purpose.
9147 The also optimizes non-constant cases that used to be done in
9150 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9151 one of the operands is a comparison and the other is a comparison, a
9152 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9153 code below would make the expression more complex. Change it to a
9154 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9155 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9157 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9158 || code == EQ_EXPR || code == NE_EXPR)
9159 && ((truth_value_p (TREE_CODE (arg0))
9160 && (truth_value_p (TREE_CODE (arg1))
9161 || (TREE_CODE (arg1) == BIT_AND_EXPR
9162 && integer_onep (TREE_OPERAND (arg1, 1)))))
9163 || (truth_value_p (TREE_CODE (arg1))
9164 && (truth_value_p (TREE_CODE (arg0))
9165 || (TREE_CODE (arg0) == BIT_AND_EXPR
9166 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9168 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9169 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9172 fold_convert (boolean_type_node, arg0),
9173 fold_convert (boolean_type_node, arg1));
9175 if (code == EQ_EXPR)
9176 tem = invert_truthvalue (tem);
9178 return fold_convert (type, tem);
9181 if (TREE_CODE_CLASS (code) == tcc_binary
9182 || TREE_CODE_CLASS (code) == tcc_comparison)
9184 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9185 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9186 fold_build2 (code, type,
9187 TREE_OPERAND (arg0, 1), op1));
9188 if (TREE_CODE (arg1) == COMPOUND_EXPR
9189 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9190 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9191 fold_build2 (code, type,
9192 op0, TREE_OPERAND (arg1, 1)));
9194 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9196 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9198 /*cond_first_p=*/1);
9199 if (tem != NULL_TREE)
9203 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9205 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9207 /*cond_first_p=*/0);
9208 if (tem != NULL_TREE)
9215 case POINTER_PLUS_EXPR:
9216 /* 0 +p index -> (type)index */
9217 if (integer_zerop (arg0))
9218 return non_lvalue (fold_convert (type, arg1));
9220 /* PTR +p 0 -> PTR */
9221 if (integer_zerop (arg1))
9222 return non_lvalue (fold_convert (type, arg0));
9224 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9225 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9226 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9227 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9228 fold_convert (sizetype, arg1),
9229 fold_convert (sizetype, arg0)));
9231 /* index +p PTR -> PTR +p index */
9232 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9233 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9234 return fold_build2 (POINTER_PLUS_EXPR, type,
9235 fold_convert (type, arg1), fold_convert (sizetype, arg0));
9237 /* (PTR +p B) +p A -> PTR +p (B + A) */
9238 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9241 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9242 tree arg00 = TREE_OPERAND (arg0, 0);
9243 inner = fold_build2 (PLUS_EXPR, sizetype, arg01, fold_convert (sizetype, arg1));
9244 return fold_build2 (POINTER_PLUS_EXPR, type, arg00, inner);
9247 /* PTR_CST +p CST -> CST1 */
9248 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9249 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9251 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9252 of the array. Loop optimizer sometimes produce this type of
9254 if (TREE_CODE (arg0) == ADDR_EXPR)
9256 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9258 return fold_convert (type, tem);
9263 /* PTR + INT -> (INT)(PTR p+ INT) */
9264 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9265 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9266 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9269 fold_convert (sizetype, arg1)));
9270 /* INT + PTR -> (INT)(PTR p+ INT) */
9271 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9272 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9273 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9276 fold_convert (sizetype, arg0)));
9277 /* A + (-B) -> A - B */
9278 if (TREE_CODE (arg1) == NEGATE_EXPR)
9279 return fold_build2 (MINUS_EXPR, type,
9280 fold_convert (type, arg0),
9281 fold_convert (type, TREE_OPERAND (arg1, 0)));
9282 /* (-A) + B -> B - A */
9283 if (TREE_CODE (arg0) == NEGATE_EXPR
9284 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9285 return fold_build2 (MINUS_EXPR, type,
9286 fold_convert (type, arg1),
9287 fold_convert (type, TREE_OPERAND (arg0, 0)));
9289 if (INTEGRAL_TYPE_P (type))
9291 /* Convert ~A + 1 to -A. */
9292 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9293 && integer_onep (arg1))
9294 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9297 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9298 && !TYPE_OVERFLOW_TRAPS (type))
9300 tree tem = TREE_OPERAND (arg0, 0);
9303 if (operand_equal_p (tem, arg1, 0))
9305 t1 = build_int_cst_type (type, -1);
9306 return omit_one_operand (type, t1, arg1);
9311 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9312 && !TYPE_OVERFLOW_TRAPS (type))
9314 tree tem = TREE_OPERAND (arg1, 0);
9317 if (operand_equal_p (arg0, tem, 0))
9319 t1 = build_int_cst_type (type, -1);
9320 return omit_one_operand (type, t1, arg0);
9325 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9327 if ((TREE_CODE (arg0) == MULT_EXPR
9328 || TREE_CODE (arg1) == MULT_EXPR)
9329 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9331 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9336 if (! FLOAT_TYPE_P (type))
9338 if (integer_zerop (arg1))
9339 return non_lvalue (fold_convert (type, arg0));
9341 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9342 with a constant, and the two constants have no bits in common,
9343 we should treat this as a BIT_IOR_EXPR since this may produce more
9345 if (TREE_CODE (arg0) == BIT_AND_EXPR
9346 && TREE_CODE (arg1) == BIT_AND_EXPR
9347 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9348 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9349 && integer_zerop (const_binop (BIT_AND_EXPR,
9350 TREE_OPERAND (arg0, 1),
9351 TREE_OPERAND (arg1, 1), 0)))
9353 code = BIT_IOR_EXPR;
9357 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9358 (plus (plus (mult) (mult)) (foo)) so that we can
9359 take advantage of the factoring cases below. */
9360 if (((TREE_CODE (arg0) == PLUS_EXPR
9361 || TREE_CODE (arg0) == MINUS_EXPR)
9362 && TREE_CODE (arg1) == MULT_EXPR)
9363 || ((TREE_CODE (arg1) == PLUS_EXPR
9364 || TREE_CODE (arg1) == MINUS_EXPR)
9365 && TREE_CODE (arg0) == MULT_EXPR))
9367 tree parg0, parg1, parg, marg;
9368 enum tree_code pcode;
9370 if (TREE_CODE (arg1) == MULT_EXPR)
9371 parg = arg0, marg = arg1;
9373 parg = arg1, marg = arg0;
9374 pcode = TREE_CODE (parg);
9375 parg0 = TREE_OPERAND (parg, 0);
9376 parg1 = TREE_OPERAND (parg, 1);
9380 if (TREE_CODE (parg0) == MULT_EXPR
9381 && TREE_CODE (parg1) != MULT_EXPR)
9382 return fold_build2 (pcode, type,
9383 fold_build2 (PLUS_EXPR, type,
9384 fold_convert (type, parg0),
9385 fold_convert (type, marg)),
9386 fold_convert (type, parg1));
9387 if (TREE_CODE (parg0) != MULT_EXPR
9388 && TREE_CODE (parg1) == MULT_EXPR)
9389 return fold_build2 (PLUS_EXPR, type,
9390 fold_convert (type, parg0),
9391 fold_build2 (pcode, type,
9392 fold_convert (type, marg),
9399 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9400 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9401 return non_lvalue (fold_convert (type, arg0));
9403 /* Likewise if the operands are reversed. */
9404 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9405 return non_lvalue (fold_convert (type, arg1));
9407 /* Convert X + -C into X - C. */
9408 if (TREE_CODE (arg1) == REAL_CST
9409 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9411 tem = fold_negate_const (arg1, type);
9412 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9413 return fold_build2 (MINUS_EXPR, type,
9414 fold_convert (type, arg0),
9415 fold_convert (type, tem));
9418 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9419 to __complex__ ( x, y ). This is not the same for SNaNs or
9420 if signed zeros are involved. */
9421 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9422 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9423 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9425 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9426 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9427 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9428 bool arg0rz = false, arg0iz = false;
9429 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9430 || (arg0i && (arg0iz = real_zerop (arg0i))))
9432 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9433 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9434 if (arg0rz && arg1i && real_zerop (arg1i))
9436 tree rp = arg1r ? arg1r
9437 : build1 (REALPART_EXPR, rtype, arg1);
9438 tree ip = arg0i ? arg0i
9439 : build1 (IMAGPART_EXPR, rtype, arg0);
9440 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9442 else if (arg0iz && arg1r && real_zerop (arg1r))
9444 tree rp = arg0r ? arg0r
9445 : build1 (REALPART_EXPR, rtype, arg0);
9446 tree ip = arg1i ? arg1i
9447 : build1 (IMAGPART_EXPR, rtype, arg1);
9448 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9453 if (flag_unsafe_math_optimizations
9454 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9455 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9456 && (tem = distribute_real_division (code, type, arg0, arg1)))
9459 /* Convert x+x into x*2.0. */
9460 if (operand_equal_p (arg0, arg1, 0)
9461 && SCALAR_FLOAT_TYPE_P (type))
9462 return fold_build2 (MULT_EXPR, type, arg0,
9463 build_real (type, dconst2));
9465 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9466 if (flag_unsafe_math_optimizations
9467 && TREE_CODE (arg1) == PLUS_EXPR
9468 && TREE_CODE (arg0) != MULT_EXPR)
9470 tree tree10 = TREE_OPERAND (arg1, 0);
9471 tree tree11 = TREE_OPERAND (arg1, 1);
9472 if (TREE_CODE (tree11) == MULT_EXPR
9473 && TREE_CODE (tree10) == MULT_EXPR)
9476 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9477 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9480 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
9481 if (flag_unsafe_math_optimizations
9482 && TREE_CODE (arg0) == PLUS_EXPR
9483 && TREE_CODE (arg1) != MULT_EXPR)
9485 tree tree00 = TREE_OPERAND (arg0, 0);
9486 tree tree01 = TREE_OPERAND (arg0, 1);
9487 if (TREE_CODE (tree01) == MULT_EXPR
9488 && TREE_CODE (tree00) == MULT_EXPR)
9491 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9492 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9498 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9499 is a rotate of A by C1 bits. */
9500 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9501 is a rotate of A by B bits. */
9503 enum tree_code code0, code1;
9504 code0 = TREE_CODE (arg0);
9505 code1 = TREE_CODE (arg1);
9506 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9507 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9508 && operand_equal_p (TREE_OPERAND (arg0, 0),
9509 TREE_OPERAND (arg1, 0), 0)
9510 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9512 tree tree01, tree11;
9513 enum tree_code code01, code11;
9515 tree01 = TREE_OPERAND (arg0, 1);
9516 tree11 = TREE_OPERAND (arg1, 1);
9517 STRIP_NOPS (tree01);
9518 STRIP_NOPS (tree11);
9519 code01 = TREE_CODE (tree01);
9520 code11 = TREE_CODE (tree11);
9521 if (code01 == INTEGER_CST
9522 && code11 == INTEGER_CST
9523 && TREE_INT_CST_HIGH (tree01) == 0
9524 && TREE_INT_CST_HIGH (tree11) == 0
9525 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9526 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9527 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9528 code0 == LSHIFT_EXPR ? tree01 : tree11);
9529 else if (code11 == MINUS_EXPR)
9531 tree tree110, tree111;
9532 tree110 = TREE_OPERAND (tree11, 0);
9533 tree111 = TREE_OPERAND (tree11, 1);
9534 STRIP_NOPS (tree110);
9535 STRIP_NOPS (tree111);
9536 if (TREE_CODE (tree110) == INTEGER_CST
9537 && 0 == compare_tree_int (tree110,
9539 (TREE_TYPE (TREE_OPERAND
9541 && operand_equal_p (tree01, tree111, 0))
9542 return build2 ((code0 == LSHIFT_EXPR
9545 type, TREE_OPERAND (arg0, 0), tree01);
9547 else if (code01 == MINUS_EXPR)
9549 tree tree010, tree011;
9550 tree010 = TREE_OPERAND (tree01, 0);
9551 tree011 = TREE_OPERAND (tree01, 1);
9552 STRIP_NOPS (tree010);
9553 STRIP_NOPS (tree011);
9554 if (TREE_CODE (tree010) == INTEGER_CST
9555 && 0 == compare_tree_int (tree010,
9557 (TREE_TYPE (TREE_OPERAND
9559 && operand_equal_p (tree11, tree011, 0))
9560 return build2 ((code0 != LSHIFT_EXPR
9563 type, TREE_OPERAND (arg0, 0), tree11);
9569 /* In most languages, can't associate operations on floats through
9570 parentheses. Rather than remember where the parentheses were, we
9571 don't associate floats at all, unless the user has specified
9572 -funsafe-math-optimizations. */
9574 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9576 tree var0, con0, lit0, minus_lit0;
9577 tree var1, con1, lit1, minus_lit1;
9580 /* Split both trees into variables, constants, and literals. Then
9581 associate each group together, the constants with literals,
9582 then the result with variables. This increases the chances of
9583 literals being recombined later and of generating relocatable
9584 expressions for the sum of a constant and literal. */
9585 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9586 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9587 code == MINUS_EXPR);
9589 /* With undefined overflow we can only associate constants
9590 with one variable. */
9591 if ((POINTER_TYPE_P (type)
9592 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9598 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9599 tmp0 = TREE_OPERAND (tmp0, 0);
9600 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9601 tmp1 = TREE_OPERAND (tmp1, 0);
9602 /* The only case we can still associate with two variables
9603 is if they are the same, modulo negation. */
9604 if (!operand_equal_p (tmp0, tmp1, 0))
9608 /* Only do something if we found more than two objects. Otherwise,
9609 nothing has changed and we risk infinite recursion. */
9611 && (2 < ((var0 != 0) + (var1 != 0)
9612 + (con0 != 0) + (con1 != 0)
9613 + (lit0 != 0) + (lit1 != 0)
9614 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9616 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9617 if (code == MINUS_EXPR)
9620 var0 = associate_trees (var0, var1, code, type);
9621 con0 = associate_trees (con0, con1, code, type);
9622 lit0 = associate_trees (lit0, lit1, code, type);
9623 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9625 /* Preserve the MINUS_EXPR if the negative part of the literal is
9626 greater than the positive part. Otherwise, the multiplicative
9627 folding code (i.e extract_muldiv) may be fooled in case
9628 unsigned constants are subtracted, like in the following
9629 example: ((X*2 + 4) - 8U)/2. */
9630 if (minus_lit0 && lit0)
9632 if (TREE_CODE (lit0) == INTEGER_CST
9633 && TREE_CODE (minus_lit0) == INTEGER_CST
9634 && tree_int_cst_lt (lit0, minus_lit0))
9636 minus_lit0 = associate_trees (minus_lit0, lit0,
9642 lit0 = associate_trees (lit0, minus_lit0,
9650 return fold_convert (type,
9651 associate_trees (var0, minus_lit0,
9655 con0 = associate_trees (con0, minus_lit0,
9657 return fold_convert (type,
9658 associate_trees (var0, con0,
9663 con0 = associate_trees (con0, lit0, code, type);
9664 return fold_convert (type, associate_trees (var0, con0,
9672 /* Pointer simplifications for subtraction, simple reassociations. */
9673 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9675 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9676 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9677 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9679 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9680 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9681 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9682 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9683 return fold_build2 (PLUS_EXPR, type,
9684 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9685 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9687 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9688 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9690 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9691 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9692 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9694 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9697 /* A - (-B) -> A + B */
9698 if (TREE_CODE (arg1) == NEGATE_EXPR)
9699 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9700 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9701 if (TREE_CODE (arg0) == NEGATE_EXPR
9702 && (FLOAT_TYPE_P (type)
9703 || INTEGRAL_TYPE_P (type))
9704 && negate_expr_p (arg1)
9705 && reorder_operands_p (arg0, arg1))
9706 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9707 TREE_OPERAND (arg0, 0));
9708 /* Convert -A - 1 to ~A. */
9709 if (INTEGRAL_TYPE_P (type)
9710 && TREE_CODE (arg0) == NEGATE_EXPR
9711 && integer_onep (arg1)
9712 && !TYPE_OVERFLOW_TRAPS (type))
9713 return fold_build1 (BIT_NOT_EXPR, type,
9714 fold_convert (type, TREE_OPERAND (arg0, 0)));
9716 /* Convert -1 - A to ~A. */
9717 if (INTEGRAL_TYPE_P (type)
9718 && integer_all_onesp (arg0))
9719 return fold_build1 (BIT_NOT_EXPR, type, op1);
9721 if (! FLOAT_TYPE_P (type))
9723 if (integer_zerop (arg0))
9724 return negate_expr (fold_convert (type, arg1));
9725 if (integer_zerop (arg1))
9726 return non_lvalue (fold_convert (type, arg0));
9728 /* Fold A - (A & B) into ~B & A. */
9729 if (!TREE_SIDE_EFFECTS (arg0)
9730 && TREE_CODE (arg1) == BIT_AND_EXPR)
9732 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9733 return fold_build2 (BIT_AND_EXPR, type,
9734 fold_build1 (BIT_NOT_EXPR, type,
9735 TREE_OPERAND (arg1, 0)),
9737 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9738 return fold_build2 (BIT_AND_EXPR, type,
9739 fold_build1 (BIT_NOT_EXPR, type,
9740 TREE_OPERAND (arg1, 1)),
9744 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9745 any power of 2 minus 1. */
9746 if (TREE_CODE (arg0) == BIT_AND_EXPR
9747 && TREE_CODE (arg1) == BIT_AND_EXPR
9748 && operand_equal_p (TREE_OPERAND (arg0, 0),
9749 TREE_OPERAND (arg1, 0), 0))
9751 tree mask0 = TREE_OPERAND (arg0, 1);
9752 tree mask1 = TREE_OPERAND (arg1, 1);
9753 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9755 if (operand_equal_p (tem, mask1, 0))
9757 tem = fold_build2 (BIT_XOR_EXPR, type,
9758 TREE_OPERAND (arg0, 0), mask1);
9759 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9764 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9765 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9766 return non_lvalue (fold_convert (type, arg0));
9768 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9769 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9770 (-ARG1 + ARG0) reduces to -ARG1. */
9771 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9772 return negate_expr (fold_convert (type, arg1));
9774 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9775 __complex__ ( x, -y ). This is not the same for SNaNs or if
9776 signed zeros are involved. */
9777 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9778 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9779 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9781 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9782 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9783 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9784 bool arg0rz = false, arg0iz = false;
9785 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9786 || (arg0i && (arg0iz = real_zerop (arg0i))))
9788 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9789 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9790 if (arg0rz && arg1i && real_zerop (arg1i))
9792 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9794 : build1 (REALPART_EXPR, rtype, arg1));
9795 tree ip = arg0i ? arg0i
9796 : build1 (IMAGPART_EXPR, rtype, arg0);
9797 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9799 else if (arg0iz && arg1r && real_zerop (arg1r))
9801 tree rp = arg0r ? arg0r
9802 : build1 (REALPART_EXPR, rtype, arg0);
9803 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9805 : build1 (IMAGPART_EXPR, rtype, arg1));
9806 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9811 /* Fold &x - &x. This can happen from &x.foo - &x.
9812 This is unsafe for certain floats even in non-IEEE formats.
9813 In IEEE, it is unsafe because it does wrong for NaNs.
9814 Also note that operand_equal_p is always false if an operand
9817 if ((! FLOAT_TYPE_P (type)
9818 || (flag_unsafe_math_optimizations
9819 && !HONOR_NANS (TYPE_MODE (type))
9820 && !HONOR_INFINITIES (TYPE_MODE (type))))
9821 && operand_equal_p (arg0, arg1, 0))
9822 return fold_convert (type, integer_zero_node);
9824 /* A - B -> A + (-B) if B is easily negatable. */
9825 if (negate_expr_p (arg1)
9826 && ((FLOAT_TYPE_P (type)
9827 /* Avoid this transformation if B is a positive REAL_CST. */
9828 && (TREE_CODE (arg1) != REAL_CST
9829 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9830 || INTEGRAL_TYPE_P (type)))
9831 return fold_build2 (PLUS_EXPR, type,
9832 fold_convert (type, arg0),
9833 fold_convert (type, negate_expr (arg1)));
9835 /* Try folding difference of addresses. */
9839 if ((TREE_CODE (arg0) == ADDR_EXPR
9840 || TREE_CODE (arg1) == ADDR_EXPR)
9841 && ptr_difference_const (arg0, arg1, &diff))
9842 return build_int_cst_type (type, diff);
9845 /* Fold &a[i] - &a[j] to i-j. */
9846 if (TREE_CODE (arg0) == ADDR_EXPR
9847 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9848 && TREE_CODE (arg1) == ADDR_EXPR
9849 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9851 tree aref0 = TREE_OPERAND (arg0, 0);
9852 tree aref1 = TREE_OPERAND (arg1, 0);
9853 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9854 TREE_OPERAND (aref1, 0), 0))
9856 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9857 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9858 tree esz = array_ref_element_size (aref0);
9859 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9860 return fold_build2 (MULT_EXPR, type, diff,
9861 fold_convert (type, esz));
9866 if (flag_unsafe_math_optimizations
9867 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9868 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9869 && (tem = distribute_real_division (code, type, arg0, arg1)))
9872 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9874 if ((TREE_CODE (arg0) == MULT_EXPR
9875 || TREE_CODE (arg1) == MULT_EXPR)
9876 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9878 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9886 /* (-A) * (-B) -> A * B */
9887 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9888 return fold_build2 (MULT_EXPR, type,
9889 fold_convert (type, TREE_OPERAND (arg0, 0)),
9890 fold_convert (type, negate_expr (arg1)));
9891 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9892 return fold_build2 (MULT_EXPR, type,
9893 fold_convert (type, negate_expr (arg0)),
9894 fold_convert (type, TREE_OPERAND (arg1, 0)));
9896 if (! FLOAT_TYPE_P (type))
9898 if (integer_zerop (arg1))
9899 return omit_one_operand (type, arg1, arg0);
9900 if (integer_onep (arg1))
9901 return non_lvalue (fold_convert (type, arg0));
9902 /* Transform x * -1 into -x. */
9903 if (integer_all_onesp (arg1))
9904 return fold_convert (type, negate_expr (arg0));
9905 /* Transform x * -C into -x * C if x is easily negatable. */
9906 if (TREE_CODE (arg1) == INTEGER_CST
9907 && tree_int_cst_sgn (arg1) == -1
9908 && negate_expr_p (arg0)
9909 && (tem = negate_expr (arg1)) != arg1
9910 && !TREE_OVERFLOW (tem))
9911 return fold_build2 (MULT_EXPR, type,
9912 negate_expr (arg0), tem);
9914 /* (a * (1 << b)) is (a << b) */
9915 if (TREE_CODE (arg1) == LSHIFT_EXPR
9916 && integer_onep (TREE_OPERAND (arg1, 0)))
9917 return fold_build2 (LSHIFT_EXPR, type, arg0,
9918 TREE_OPERAND (arg1, 1));
9919 if (TREE_CODE (arg0) == LSHIFT_EXPR
9920 && integer_onep (TREE_OPERAND (arg0, 0)))
9921 return fold_build2 (LSHIFT_EXPR, type, arg1,
9922 TREE_OPERAND (arg0, 1));
9924 strict_overflow_p = false;
9925 if (TREE_CODE (arg1) == INTEGER_CST
9926 && 0 != (tem = extract_muldiv (op0,
9927 fold_convert (type, arg1),
9929 &strict_overflow_p)))
9931 if (strict_overflow_p)
9932 fold_overflow_warning (("assuming signed overflow does not "
9933 "occur when simplifying "
9935 WARN_STRICT_OVERFLOW_MISC);
9936 return fold_convert (type, tem);
9939 /* Optimize z * conj(z) for integer complex numbers. */
9940 if (TREE_CODE (arg0) == CONJ_EXPR
9941 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9942 return fold_mult_zconjz (type, arg1);
9943 if (TREE_CODE (arg1) == CONJ_EXPR
9944 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9945 return fold_mult_zconjz (type, arg0);
9949 /* Maybe fold x * 0 to 0. The expressions aren't the same
9950 when x is NaN, since x * 0 is also NaN. Nor are they the
9951 same in modes with signed zeros, since multiplying a
9952 negative value by 0 gives -0, not +0. */
9953 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9954 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9955 && real_zerop (arg1))
9956 return omit_one_operand (type, arg1, arg0);
9957 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9958 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9959 && real_onep (arg1))
9960 return non_lvalue (fold_convert (type, arg0));
9962 /* Transform x * -1.0 into -x. */
9963 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9964 && real_minus_onep (arg1))
9965 return fold_convert (type, negate_expr (arg0));
9967 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9968 if (flag_unsafe_math_optimizations
9969 && TREE_CODE (arg0) == RDIV_EXPR
9970 && TREE_CODE (arg1) == REAL_CST
9971 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9973 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9976 return fold_build2 (RDIV_EXPR, type, tem,
9977 TREE_OPERAND (arg0, 1));
9980 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9981 if (operand_equal_p (arg0, arg1, 0))
9983 tree tem = fold_strip_sign_ops (arg0);
9984 if (tem != NULL_TREE)
9986 tem = fold_convert (type, tem);
9987 return fold_build2 (MULT_EXPR, type, tem, tem);
9991 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9992 This is not the same for NaNs or if signed zeros are
9994 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9995 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9996 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9997 && TREE_CODE (arg1) == COMPLEX_CST
9998 && real_zerop (TREE_REALPART (arg1)))
10000 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10001 if (real_onep (TREE_IMAGPART (arg1)))
10002 return fold_build2 (COMPLEX_EXPR, type,
10003 negate_expr (fold_build1 (IMAGPART_EXPR,
10005 fold_build1 (REALPART_EXPR, rtype, arg0));
10006 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10007 return fold_build2 (COMPLEX_EXPR, type,
10008 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10009 negate_expr (fold_build1 (REALPART_EXPR,
10013 /* Optimize z * conj(z) for floating point complex numbers.
10014 Guarded by flag_unsafe_math_optimizations as non-finite
10015 imaginary components don't produce scalar results. */
10016 if (flag_unsafe_math_optimizations
10017 && TREE_CODE (arg0) == CONJ_EXPR
10018 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10019 return fold_mult_zconjz (type, arg1);
10020 if (flag_unsafe_math_optimizations
10021 && TREE_CODE (arg1) == CONJ_EXPR
10022 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10023 return fold_mult_zconjz (type, arg0);
10025 if (flag_unsafe_math_optimizations)
10027 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10028 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10030 /* Optimizations of root(...)*root(...). */
10031 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10034 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10035 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10037 /* Optimize sqrt(x)*sqrt(x) as x. */
10038 if (BUILTIN_SQRT_P (fcode0)
10039 && operand_equal_p (arg00, arg10, 0)
10040 && ! HONOR_SNANS (TYPE_MODE (type)))
10043 /* Optimize root(x)*root(y) as root(x*y). */
10044 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10045 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10046 return build_call_expr (rootfn, 1, arg);
10049 /* Optimize expN(x)*expN(y) as expN(x+y). */
10050 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10052 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10053 tree arg = fold_build2 (PLUS_EXPR, type,
10054 CALL_EXPR_ARG (arg0, 0),
10055 CALL_EXPR_ARG (arg1, 0));
10056 return build_call_expr (expfn, 1, arg);
10059 /* Optimizations of pow(...)*pow(...). */
10060 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10061 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10062 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10064 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10065 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10066 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10067 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10069 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10070 if (operand_equal_p (arg01, arg11, 0))
10072 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10073 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10074 return build_call_expr (powfn, 2, arg, arg01);
10077 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10078 if (operand_equal_p (arg00, arg10, 0))
10080 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10081 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10082 return build_call_expr (powfn, 2, arg00, arg);
10086 /* Optimize tan(x)*cos(x) as sin(x). */
10087 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10088 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10089 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10090 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10091 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10092 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10093 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10094 CALL_EXPR_ARG (arg1, 0), 0))
10096 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10098 if (sinfn != NULL_TREE)
10099 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10102 /* Optimize x*pow(x,c) as pow(x,c+1). */
10103 if (fcode1 == BUILT_IN_POW
10104 || fcode1 == BUILT_IN_POWF
10105 || fcode1 == BUILT_IN_POWL)
10107 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10108 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10109 if (TREE_CODE (arg11) == REAL_CST
10110 && !TREE_OVERFLOW (arg11)
10111 && operand_equal_p (arg0, arg10, 0))
10113 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10117 c = TREE_REAL_CST (arg11);
10118 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10119 arg = build_real (type, c);
10120 return build_call_expr (powfn, 2, arg0, arg);
10124 /* Optimize pow(x,c)*x as pow(x,c+1). */
10125 if (fcode0 == BUILT_IN_POW
10126 || fcode0 == BUILT_IN_POWF
10127 || fcode0 == BUILT_IN_POWL)
10129 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10130 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10131 if (TREE_CODE (arg01) == REAL_CST
10132 && !TREE_OVERFLOW (arg01)
10133 && operand_equal_p (arg1, arg00, 0))
10135 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10139 c = TREE_REAL_CST (arg01);
10140 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10141 arg = build_real (type, c);
10142 return build_call_expr (powfn, 2, arg1, arg);
10146 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10147 if (! optimize_size
10148 && operand_equal_p (arg0, arg1, 0))
10150 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10154 tree arg = build_real (type, dconst2);
10155 return build_call_expr (powfn, 2, arg0, arg);
10164 if (integer_all_onesp (arg1))
10165 return omit_one_operand (type, arg1, arg0);
10166 if (integer_zerop (arg1))
10167 return non_lvalue (fold_convert (type, arg0));
10168 if (operand_equal_p (arg0, arg1, 0))
10169 return non_lvalue (fold_convert (type, arg0));
10171 /* ~X | X is -1. */
10172 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10173 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10175 t1 = build_int_cst_type (type, -1);
10176 return omit_one_operand (type, t1, arg1);
10179 /* X | ~X is -1. */
10180 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10181 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10183 t1 = build_int_cst_type (type, -1);
10184 return omit_one_operand (type, t1, arg0);
10187 /* Canonicalize (X & C1) | C2. */
10188 if (TREE_CODE (arg0) == BIT_AND_EXPR
10189 && TREE_CODE (arg1) == INTEGER_CST
10190 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10192 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
10193 int width = TYPE_PRECISION (type);
10194 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10195 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10196 hi2 = TREE_INT_CST_HIGH (arg1);
10197 lo2 = TREE_INT_CST_LOW (arg1);
10199 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10200 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10201 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10203 if (width > HOST_BITS_PER_WIDE_INT)
10205 mhi = (unsigned HOST_WIDE_INT) -1
10206 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10212 mlo = (unsigned HOST_WIDE_INT) -1
10213 >> (HOST_BITS_PER_WIDE_INT - width);
10216 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10217 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10218 return fold_build2 (BIT_IOR_EXPR, type,
10219 TREE_OPERAND (arg0, 0), arg1);
10221 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
10224 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
10225 return fold_build2 (BIT_IOR_EXPR, type,
10226 fold_build2 (BIT_AND_EXPR, type,
10227 TREE_OPERAND (arg0, 0),
10228 build_int_cst_wide (type,
10234 /* (X & Y) | Y is (X, Y). */
10235 if (TREE_CODE (arg0) == BIT_AND_EXPR
10236 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10237 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10238 /* (X & Y) | X is (Y, X). */
10239 if (TREE_CODE (arg0) == BIT_AND_EXPR
10240 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10241 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10242 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10243 /* X | (X & Y) is (Y, X). */
10244 if (TREE_CODE (arg1) == BIT_AND_EXPR
10245 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10246 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10247 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10248 /* X | (Y & X) is (Y, X). */
10249 if (TREE_CODE (arg1) == BIT_AND_EXPR
10250 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10251 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10252 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10254 t1 = distribute_bit_expr (code, type, arg0, arg1);
10255 if (t1 != NULL_TREE)
10258 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10260 This results in more efficient code for machines without a NAND
10261 instruction. Combine will canonicalize to the first form
10262 which will allow use of NAND instructions provided by the
10263 backend if they exist. */
10264 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10265 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10267 return fold_build1 (BIT_NOT_EXPR, type,
10268 build2 (BIT_AND_EXPR, type,
10269 TREE_OPERAND (arg0, 0),
10270 TREE_OPERAND (arg1, 0)));
10273 /* See if this can be simplified into a rotate first. If that
10274 is unsuccessful continue in the association code. */
10278 if (integer_zerop (arg1))
10279 return non_lvalue (fold_convert (type, arg0));
10280 if (integer_all_onesp (arg1))
10281 return fold_build1 (BIT_NOT_EXPR, type, op0);
10282 if (operand_equal_p (arg0, arg1, 0))
10283 return omit_one_operand (type, integer_zero_node, arg0);
10285 /* ~X ^ X is -1. */
10286 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10287 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10289 t1 = build_int_cst_type (type, -1);
10290 return omit_one_operand (type, t1, arg1);
10293 /* X ^ ~X is -1. */
10294 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10295 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10297 t1 = build_int_cst_type (type, -1);
10298 return omit_one_operand (type, t1, arg0);
10301 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10302 with a constant, and the two constants have no bits in common,
10303 we should treat this as a BIT_IOR_EXPR since this may produce more
10304 simplifications. */
10305 if (TREE_CODE (arg0) == BIT_AND_EXPR
10306 && TREE_CODE (arg1) == BIT_AND_EXPR
10307 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10308 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10309 && integer_zerop (const_binop (BIT_AND_EXPR,
10310 TREE_OPERAND (arg0, 1),
10311 TREE_OPERAND (arg1, 1), 0)))
10313 code = BIT_IOR_EXPR;
10317 /* (X | Y) ^ X -> Y & ~ X*/
10318 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10319 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10321 tree t2 = TREE_OPERAND (arg0, 1);
10322 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10324 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10325 fold_convert (type, t1));
10329 /* (Y | X) ^ X -> Y & ~ X*/
10330 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10331 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10333 tree t2 = TREE_OPERAND (arg0, 0);
10334 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10336 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10337 fold_convert (type, t1));
10341 /* X ^ (X | Y) -> Y & ~ X*/
10342 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10343 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10345 tree t2 = TREE_OPERAND (arg1, 1);
10346 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10348 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10349 fold_convert (type, t1));
10353 /* X ^ (Y | X) -> Y & ~ X*/
10354 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10355 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10357 tree t2 = TREE_OPERAND (arg1, 0);
10358 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10360 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10361 fold_convert (type, t1));
10365 /* Convert ~X ^ ~Y to X ^ Y. */
10366 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10367 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10368 return fold_build2 (code, type,
10369 fold_convert (type, TREE_OPERAND (arg0, 0)),
10370 fold_convert (type, TREE_OPERAND (arg1, 0)));
10372 /* Convert ~X ^ C to X ^ ~C. */
10373 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10374 && TREE_CODE (arg1) == INTEGER_CST)
10375 return fold_build2 (code, type,
10376 fold_convert (type, TREE_OPERAND (arg0, 0)),
10377 fold_build1 (BIT_NOT_EXPR, type, arg1));
10379 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10380 if (TREE_CODE (arg0) == BIT_AND_EXPR
10381 && integer_onep (TREE_OPERAND (arg0, 1))
10382 && integer_onep (arg1))
10383 return fold_build2 (EQ_EXPR, type, arg0,
10384 build_int_cst (TREE_TYPE (arg0), 0));
10386 /* Fold (X & Y) ^ Y as ~X & Y. */
10387 if (TREE_CODE (arg0) == BIT_AND_EXPR
10388 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10390 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10391 return fold_build2 (BIT_AND_EXPR, type,
10392 fold_build1 (BIT_NOT_EXPR, type, tem),
10393 fold_convert (type, arg1));
10395 /* Fold (X & Y) ^ X as ~Y & X. */
10396 if (TREE_CODE (arg0) == BIT_AND_EXPR
10397 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10398 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10400 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10401 return fold_build2 (BIT_AND_EXPR, type,
10402 fold_build1 (BIT_NOT_EXPR, type, tem),
10403 fold_convert (type, arg1));
10405 /* Fold X ^ (X & Y) as X & ~Y. */
10406 if (TREE_CODE (arg1) == BIT_AND_EXPR
10407 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10409 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10410 return fold_build2 (BIT_AND_EXPR, type,
10411 fold_convert (type, arg0),
10412 fold_build1 (BIT_NOT_EXPR, type, tem));
10414 /* Fold X ^ (Y & X) as ~Y & X. */
10415 if (TREE_CODE (arg1) == BIT_AND_EXPR
10416 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10417 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10419 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10420 return fold_build2 (BIT_AND_EXPR, type,
10421 fold_build1 (BIT_NOT_EXPR, type, tem),
10422 fold_convert (type, arg0));
10425 /* See if this can be simplified into a rotate first. If that
10426 is unsuccessful continue in the association code. */
10430 if (integer_all_onesp (arg1))
10431 return non_lvalue (fold_convert (type, arg0));
10432 if (integer_zerop (arg1))
10433 return omit_one_operand (type, arg1, arg0);
10434 if (operand_equal_p (arg0, arg1, 0))
10435 return non_lvalue (fold_convert (type, arg0));
10437 /* ~X & X is always zero. */
10438 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10439 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10440 return omit_one_operand (type, integer_zero_node, arg1);
10442 /* X & ~X is always zero. */
10443 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10444 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10445 return omit_one_operand (type, integer_zero_node, arg0);
10447 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10448 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10449 && TREE_CODE (arg1) == INTEGER_CST
10450 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10451 return fold_build2 (BIT_IOR_EXPR, type,
10452 fold_build2 (BIT_AND_EXPR, type,
10453 TREE_OPERAND (arg0, 0), arg1),
10454 fold_build2 (BIT_AND_EXPR, type,
10455 TREE_OPERAND (arg0, 1), arg1));
10457 /* (X | Y) & Y is (X, Y). */
10458 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10459 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10460 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10461 /* (X | Y) & X is (Y, X). */
10462 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10463 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10464 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10465 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10466 /* X & (X | Y) is (Y, X). */
10467 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10468 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10469 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10470 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10471 /* X & (Y | X) is (Y, X). */
10472 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10473 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10474 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10475 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10477 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10478 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10479 && integer_onep (TREE_OPERAND (arg0, 1))
10480 && integer_onep (arg1))
10482 tem = TREE_OPERAND (arg0, 0);
10483 return fold_build2 (EQ_EXPR, type,
10484 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10485 build_int_cst (TREE_TYPE (tem), 1)),
10486 build_int_cst (TREE_TYPE (tem), 0));
10488 /* Fold ~X & 1 as (X & 1) == 0. */
10489 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10490 && integer_onep (arg1))
10492 tem = TREE_OPERAND (arg0, 0);
10493 return fold_build2 (EQ_EXPR, type,
10494 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10495 build_int_cst (TREE_TYPE (tem), 1)),
10496 build_int_cst (TREE_TYPE (tem), 0));
10499 /* Fold (X ^ Y) & Y as ~X & Y. */
10500 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10501 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10503 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10504 return fold_build2 (BIT_AND_EXPR, type,
10505 fold_build1 (BIT_NOT_EXPR, type, tem),
10506 fold_convert (type, arg1));
10508 /* Fold (X ^ Y) & X as ~Y & X. */
10509 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10510 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10511 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10513 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10514 return fold_build2 (BIT_AND_EXPR, type,
10515 fold_build1 (BIT_NOT_EXPR, type, tem),
10516 fold_convert (type, arg1));
10518 /* Fold X & (X ^ Y) as X & ~Y. */
10519 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10520 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10522 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10523 return fold_build2 (BIT_AND_EXPR, type,
10524 fold_convert (type, arg0),
10525 fold_build1 (BIT_NOT_EXPR, type, tem));
10527 /* Fold X & (Y ^ X) as ~Y & X. */
10528 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10529 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10530 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10532 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10533 return fold_build2 (BIT_AND_EXPR, type,
10534 fold_build1 (BIT_NOT_EXPR, type, tem),
10535 fold_convert (type, arg0));
10538 t1 = distribute_bit_expr (code, type, arg0, arg1);
10539 if (t1 != NULL_TREE)
10541 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10542 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10543 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10546 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10548 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10549 && (~TREE_INT_CST_LOW (arg1)
10550 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10551 return fold_convert (type, TREE_OPERAND (arg0, 0));
10554 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10556 This results in more efficient code for machines without a NOR
10557 instruction. Combine will canonicalize to the first form
10558 which will allow use of NOR instructions provided by the
10559 backend if they exist. */
10560 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10561 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10563 return fold_build1 (BIT_NOT_EXPR, type,
10564 build2 (BIT_IOR_EXPR, type,
10565 TREE_OPERAND (arg0, 0),
10566 TREE_OPERAND (arg1, 0)));
10572 /* Don't touch a floating-point divide by zero unless the mode
10573 of the constant can represent infinity. */
10574 if (TREE_CODE (arg1) == REAL_CST
10575 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10576 && real_zerop (arg1))
10579 /* Optimize A / A to 1.0 if we don't care about
10580 NaNs or Infinities. Skip the transformation
10581 for non-real operands. */
10582 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10583 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10584 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10585 && operand_equal_p (arg0, arg1, 0))
10587 tree r = build_real (TREE_TYPE (arg0), dconst1);
10589 return omit_two_operands (type, r, arg0, arg1);
10592 /* The complex version of the above A / A optimization. */
10593 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10594 && operand_equal_p (arg0, arg1, 0))
10596 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10597 if (! HONOR_NANS (TYPE_MODE (elem_type))
10598 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10600 tree r = build_real (elem_type, dconst1);
10601 /* omit_two_operands will call fold_convert for us. */
10602 return omit_two_operands (type, r, arg0, arg1);
10606 /* (-A) / (-B) -> A / B */
10607 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10608 return fold_build2 (RDIV_EXPR, type,
10609 TREE_OPERAND (arg0, 0),
10610 negate_expr (arg1));
10611 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10612 return fold_build2 (RDIV_EXPR, type,
10613 negate_expr (arg0),
10614 TREE_OPERAND (arg1, 0));
10616 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10617 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10618 && real_onep (arg1))
10619 return non_lvalue (fold_convert (type, arg0));
10621 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10622 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10623 && real_minus_onep (arg1))
10624 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10626 /* If ARG1 is a constant, we can convert this to a multiply by the
10627 reciprocal. This does not have the same rounding properties,
10628 so only do this if -funsafe-math-optimizations. We can actually
10629 always safely do it if ARG1 is a power of two, but it's hard to
10630 tell if it is or not in a portable manner. */
10631 if (TREE_CODE (arg1) == REAL_CST)
10633 if (flag_unsafe_math_optimizations
10634 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10636 return fold_build2 (MULT_EXPR, type, arg0, tem);
10637 /* Find the reciprocal if optimizing and the result is exact. */
10641 r = TREE_REAL_CST (arg1);
10642 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10644 tem = build_real (type, r);
10645 return fold_build2 (MULT_EXPR, type,
10646 fold_convert (type, arg0), tem);
10650 /* Convert A/B/C to A/(B*C). */
10651 if (flag_unsafe_math_optimizations
10652 && TREE_CODE (arg0) == RDIV_EXPR)
10653 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10654 fold_build2 (MULT_EXPR, type,
10655 TREE_OPERAND (arg0, 1), arg1));
10657 /* Convert A/(B/C) to (A/B)*C. */
10658 if (flag_unsafe_math_optimizations
10659 && TREE_CODE (arg1) == RDIV_EXPR)
10660 return fold_build2 (MULT_EXPR, type,
10661 fold_build2 (RDIV_EXPR, type, arg0,
10662 TREE_OPERAND (arg1, 0)),
10663 TREE_OPERAND (arg1, 1));
10665 /* Convert C1/(X*C2) into (C1/C2)/X. */
10666 if (flag_unsafe_math_optimizations
10667 && TREE_CODE (arg1) == MULT_EXPR
10668 && TREE_CODE (arg0) == REAL_CST
10669 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10671 tree tem = const_binop (RDIV_EXPR, arg0,
10672 TREE_OPERAND (arg1, 1), 0);
10674 return fold_build2 (RDIV_EXPR, type, tem,
10675 TREE_OPERAND (arg1, 0));
10678 if (flag_unsafe_math_optimizations)
10680 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10681 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10683 /* Optimize sin(x)/cos(x) as tan(x). */
10684 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10685 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10686 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10687 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10688 CALL_EXPR_ARG (arg1, 0), 0))
10690 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10692 if (tanfn != NULL_TREE)
10693 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10696 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10697 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10698 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10699 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10700 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10701 CALL_EXPR_ARG (arg1, 0), 0))
10703 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10705 if (tanfn != NULL_TREE)
10707 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10708 return fold_build2 (RDIV_EXPR, type,
10709 build_real (type, dconst1), tmp);
10713 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10714 NaNs or Infinities. */
10715 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10716 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10717 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10719 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10720 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10722 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10723 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10724 && operand_equal_p (arg00, arg01, 0))
10726 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10728 if (cosfn != NULL_TREE)
10729 return build_call_expr (cosfn, 1, arg00);
10733 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10734 NaNs or Infinities. */
10735 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10736 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10737 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10739 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10740 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10742 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10743 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10744 && operand_equal_p (arg00, arg01, 0))
10746 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10748 if (cosfn != NULL_TREE)
10750 tree tmp = build_call_expr (cosfn, 1, arg00);
10751 return fold_build2 (RDIV_EXPR, type,
10752 build_real (type, dconst1),
10758 /* Optimize pow(x,c)/x as pow(x,c-1). */
10759 if (fcode0 == BUILT_IN_POW
10760 || fcode0 == BUILT_IN_POWF
10761 || fcode0 == BUILT_IN_POWL)
10763 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10764 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10765 if (TREE_CODE (arg01) == REAL_CST
10766 && !TREE_OVERFLOW (arg01)
10767 && operand_equal_p (arg1, arg00, 0))
10769 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10773 c = TREE_REAL_CST (arg01);
10774 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10775 arg = build_real (type, c);
10776 return build_call_expr (powfn, 2, arg1, arg);
10780 /* Optimize a/root(b/c) into a*root(c/b). */
10781 if (BUILTIN_ROOT_P (fcode1))
10783 tree rootarg = CALL_EXPR_ARG (arg1, 0);
10785 if (TREE_CODE (rootarg) == RDIV_EXPR)
10787 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10788 tree b = TREE_OPERAND (rootarg, 0);
10789 tree c = TREE_OPERAND (rootarg, 1);
10791 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
10793 tmp = build_call_expr (rootfn, 1, tmp);
10794 return fold_build2 (MULT_EXPR, type, arg0, tmp);
10798 /* Optimize x/expN(y) into x*expN(-y). */
10799 if (BUILTIN_EXPONENT_P (fcode1))
10801 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10802 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10803 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
10804 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10807 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10808 if (fcode1 == BUILT_IN_POW
10809 || fcode1 == BUILT_IN_POWF
10810 || fcode1 == BUILT_IN_POWL)
10812 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10813 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10814 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10815 tree neg11 = fold_convert (type, negate_expr (arg11));
10816 arg1 = build_call_expr (powfn, 2, arg10, neg11);
10817 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10822 case TRUNC_DIV_EXPR:
10823 case FLOOR_DIV_EXPR:
10824 /* Simplify A / (B << N) where A and B are positive and B is
10825 a power of 2, to A >> (N + log2(B)). */
10826 strict_overflow_p = false;
10827 if (TREE_CODE (arg1) == LSHIFT_EXPR
10828 && (TYPE_UNSIGNED (type)
10829 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10831 tree sval = TREE_OPERAND (arg1, 0);
10832 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10834 tree sh_cnt = TREE_OPERAND (arg1, 1);
10835 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10837 if (strict_overflow_p)
10838 fold_overflow_warning (("assuming signed overflow does not "
10839 "occur when simplifying A / (B << N)"),
10840 WARN_STRICT_OVERFLOW_MISC);
10842 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10843 sh_cnt, build_int_cst (NULL_TREE, pow2));
10844 return fold_build2 (RSHIFT_EXPR, type,
10845 fold_convert (type, arg0), sh_cnt);
10850 case ROUND_DIV_EXPR:
10851 case CEIL_DIV_EXPR:
10852 case EXACT_DIV_EXPR:
10853 if (integer_onep (arg1))
10854 return non_lvalue (fold_convert (type, arg0));
10855 if (integer_zerop (arg1))
10857 /* X / -1 is -X. */
10858 if (!TYPE_UNSIGNED (type)
10859 && TREE_CODE (arg1) == INTEGER_CST
10860 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10861 && TREE_INT_CST_HIGH (arg1) == -1)
10862 return fold_convert (type, negate_expr (arg0));
10864 /* Convert -A / -B to A / B when the type is signed and overflow is
10866 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10867 && TREE_CODE (arg0) == NEGATE_EXPR
10868 && negate_expr_p (arg1))
10870 if (INTEGRAL_TYPE_P (type))
10871 fold_overflow_warning (("assuming signed overflow does not occur "
10872 "when distributing negation across "
10874 WARN_STRICT_OVERFLOW_MISC);
10875 return fold_build2 (code, type,
10876 fold_convert (type, TREE_OPERAND (arg0, 0)),
10877 negate_expr (arg1));
10879 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10880 && TREE_CODE (arg1) == NEGATE_EXPR
10881 && negate_expr_p (arg0))
10883 if (INTEGRAL_TYPE_P (type))
10884 fold_overflow_warning (("assuming signed overflow does not occur "
10885 "when distributing negation across "
10887 WARN_STRICT_OVERFLOW_MISC);
10888 return fold_build2 (code, type, negate_expr (arg0),
10889 TREE_OPERAND (arg1, 0));
10892 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10893 operation, EXACT_DIV_EXPR.
10895 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10896 At one time others generated faster code, it's not clear if they do
10897 after the last round to changes to the DIV code in expmed.c. */
10898 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10899 && multiple_of_p (type, arg0, arg1))
10900 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10902 strict_overflow_p = false;
10903 if (TREE_CODE (arg1) == INTEGER_CST
10904 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10905 &strict_overflow_p)))
10907 if (strict_overflow_p)
10908 fold_overflow_warning (("assuming signed overflow does not occur "
10909 "when simplifying division"),
10910 WARN_STRICT_OVERFLOW_MISC);
10911 return fold_convert (type, tem);
10916 case CEIL_MOD_EXPR:
10917 case FLOOR_MOD_EXPR:
10918 case ROUND_MOD_EXPR:
10919 case TRUNC_MOD_EXPR:
10920 /* X % 1 is always zero, but be sure to preserve any side
10922 if (integer_onep (arg1))
10923 return omit_one_operand (type, integer_zero_node, arg0);
10925 /* X % 0, return X % 0 unchanged so that we can get the
10926 proper warnings and errors. */
10927 if (integer_zerop (arg1))
10930 /* 0 % X is always zero, but be sure to preserve any side
10931 effects in X. Place this after checking for X == 0. */
10932 if (integer_zerop (arg0))
10933 return omit_one_operand (type, integer_zero_node, arg1);
10935 /* X % -1 is zero. */
10936 if (!TYPE_UNSIGNED (type)
10937 && TREE_CODE (arg1) == INTEGER_CST
10938 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10939 && TREE_INT_CST_HIGH (arg1) == -1)
10940 return omit_one_operand (type, integer_zero_node, arg0);
10942 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10943 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10944 strict_overflow_p = false;
10945 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10946 && (TYPE_UNSIGNED (type)
10947 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10950 /* Also optimize A % (C << N) where C is a power of 2,
10951 to A & ((C << N) - 1). */
10952 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10953 c = TREE_OPERAND (arg1, 0);
10955 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10957 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10958 build_int_cst (TREE_TYPE (arg1), 1));
10959 if (strict_overflow_p)
10960 fold_overflow_warning (("assuming signed overflow does not "
10961 "occur when simplifying "
10962 "X % (power of two)"),
10963 WARN_STRICT_OVERFLOW_MISC);
10964 return fold_build2 (BIT_AND_EXPR, type,
10965 fold_convert (type, arg0),
10966 fold_convert (type, mask));
10970 /* X % -C is the same as X % C. */
10971 if (code == TRUNC_MOD_EXPR
10972 && !TYPE_UNSIGNED (type)
10973 && TREE_CODE (arg1) == INTEGER_CST
10974 && !TREE_OVERFLOW (arg1)
10975 && TREE_INT_CST_HIGH (arg1) < 0
10976 && !TYPE_OVERFLOW_TRAPS (type)
10977 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10978 && !sign_bit_p (arg1, arg1))
10979 return fold_build2 (code, type, fold_convert (type, arg0),
10980 fold_convert (type, negate_expr (arg1)));
10982 /* X % -Y is the same as X % Y. */
10983 if (code == TRUNC_MOD_EXPR
10984 && !TYPE_UNSIGNED (type)
10985 && TREE_CODE (arg1) == NEGATE_EXPR
10986 && !TYPE_OVERFLOW_TRAPS (type))
10987 return fold_build2 (code, type, fold_convert (type, arg0),
10988 fold_convert (type, TREE_OPERAND (arg1, 0)));
10990 if (TREE_CODE (arg1) == INTEGER_CST
10991 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10992 &strict_overflow_p)))
10994 if (strict_overflow_p)
10995 fold_overflow_warning (("assuming signed overflow does not occur "
10996 "when simplifying modulos"),
10997 WARN_STRICT_OVERFLOW_MISC);
10998 return fold_convert (type, tem);
11005 if (integer_all_onesp (arg0))
11006 return omit_one_operand (type, arg0, arg1);
11010 /* Optimize -1 >> x for arithmetic right shifts. */
11011 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11012 return omit_one_operand (type, arg0, arg1);
11013 /* ... fall through ... */
11017 if (integer_zerop (arg1))
11018 return non_lvalue (fold_convert (type, arg0));
11019 if (integer_zerop (arg0))
11020 return omit_one_operand (type, arg0, arg1);
11022 /* Since negative shift count is not well-defined,
11023 don't try to compute it in the compiler. */
11024 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11027 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11028 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11029 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11030 && host_integerp (TREE_OPERAND (arg0, 1), false)
11031 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11033 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11034 + TREE_INT_CST_LOW (arg1));
11036 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11037 being well defined. */
11038 if (low >= TYPE_PRECISION (type))
11040 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11041 low = low % TYPE_PRECISION (type);
11042 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11043 return build_int_cst (type, 0);
11045 low = TYPE_PRECISION (type) - 1;
11048 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11049 build_int_cst (type, low));
11052 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11053 into x & ((unsigned)-1 >> c) for unsigned types. */
11054 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11055 || (TYPE_UNSIGNED (type)
11056 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11057 && host_integerp (arg1, false)
11058 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11059 && host_integerp (TREE_OPERAND (arg0, 1), false)
11060 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11062 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11063 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11069 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11071 lshift = build_int_cst (type, -1);
11072 lshift = int_const_binop (code, lshift, arg1, 0);
11074 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11078 /* Rewrite an LROTATE_EXPR by a constant into an
11079 RROTATE_EXPR by a new constant. */
11080 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11082 tree tem = build_int_cst (TREE_TYPE (arg1),
11083 GET_MODE_BITSIZE (TYPE_MODE (type)));
11084 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11085 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
11088 /* If we have a rotate of a bit operation with the rotate count and
11089 the second operand of the bit operation both constant,
11090 permute the two operations. */
11091 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11092 && (TREE_CODE (arg0) == BIT_AND_EXPR
11093 || TREE_CODE (arg0) == BIT_IOR_EXPR
11094 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11095 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11096 return fold_build2 (TREE_CODE (arg0), type,
11097 fold_build2 (code, type,
11098 TREE_OPERAND (arg0, 0), arg1),
11099 fold_build2 (code, type,
11100 TREE_OPERAND (arg0, 1), arg1));
11102 /* Two consecutive rotates adding up to the width of the mode can
11104 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11105 && TREE_CODE (arg0) == RROTATE_EXPR
11106 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11107 && TREE_INT_CST_HIGH (arg1) == 0
11108 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11109 && ((TREE_INT_CST_LOW (arg1)
11110 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11111 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
11112 return TREE_OPERAND (arg0, 0);
11117 if (operand_equal_p (arg0, arg1, 0))
11118 return omit_one_operand (type, arg0, arg1);
11119 if (INTEGRAL_TYPE_P (type)
11120 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11121 return omit_one_operand (type, arg1, arg0);
11122 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11128 if (operand_equal_p (arg0, arg1, 0))
11129 return omit_one_operand (type, arg0, arg1);
11130 if (INTEGRAL_TYPE_P (type)
11131 && TYPE_MAX_VALUE (type)
11132 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11133 return omit_one_operand (type, arg1, arg0);
11134 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11139 case TRUTH_ANDIF_EXPR:
11140 /* Note that the operands of this must be ints
11141 and their values must be 0 or 1.
11142 ("true" is a fixed value perhaps depending on the language.) */
11143 /* If first arg is constant zero, return it. */
11144 if (integer_zerop (arg0))
11145 return fold_convert (type, arg0);
11146 case TRUTH_AND_EXPR:
11147 /* If either arg is constant true, drop it. */
11148 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11149 return non_lvalue (fold_convert (type, arg1));
11150 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11151 /* Preserve sequence points. */
11152 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11153 return non_lvalue (fold_convert (type, arg0));
11154 /* If second arg is constant zero, result is zero, but first arg
11155 must be evaluated. */
11156 if (integer_zerop (arg1))
11157 return omit_one_operand (type, arg1, arg0);
11158 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11159 case will be handled here. */
11160 if (integer_zerop (arg0))
11161 return omit_one_operand (type, arg0, arg1);
11163 /* !X && X is always false. */
11164 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11165 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11166 return omit_one_operand (type, integer_zero_node, arg1);
11167 /* X && !X is always false. */
11168 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11169 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11170 return omit_one_operand (type, integer_zero_node, arg0);
11172 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11173 means A >= Y && A != MAX, but in this case we know that
11176 if (!TREE_SIDE_EFFECTS (arg0)
11177 && !TREE_SIDE_EFFECTS (arg1))
11179 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11180 if (tem && !operand_equal_p (tem, arg0, 0))
11181 return fold_build2 (code, type, tem, arg1);
11183 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11184 if (tem && !operand_equal_p (tem, arg1, 0))
11185 return fold_build2 (code, type, arg0, tem);
11189 /* We only do these simplifications if we are optimizing. */
11193 /* Check for things like (A || B) && (A || C). We can convert this
11194 to A || (B && C). Note that either operator can be any of the four
11195 truth and/or operations and the transformation will still be
11196 valid. Also note that we only care about order for the
11197 ANDIF and ORIF operators. If B contains side effects, this
11198 might change the truth-value of A. */
11199 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11200 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11201 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11202 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11203 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11204 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11206 tree a00 = TREE_OPERAND (arg0, 0);
11207 tree a01 = TREE_OPERAND (arg0, 1);
11208 tree a10 = TREE_OPERAND (arg1, 0);
11209 tree a11 = TREE_OPERAND (arg1, 1);
11210 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11211 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11212 && (code == TRUTH_AND_EXPR
11213 || code == TRUTH_OR_EXPR));
11215 if (operand_equal_p (a00, a10, 0))
11216 return fold_build2 (TREE_CODE (arg0), type, a00,
11217 fold_build2 (code, type, a01, a11));
11218 else if (commutative && operand_equal_p (a00, a11, 0))
11219 return fold_build2 (TREE_CODE (arg0), type, a00,
11220 fold_build2 (code, type, a01, a10));
11221 else if (commutative && operand_equal_p (a01, a10, 0))
11222 return fold_build2 (TREE_CODE (arg0), type, a01,
11223 fold_build2 (code, type, a00, a11));
11225 /* This case is tricky because we must either have commutative
11226 operators or else A10 must not have side-effects. */
11228 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11229 && operand_equal_p (a01, a11, 0))
11230 return fold_build2 (TREE_CODE (arg0), type,
11231 fold_build2 (code, type, a00, a10),
11235 /* See if we can build a range comparison. */
11236 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11239 /* Check for the possibility of merging component references. If our
11240 lhs is another similar operation, try to merge its rhs with our
11241 rhs. Then try to merge our lhs and rhs. */
11242 if (TREE_CODE (arg0) == code
11243 && 0 != (tem = fold_truthop (code, type,
11244 TREE_OPERAND (arg0, 1), arg1)))
11245 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11247 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11252 case TRUTH_ORIF_EXPR:
11253 /* Note that the operands of this must be ints
11254 and their values must be 0 or true.
11255 ("true" is a fixed value perhaps depending on the language.) */
11256 /* If first arg is constant true, return it. */
11257 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11258 return fold_convert (type, arg0);
11259 case TRUTH_OR_EXPR:
11260 /* If either arg is constant zero, drop it. */
11261 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11262 return non_lvalue (fold_convert (type, arg1));
11263 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11264 /* Preserve sequence points. */
11265 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11266 return non_lvalue (fold_convert (type, arg0));
11267 /* If second arg is constant true, result is true, but we must
11268 evaluate first arg. */
11269 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11270 return omit_one_operand (type, arg1, arg0);
11271 /* Likewise for first arg, but note this only occurs here for
11273 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11274 return omit_one_operand (type, arg0, arg1);
11276 /* !X || X is always true. */
11277 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11278 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11279 return omit_one_operand (type, integer_one_node, arg1);
11280 /* X || !X is always true. */
11281 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11282 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11283 return omit_one_operand (type, integer_one_node, arg0);
11287 case TRUTH_XOR_EXPR:
11288 /* If the second arg is constant zero, drop it. */
11289 if (integer_zerop (arg1))
11290 return non_lvalue (fold_convert (type, arg0));
11291 /* If the second arg is constant true, this is a logical inversion. */
11292 if (integer_onep (arg1))
11294 /* Only call invert_truthvalue if operand is a truth value. */
11295 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11296 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11298 tem = invert_truthvalue (arg0);
11299 return non_lvalue (fold_convert (type, tem));
11301 /* Identical arguments cancel to zero. */
11302 if (operand_equal_p (arg0, arg1, 0))
11303 return omit_one_operand (type, integer_zero_node, arg0);
11305 /* !X ^ X is always true. */
11306 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11307 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11308 return omit_one_operand (type, integer_one_node, arg1);
11310 /* X ^ !X is always true. */
11311 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11312 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11313 return omit_one_operand (type, integer_one_node, arg0);
11319 tem = fold_comparison (code, type, op0, op1);
11320 if (tem != NULL_TREE)
11323 /* bool_var != 0 becomes bool_var. */
11324 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11325 && code == NE_EXPR)
11326 return non_lvalue (fold_convert (type, arg0));
11328 /* bool_var == 1 becomes bool_var. */
11329 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11330 && code == EQ_EXPR)
11331 return non_lvalue (fold_convert (type, arg0));
11333 /* bool_var != 1 becomes !bool_var. */
11334 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11335 && code == NE_EXPR)
11336 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11338 /* bool_var == 0 becomes !bool_var. */
11339 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11340 && code == EQ_EXPR)
11341 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11343 /* If this is an equality comparison of the address of two non-weak,
11344 unaliased symbols neither of which are extern (since we do not
11345 have access to attributes for externs), then we know the result. */
11346 if (TREE_CODE (arg0) == ADDR_EXPR
11347 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11348 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11349 && ! lookup_attribute ("alias",
11350 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11351 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11352 && TREE_CODE (arg1) == ADDR_EXPR
11353 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11354 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11355 && ! lookup_attribute ("alias",
11356 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11357 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11359 /* We know that we're looking at the address of two
11360 non-weak, unaliased, static _DECL nodes.
11362 It is both wasteful and incorrect to call operand_equal_p
11363 to compare the two ADDR_EXPR nodes. It is wasteful in that
11364 all we need to do is test pointer equality for the arguments
11365 to the two ADDR_EXPR nodes. It is incorrect to use
11366 operand_equal_p as that function is NOT equivalent to a
11367 C equality test. It can in fact return false for two
11368 objects which would test as equal using the C equality
11370 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11371 return constant_boolean_node (equal
11372 ? code == EQ_EXPR : code != EQ_EXPR,
11376 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11377 a MINUS_EXPR of a constant, we can convert it into a comparison with
11378 a revised constant as long as no overflow occurs. */
11379 if (TREE_CODE (arg1) == INTEGER_CST
11380 && (TREE_CODE (arg0) == PLUS_EXPR
11381 || TREE_CODE (arg0) == MINUS_EXPR)
11382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11383 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11384 ? MINUS_EXPR : PLUS_EXPR,
11385 fold_convert (TREE_TYPE (arg0), arg1),
11386 TREE_OPERAND (arg0, 1), 0))
11387 && !TREE_OVERFLOW (tem))
11388 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11390 /* Similarly for a NEGATE_EXPR. */
11391 if (TREE_CODE (arg0) == NEGATE_EXPR
11392 && TREE_CODE (arg1) == INTEGER_CST
11393 && 0 != (tem = negate_expr (arg1))
11394 && TREE_CODE (tem) == INTEGER_CST
11395 && !TREE_OVERFLOW (tem))
11396 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11398 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11399 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11400 && TREE_CODE (arg1) == INTEGER_CST
11401 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11402 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11403 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11404 fold_convert (TREE_TYPE (arg0), arg1),
11405 TREE_OPERAND (arg0, 1)));
11407 /* Transform comparisons of the form X +- C CMP X. */
11408 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11409 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11410 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11411 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11412 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11414 tree cst = TREE_OPERAND (arg0, 1);
11416 if (code == EQ_EXPR
11417 && !integer_zerop (cst))
11418 return omit_two_operands (type, boolean_false_node,
11419 TREE_OPERAND (arg0, 0), arg1);
11421 return omit_two_operands (type, boolean_true_node,
11422 TREE_OPERAND (arg0, 0), arg1);
11425 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11426 for !=. Don't do this for ordered comparisons due to overflow. */
11427 if (TREE_CODE (arg0) == MINUS_EXPR
11428 && integer_zerop (arg1))
11429 return fold_build2 (code, type,
11430 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11432 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11433 if (TREE_CODE (arg0) == ABS_EXPR
11434 && (integer_zerop (arg1) || real_zerop (arg1)))
11435 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11437 /* If this is an EQ or NE comparison with zero and ARG0 is
11438 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11439 two operations, but the latter can be done in one less insn
11440 on machines that have only two-operand insns or on which a
11441 constant cannot be the first operand. */
11442 if (TREE_CODE (arg0) == BIT_AND_EXPR
11443 && integer_zerop (arg1))
11445 tree arg00 = TREE_OPERAND (arg0, 0);
11446 tree arg01 = TREE_OPERAND (arg0, 1);
11447 if (TREE_CODE (arg00) == LSHIFT_EXPR
11448 && integer_onep (TREE_OPERAND (arg00, 0)))
11450 fold_build2 (code, type,
11451 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11452 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11453 arg01, TREE_OPERAND (arg00, 1)),
11454 fold_convert (TREE_TYPE (arg0),
11455 integer_one_node)),
11457 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
11458 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
11460 fold_build2 (code, type,
11461 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11462 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11463 arg00, TREE_OPERAND (arg01, 1)),
11464 fold_convert (TREE_TYPE (arg0),
11465 integer_one_node)),
11469 /* If this is an NE or EQ comparison of zero against the result of a
11470 signed MOD operation whose second operand is a power of 2, make
11471 the MOD operation unsigned since it is simpler and equivalent. */
11472 if (integer_zerop (arg1)
11473 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11474 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11475 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11476 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11477 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11478 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11480 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11481 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11482 fold_convert (newtype,
11483 TREE_OPERAND (arg0, 0)),
11484 fold_convert (newtype,
11485 TREE_OPERAND (arg0, 1)));
11487 return fold_build2 (code, type, newmod,
11488 fold_convert (newtype, arg1));
11491 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11492 C1 is a valid shift constant, and C2 is a power of two, i.e.
11494 if (TREE_CODE (arg0) == BIT_AND_EXPR
11495 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11496 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11498 && integer_pow2p (TREE_OPERAND (arg0, 1))
11499 && integer_zerop (arg1))
11501 tree itype = TREE_TYPE (arg0);
11502 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11503 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11505 /* Check for a valid shift count. */
11506 if (TREE_INT_CST_HIGH (arg001) == 0
11507 && TREE_INT_CST_LOW (arg001) < prec)
11509 tree arg01 = TREE_OPERAND (arg0, 1);
11510 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11511 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11512 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11513 can be rewritten as (X & (C2 << C1)) != 0. */
11514 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11516 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11517 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11518 return fold_build2 (code, type, tem, arg1);
11520 /* Otherwise, for signed (arithmetic) shifts,
11521 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11522 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11523 else if (!TYPE_UNSIGNED (itype))
11524 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11525 arg000, build_int_cst (itype, 0));
11526 /* Otherwise, for unsigned (logical) shifts,
11527 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11528 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11530 return omit_one_operand (type,
11531 code == EQ_EXPR ? integer_one_node
11532 : integer_zero_node,
11537 /* If this is an NE comparison of zero with an AND of one, remove the
11538 comparison since the AND will give the correct value. */
11539 if (code == NE_EXPR
11540 && integer_zerop (arg1)
11541 && TREE_CODE (arg0) == BIT_AND_EXPR
11542 && integer_onep (TREE_OPERAND (arg0, 1)))
11543 return fold_convert (type, arg0);
11545 /* If we have (A & C) == C where C is a power of 2, convert this into
11546 (A & C) != 0. Similarly for NE_EXPR. */
11547 if (TREE_CODE (arg0) == BIT_AND_EXPR
11548 && integer_pow2p (TREE_OPERAND (arg0, 1))
11549 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11550 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11551 arg0, fold_convert (TREE_TYPE (arg0),
11552 integer_zero_node));
11554 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11555 bit, then fold the expression into A < 0 or A >= 0. */
11556 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11560 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11561 Similarly for NE_EXPR. */
11562 if (TREE_CODE (arg0) == BIT_AND_EXPR
11563 && TREE_CODE (arg1) == INTEGER_CST
11564 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11566 tree notc = fold_build1 (BIT_NOT_EXPR,
11567 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11568 TREE_OPERAND (arg0, 1));
11569 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11571 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11572 if (integer_nonzerop (dandnotc))
11573 return omit_one_operand (type, rslt, arg0);
11576 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11577 Similarly for NE_EXPR. */
11578 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11579 && TREE_CODE (arg1) == INTEGER_CST
11580 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11582 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11583 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11584 TREE_OPERAND (arg0, 1), notd);
11585 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11586 if (integer_nonzerop (candnotd))
11587 return omit_one_operand (type, rslt, arg0);
11590 /* If this is a comparison of a field, we may be able to simplify it. */
11591 if ((TREE_CODE (arg0) == COMPONENT_REF
11592 || TREE_CODE (arg0) == BIT_FIELD_REF)
11593 /* Handle the constant case even without -O
11594 to make sure the warnings are given. */
11595 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11597 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11602 /* Optimize comparisons of strlen vs zero to a compare of the
11603 first character of the string vs zero. To wit,
11604 strlen(ptr) == 0 => *ptr == 0
11605 strlen(ptr) != 0 => *ptr != 0
11606 Other cases should reduce to one of these two (or a constant)
11607 due to the return value of strlen being unsigned. */
11608 if (TREE_CODE (arg0) == CALL_EXPR
11609 && integer_zerop (arg1))
11611 tree fndecl = get_callee_fndecl (arg0);
11614 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11615 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11616 && call_expr_nargs (arg0) == 1
11617 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11619 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
11620 return fold_build2 (code, type, iref,
11621 build_int_cst (TREE_TYPE (iref), 0));
11625 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11626 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11627 if (TREE_CODE (arg0) == RSHIFT_EXPR
11628 && integer_zerop (arg1)
11629 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11631 tree arg00 = TREE_OPERAND (arg0, 0);
11632 tree arg01 = TREE_OPERAND (arg0, 1);
11633 tree itype = TREE_TYPE (arg00);
11634 if (TREE_INT_CST_HIGH (arg01) == 0
11635 && TREE_INT_CST_LOW (arg01)
11636 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11638 if (TYPE_UNSIGNED (itype))
11640 itype = signed_type_for (itype);
11641 arg00 = fold_convert (itype, arg00);
11643 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11644 type, arg00, build_int_cst (itype, 0));
11648 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11649 if (integer_zerop (arg1)
11650 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11651 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11652 TREE_OPERAND (arg0, 1));
11654 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11655 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11656 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11657 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11658 build_int_cst (TREE_TYPE (arg1), 0));
11659 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11660 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11661 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11662 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11663 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11664 build_int_cst (TREE_TYPE (arg1), 0));
11666 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11667 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11668 && TREE_CODE (arg1) == INTEGER_CST
11669 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11670 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11671 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11672 TREE_OPERAND (arg0, 1), arg1));
11674 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11675 (X & C) == 0 when C is a single bit. */
11676 if (TREE_CODE (arg0) == BIT_AND_EXPR
11677 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11678 && integer_zerop (arg1)
11679 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11681 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11682 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11683 TREE_OPERAND (arg0, 1));
11684 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11688 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11689 constant C is a power of two, i.e. a single bit. */
11690 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11691 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11692 && integer_zerop (arg1)
11693 && integer_pow2p (TREE_OPERAND (arg0, 1))
11694 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11695 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11697 tree arg00 = TREE_OPERAND (arg0, 0);
11698 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11699 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11702 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11703 when C is a power of two, i.e. a single bit. */
11704 if (TREE_CODE (arg0) == BIT_AND_EXPR
11705 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11706 && integer_zerop (arg1)
11707 && integer_pow2p (TREE_OPERAND (arg0, 1))
11708 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11709 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11711 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11712 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11713 arg000, TREE_OPERAND (arg0, 1));
11714 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11715 tem, build_int_cst (TREE_TYPE (tem), 0));
11718 if (integer_zerop (arg1)
11719 && tree_expr_nonzero_p (arg0))
11721 tree res = constant_boolean_node (code==NE_EXPR, type);
11722 return omit_one_operand (type, res, arg0);
11725 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11726 if (TREE_CODE (arg0) == NEGATE_EXPR
11727 && TREE_CODE (arg1) == NEGATE_EXPR)
11728 return fold_build2 (code, type,
11729 TREE_OPERAND (arg0, 0),
11730 TREE_OPERAND (arg1, 0));
11732 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11733 if (TREE_CODE (arg0) == BIT_AND_EXPR
11734 && TREE_CODE (arg1) == BIT_AND_EXPR)
11736 tree arg00 = TREE_OPERAND (arg0, 0);
11737 tree arg01 = TREE_OPERAND (arg0, 1);
11738 tree arg10 = TREE_OPERAND (arg1, 0);
11739 tree arg11 = TREE_OPERAND (arg1, 1);
11740 tree itype = TREE_TYPE (arg0);
11742 if (operand_equal_p (arg01, arg11, 0))
11743 return fold_build2 (code, type,
11744 fold_build2 (BIT_AND_EXPR, itype,
11745 fold_build2 (BIT_XOR_EXPR, itype,
11748 build_int_cst (itype, 0));
11750 if (operand_equal_p (arg01, arg10, 0))
11751 return fold_build2 (code, type,
11752 fold_build2 (BIT_AND_EXPR, itype,
11753 fold_build2 (BIT_XOR_EXPR, itype,
11756 build_int_cst (itype, 0));
11758 if (operand_equal_p (arg00, arg11, 0))
11759 return fold_build2 (code, type,
11760 fold_build2 (BIT_AND_EXPR, itype,
11761 fold_build2 (BIT_XOR_EXPR, itype,
11764 build_int_cst (itype, 0));
11766 if (operand_equal_p (arg00, arg10, 0))
11767 return fold_build2 (code, type,
11768 fold_build2 (BIT_AND_EXPR, itype,
11769 fold_build2 (BIT_XOR_EXPR, itype,
11772 build_int_cst (itype, 0));
11775 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11776 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11778 tree arg00 = TREE_OPERAND (arg0, 0);
11779 tree arg01 = TREE_OPERAND (arg0, 1);
11780 tree arg10 = TREE_OPERAND (arg1, 0);
11781 tree arg11 = TREE_OPERAND (arg1, 1);
11782 tree itype = TREE_TYPE (arg0);
11784 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11785 operand_equal_p guarantees no side-effects so we don't need
11786 to use omit_one_operand on Z. */
11787 if (operand_equal_p (arg01, arg11, 0))
11788 return fold_build2 (code, type, arg00, arg10);
11789 if (operand_equal_p (arg01, arg10, 0))
11790 return fold_build2 (code, type, arg00, arg11);
11791 if (operand_equal_p (arg00, arg11, 0))
11792 return fold_build2 (code, type, arg01, arg10);
11793 if (operand_equal_p (arg00, arg10, 0))
11794 return fold_build2 (code, type, arg01, arg11);
11796 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11797 if (TREE_CODE (arg01) == INTEGER_CST
11798 && TREE_CODE (arg11) == INTEGER_CST)
11799 return fold_build2 (code, type,
11800 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11801 fold_build2 (BIT_XOR_EXPR, itype,
11806 /* Attempt to simplify equality/inequality comparisons of complex
11807 values. Only lower the comparison if the result is known or
11808 can be simplified to a single scalar comparison. */
11809 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11810 || TREE_CODE (arg0) == COMPLEX_CST)
11811 && (TREE_CODE (arg1) == COMPLEX_EXPR
11812 || TREE_CODE (arg1) == COMPLEX_CST))
11814 tree real0, imag0, real1, imag1;
11817 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11819 real0 = TREE_OPERAND (arg0, 0);
11820 imag0 = TREE_OPERAND (arg0, 1);
11824 real0 = TREE_REALPART (arg0);
11825 imag0 = TREE_IMAGPART (arg0);
11828 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11830 real1 = TREE_OPERAND (arg1, 0);
11831 imag1 = TREE_OPERAND (arg1, 1);
11835 real1 = TREE_REALPART (arg1);
11836 imag1 = TREE_IMAGPART (arg1);
11839 rcond = fold_binary (code, type, real0, real1);
11840 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11842 if (integer_zerop (rcond))
11844 if (code == EQ_EXPR)
11845 return omit_two_operands (type, boolean_false_node,
11847 return fold_build2 (NE_EXPR, type, imag0, imag1);
11851 if (code == NE_EXPR)
11852 return omit_two_operands (type, boolean_true_node,
11854 return fold_build2 (EQ_EXPR, type, imag0, imag1);
11858 icond = fold_binary (code, type, imag0, imag1);
11859 if (icond && TREE_CODE (icond) == INTEGER_CST)
11861 if (integer_zerop (icond))
11863 if (code == EQ_EXPR)
11864 return omit_two_operands (type, boolean_false_node,
11866 return fold_build2 (NE_EXPR, type, real0, real1);
11870 if (code == NE_EXPR)
11871 return omit_two_operands (type, boolean_true_node,
11873 return fold_build2 (EQ_EXPR, type, real0, real1);
11884 tem = fold_comparison (code, type, op0, op1);
11885 if (tem != NULL_TREE)
11888 /* Transform comparisons of the form X +- C CMP X. */
11889 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11890 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11891 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11892 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11893 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11894 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11896 tree arg01 = TREE_OPERAND (arg0, 1);
11897 enum tree_code code0 = TREE_CODE (arg0);
11900 if (TREE_CODE (arg01) == REAL_CST)
11901 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11903 is_positive = tree_int_cst_sgn (arg01);
11905 /* (X - c) > X becomes false. */
11906 if (code == GT_EXPR
11907 && ((code0 == MINUS_EXPR && is_positive >= 0)
11908 || (code0 == PLUS_EXPR && is_positive <= 0)))
11910 if (TREE_CODE (arg01) == INTEGER_CST
11911 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11912 fold_overflow_warning (("assuming signed overflow does not "
11913 "occur when assuming that (X - c) > X "
11914 "is always false"),
11915 WARN_STRICT_OVERFLOW_ALL);
11916 return constant_boolean_node (0, type);
11919 /* Likewise (X + c) < X becomes false. */
11920 if (code == LT_EXPR
11921 && ((code0 == PLUS_EXPR && is_positive >= 0)
11922 || (code0 == MINUS_EXPR && is_positive <= 0)))
11924 if (TREE_CODE (arg01) == INTEGER_CST
11925 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11926 fold_overflow_warning (("assuming signed overflow does not "
11927 "occur when assuming that "
11928 "(X + c) < X is always false"),
11929 WARN_STRICT_OVERFLOW_ALL);
11930 return constant_boolean_node (0, type);
11933 /* Convert (X - c) <= X to true. */
11934 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11936 && ((code0 == MINUS_EXPR && is_positive >= 0)
11937 || (code0 == PLUS_EXPR && is_positive <= 0)))
11939 if (TREE_CODE (arg01) == INTEGER_CST
11940 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11941 fold_overflow_warning (("assuming signed overflow does not "
11942 "occur when assuming that "
11943 "(X - c) <= X is always true"),
11944 WARN_STRICT_OVERFLOW_ALL);
11945 return constant_boolean_node (1, type);
11948 /* Convert (X + c) >= X to true. */
11949 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11951 && ((code0 == PLUS_EXPR && is_positive >= 0)
11952 || (code0 == MINUS_EXPR && is_positive <= 0)))
11954 if (TREE_CODE (arg01) == INTEGER_CST
11955 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11956 fold_overflow_warning (("assuming signed overflow does not "
11957 "occur when assuming that "
11958 "(X + c) >= X is always true"),
11959 WARN_STRICT_OVERFLOW_ALL);
11960 return constant_boolean_node (1, type);
11963 if (TREE_CODE (arg01) == INTEGER_CST)
11965 /* Convert X + c > X and X - c < X to true for integers. */
11966 if (code == GT_EXPR
11967 && ((code0 == PLUS_EXPR && is_positive > 0)
11968 || (code0 == MINUS_EXPR && is_positive < 0)))
11970 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11971 fold_overflow_warning (("assuming signed overflow does "
11972 "not occur when assuming that "
11973 "(X + c) > X is always true"),
11974 WARN_STRICT_OVERFLOW_ALL);
11975 return constant_boolean_node (1, type);
11978 if (code == LT_EXPR
11979 && ((code0 == MINUS_EXPR && is_positive > 0)
11980 || (code0 == PLUS_EXPR && is_positive < 0)))
11982 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11983 fold_overflow_warning (("assuming signed overflow does "
11984 "not occur when assuming that "
11985 "(X - c) < X is always true"),
11986 WARN_STRICT_OVERFLOW_ALL);
11987 return constant_boolean_node (1, type);
11990 /* Convert X + c <= X and X - c >= X to false for integers. */
11991 if (code == LE_EXPR
11992 && ((code0 == PLUS_EXPR && is_positive > 0)
11993 || (code0 == MINUS_EXPR && is_positive < 0)))
11995 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11996 fold_overflow_warning (("assuming signed overflow does "
11997 "not occur when assuming that "
11998 "(X + c) <= X is always false"),
11999 WARN_STRICT_OVERFLOW_ALL);
12000 return constant_boolean_node (0, type);
12003 if (code == GE_EXPR
12004 && ((code0 == MINUS_EXPR && is_positive > 0)
12005 || (code0 == PLUS_EXPR && is_positive < 0)))
12007 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12008 fold_overflow_warning (("assuming signed overflow does "
12009 "not occur when assuming that "
12010 "(X - c) >= X is always false"),
12011 WARN_STRICT_OVERFLOW_ALL);
12012 return constant_boolean_node (0, type);
12017 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12018 This transformation affects the cases which are handled in later
12019 optimizations involving comparisons with non-negative constants. */
12020 if (TREE_CODE (arg1) == INTEGER_CST
12021 && TREE_CODE (arg0) != INTEGER_CST
12022 && tree_int_cst_sgn (arg1) > 0)
12024 if (code == GE_EXPR)
12026 arg1 = const_binop (MINUS_EXPR, arg1,
12027 build_int_cst (TREE_TYPE (arg1), 1), 0);
12028 return fold_build2 (GT_EXPR, type, arg0,
12029 fold_convert (TREE_TYPE (arg0), arg1));
12031 if (code == LT_EXPR)
12033 arg1 = const_binop (MINUS_EXPR, arg1,
12034 build_int_cst (TREE_TYPE (arg1), 1), 0);
12035 return fold_build2 (LE_EXPR, type, arg0,
12036 fold_convert (TREE_TYPE (arg0), arg1));
12040 /* Comparisons with the highest or lowest possible integer of
12041 the specified precision will have known values. */
12043 tree arg1_type = TREE_TYPE (arg1);
12044 unsigned int width = TYPE_PRECISION (arg1_type);
12046 if (TREE_CODE (arg1) == INTEGER_CST
12047 && !TREE_OVERFLOW (arg1)
12048 && width <= 2 * HOST_BITS_PER_WIDE_INT
12049 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12051 HOST_WIDE_INT signed_max_hi;
12052 unsigned HOST_WIDE_INT signed_max_lo;
12053 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12055 if (width <= HOST_BITS_PER_WIDE_INT)
12057 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12062 if (TYPE_UNSIGNED (arg1_type))
12064 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12070 max_lo = signed_max_lo;
12071 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12077 width -= HOST_BITS_PER_WIDE_INT;
12078 signed_max_lo = -1;
12079 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12084 if (TYPE_UNSIGNED (arg1_type))
12086 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12091 max_hi = signed_max_hi;
12092 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12096 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12097 && TREE_INT_CST_LOW (arg1) == max_lo)
12101 return omit_one_operand (type, integer_zero_node, arg0);
12104 return fold_build2 (EQ_EXPR, type, op0, op1);
12107 return omit_one_operand (type, integer_one_node, arg0);
12110 return fold_build2 (NE_EXPR, type, op0, op1);
12112 /* The GE_EXPR and LT_EXPR cases above are not normally
12113 reached because of previous transformations. */
12118 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12120 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12124 arg1 = const_binop (PLUS_EXPR, arg1,
12125 build_int_cst (TREE_TYPE (arg1), 1), 0);
12126 return fold_build2 (EQ_EXPR, type,
12127 fold_convert (TREE_TYPE (arg1), arg0),
12130 arg1 = const_binop (PLUS_EXPR, arg1,
12131 build_int_cst (TREE_TYPE (arg1), 1), 0);
12132 return fold_build2 (NE_EXPR, type,
12133 fold_convert (TREE_TYPE (arg1), arg0),
12138 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12140 && TREE_INT_CST_LOW (arg1) == min_lo)
12144 return omit_one_operand (type, integer_zero_node, arg0);
12147 return fold_build2 (EQ_EXPR, type, op0, op1);
12150 return omit_one_operand (type, integer_one_node, arg0);
12153 return fold_build2 (NE_EXPR, type, op0, op1);
12158 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12160 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12164 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12165 return fold_build2 (NE_EXPR, type,
12166 fold_convert (TREE_TYPE (arg1), arg0),
12169 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12170 return fold_build2 (EQ_EXPR, type,
12171 fold_convert (TREE_TYPE (arg1), arg0),
12177 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12178 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12179 && TYPE_UNSIGNED (arg1_type)
12180 /* We will flip the signedness of the comparison operator
12181 associated with the mode of arg1, so the sign bit is
12182 specified by this mode. Check that arg1 is the signed
12183 max associated with this sign bit. */
12184 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12185 /* signed_type does not work on pointer types. */
12186 && INTEGRAL_TYPE_P (arg1_type))
12188 /* The following case also applies to X < signed_max+1
12189 and X >= signed_max+1 because previous transformations. */
12190 if (code == LE_EXPR || code == GT_EXPR)
12193 st = signed_type_for (TREE_TYPE (arg1));
12194 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12195 type, fold_convert (st, arg0),
12196 build_int_cst (st, 0));
12202 /* If we are comparing an ABS_EXPR with a constant, we can
12203 convert all the cases into explicit comparisons, but they may
12204 well not be faster than doing the ABS and one comparison.
12205 But ABS (X) <= C is a range comparison, which becomes a subtraction
12206 and a comparison, and is probably faster. */
12207 if (code == LE_EXPR
12208 && TREE_CODE (arg1) == INTEGER_CST
12209 && TREE_CODE (arg0) == ABS_EXPR
12210 && ! TREE_SIDE_EFFECTS (arg0)
12211 && (0 != (tem = negate_expr (arg1)))
12212 && TREE_CODE (tem) == INTEGER_CST
12213 && !TREE_OVERFLOW (tem))
12214 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12215 build2 (GE_EXPR, type,
12216 TREE_OPERAND (arg0, 0), tem),
12217 build2 (LE_EXPR, type,
12218 TREE_OPERAND (arg0, 0), arg1));
12220 /* Convert ABS_EXPR<x> >= 0 to true. */
12221 strict_overflow_p = false;
12222 if (code == GE_EXPR
12223 && (integer_zerop (arg1)
12224 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12225 && real_zerop (arg1)))
12226 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12228 if (strict_overflow_p)
12229 fold_overflow_warning (("assuming signed overflow does not occur "
12230 "when simplifying comparison of "
12231 "absolute value and zero"),
12232 WARN_STRICT_OVERFLOW_CONDITIONAL);
12233 return omit_one_operand (type, integer_one_node, arg0);
12236 /* Convert ABS_EXPR<x> < 0 to false. */
12237 strict_overflow_p = false;
12238 if (code == LT_EXPR
12239 && (integer_zerop (arg1) || real_zerop (arg1))
12240 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12242 if (strict_overflow_p)
12243 fold_overflow_warning (("assuming signed overflow does not occur "
12244 "when simplifying comparison of "
12245 "absolute value and zero"),
12246 WARN_STRICT_OVERFLOW_CONDITIONAL);
12247 return omit_one_operand (type, integer_zero_node, arg0);
12250 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12251 and similarly for >= into !=. */
12252 if ((code == LT_EXPR || code == GE_EXPR)
12253 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12254 && TREE_CODE (arg1) == LSHIFT_EXPR
12255 && integer_onep (TREE_OPERAND (arg1, 0)))
12256 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12257 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12258 TREE_OPERAND (arg1, 1)),
12259 build_int_cst (TREE_TYPE (arg0), 0));
12261 if ((code == LT_EXPR || code == GE_EXPR)
12262 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12263 && (TREE_CODE (arg1) == NOP_EXPR
12264 || TREE_CODE (arg1) == CONVERT_EXPR)
12265 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12266 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12268 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12269 fold_convert (TREE_TYPE (arg0),
12270 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12271 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12273 build_int_cst (TREE_TYPE (arg0), 0));
12277 case UNORDERED_EXPR:
12285 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12287 t1 = fold_relational_const (code, type, arg0, arg1);
12288 if (t1 != NULL_TREE)
12292 /* If the first operand is NaN, the result is constant. */
12293 if (TREE_CODE (arg0) == REAL_CST
12294 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12295 && (code != LTGT_EXPR || ! flag_trapping_math))
12297 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12298 ? integer_zero_node
12299 : integer_one_node;
12300 return omit_one_operand (type, t1, arg1);
12303 /* If the second operand is NaN, the result is constant. */
12304 if (TREE_CODE (arg1) == REAL_CST
12305 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12306 && (code != LTGT_EXPR || ! flag_trapping_math))
12308 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12309 ? integer_zero_node
12310 : integer_one_node;
12311 return omit_one_operand (type, t1, arg0);
12314 /* Simplify unordered comparison of something with itself. */
12315 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12316 && operand_equal_p (arg0, arg1, 0))
12317 return constant_boolean_node (1, type);
12319 if (code == LTGT_EXPR
12320 && !flag_trapping_math
12321 && operand_equal_p (arg0, arg1, 0))
12322 return constant_boolean_node (0, type);
12324 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12326 tree targ0 = strip_float_extensions (arg0);
12327 tree targ1 = strip_float_extensions (arg1);
12328 tree newtype = TREE_TYPE (targ0);
12330 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12331 newtype = TREE_TYPE (targ1);
12333 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12334 return fold_build2 (code, type, fold_convert (newtype, targ0),
12335 fold_convert (newtype, targ1));
12340 case COMPOUND_EXPR:
12341 /* When pedantic, a compound expression can be neither an lvalue
12342 nor an integer constant expression. */
12343 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12345 /* Don't let (0, 0) be null pointer constant. */
12346 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12347 : fold_convert (type, arg1);
12348 return pedantic_non_lvalue (tem);
12351 if ((TREE_CODE (arg0) == REAL_CST
12352 && TREE_CODE (arg1) == REAL_CST)
12353 || (TREE_CODE (arg0) == INTEGER_CST
12354 && TREE_CODE (arg1) == INTEGER_CST))
12355 return build_complex (type, arg0, arg1);
12359 /* An ASSERT_EXPR should never be passed to fold_binary. */
12360 gcc_unreachable ();
12364 } /* switch (code) */
12367 /* Callback for walk_tree, looking for LABEL_EXPR.
12368 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
12369 Do not check the sub-tree of GOTO_EXPR. */
/* walk_tree callback used by contains_label_p: returns *TP when it is
   a LABEL_EXPR, NULL_TREE otherwise, and clears *walk_subtrees so that
   a GOTO_EXPR's operand is not scanned (see comment above).
   NOTE(review): this listing omits intermediate lines of the body.  */
12372 contains_label_1 (tree *tp,
12373 int *walk_subtrees,
12374 void *data ATTRIBUTE_UNUSED)
12376 switch (TREE_CODE (*tp))
/* Do not descend into this sub-tree.  */
12381 *walk_subtrees = 0;
12388 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12389 accessible from outside the sub-tree. Returns NULL_TREE if no
12390 addressable label is found. */
/* Return nonzero iff walking ST finds a LABEL_EXPR reachable from
   outside the sub-tree (walk_tree returns the node found, NULL_TREE
   otherwise).  */
12393 contains_label_p (tree st)
12395 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12398 /* Fold a ternary expression of code CODE and type TYPE with operands
12399 OP0, OP1, and OP2. Return the folded expression if folding is
12400 successful. Otherwise, return NULL_TREE. */
12403 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12406 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12407 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Only expression codes with exactly three operands belong here.  */
12409 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12410 && TREE_CODE_LENGTH (code) == 3);
12412 /* Strip any conversions that don't change the mode. This is safe
12413 for every expression, except for a comparison expression because
12414 its signedness is derived from its operands. So, in the latter
12415 case, only strip conversions that don't change the signedness.
12417 Note that this is done as an internal manipulation within the
12418 constant folder, in order to find the simplest representation of
12419 the arguments so that their form can be studied. In any cases,
12420 the appropriate type conversions should be put back in the tree
12421 that will get out of the constant folder. */
/* COMPONENT_REF of a constant CONSTRUCTOR: look the field up directly
   (only safe when the type contains no PLACEHOLDER_EXPRs).  */
12436 case COMPONENT_REF:
12437 if (TREE_CODE (arg0) == CONSTRUCTOR
12438 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12440 unsigned HOST_WIDE_INT idx;
12442 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12449 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12450 so all simple results must be passed through pedantic_non_lvalue. */
12451 if (TREE_CODE (arg0) == INTEGER_CST
12453 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12454 tem = integer_zerop (arg0) ? op2 : op1;
12455 /* Only optimize constant conditions when the selected branch
12456 has the same type as the COND_EXPR. This avoids optimizing
12457 away "c ? x : throw", where the throw has a void type.
12458 Avoid throwing away that operand which contains label. */
12459 if ((!TREE_SIDE_EFFECTS (unused_op)
12460 || !contains_label_p (unused_op))
12461 && (! VOID_TYPE_P (TREE_TYPE (tem))
12462 || VOID_TYPE_P (type)))
12463 return pedantic_non_lvalue (tem);
/* A ? B : B -> B, keeping A for its side effects.  */
12466 if (operand_equal_p (arg1, op2, 0))
12467 return pedantic_omit_one_operand (type, arg1, arg0);
12469 /* If we have A op B ? A : C, we may be able to convert this to a
12470 simpler expression, depending on the operation and the values
12471 of B and C. Signed zeros prevent all of these transformations,
12472 for reasons given above each one.
12474 Also try swapping the arguments and inverting the conditional. */
12475 if (COMPARISON_CLASS_P (arg0)
12476 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12477 arg1, TREE_OPERAND (arg0, 1))
12478 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12480 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
/* Same transformation with the condition inverted and arms swapped.  */
12485 if (COMPARISON_CLASS_P (arg0)
12486 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12488 TREE_OPERAND (arg0, 1))
12489 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12491 tem = fold_truth_not_expr (arg0);
12492 if (tem && COMPARISON_CLASS_P (tem))
12494 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12500 /* If the second operand is simpler than the third, swap them
12501 since that produces better jump optimization results. */
12502 if (truth_value_p (TREE_CODE (arg0))
12503 && tree_swap_operands_p (op1, op2, false))
12505 /* See if this can be inverted. If it can't, possibly because
12506 it was a floating-point inequality comparison, don't do
12508 tem = fold_truth_not_expr (arg0);
12510 return fold_build3 (code, type, tem, op2, op1);
12513 /* Convert A ? 1 : 0 to simply A. */
12514 if (integer_onep (op1)
12515 && integer_zerop (op2)
12516 /* If we try to convert OP0 to our type, the
12517 call to fold will try to move the conversion inside
12518 a COND, which will recurse. In that case, the COND_EXPR
12519 is probably the best choice, so leave it alone. */
12520 && type == TREE_TYPE (arg0))
12521 return pedantic_non_lvalue (arg0);
12523 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12524 over COND_EXPR in cases such as floating point comparisons. */
12525 if (integer_zerop (op1)
12526 && integer_onep (op2)
12527 && truth_value_p (TREE_CODE (arg0)))
12528 return pedantic_non_lvalue (fold_convert (type,
12529 invert_truthvalue (arg0)));
12531 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12532 if (TREE_CODE (arg0) == LT_EXPR
12533 && integer_zerop (TREE_OPERAND (arg0, 1))
12534 && integer_zerop (op2)
12535 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12537 /* sign_bit_p only checks ARG1 bits within A's precision.
12538 If <sign bit of A> has wider type than A, bits outside
12539 of A's precision in <sign bit of A> need to be checked.
12540 If they are all 0, this optimization needs to be done
12541 in unsigned A's type, if they are all 1 in signed A's type,
12542 otherwise this can't be done. */
12543 if (TYPE_PRECISION (TREE_TYPE (tem))
12544 < TYPE_PRECISION (TREE_TYPE (arg1))
12545 && TYPE_PRECISION (TREE_TYPE (tem))
12546 < TYPE_PRECISION (type))
12548 unsigned HOST_WIDE_INT mask_lo;
12549 HOST_WIDE_INT mask_hi;
12550 int inner_width, outer_width;
12553 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12554 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12555 if (outer_width > TYPE_PRECISION (type))
12556 outer_width = TYPE_PRECISION (type);
/* Build a two-word mask of the bits between inner and outer widths
   (the double-word constant lives in a HIGH/LOW pair).  */
12558 if (outer_width > HOST_BITS_PER_WIDE_INT)
12560 mask_hi = ((unsigned HOST_WIDE_INT) -1
12561 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12567 mask_lo = ((unsigned HOST_WIDE_INT) -1
12568 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12570 if (inner_width > HOST_BITS_PER_WIDE_INT)
12572 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12573 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12577 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12578 >> (HOST_BITS_PER_WIDE_INT - inner_width));
/* All masked bits set -> do the AND in signed type; all clear ->
   unsigned type; anything else leaves the fold undone.  */
12580 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12581 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12583 tem_type = signed_type_for (TREE_TYPE (tem));
12584 tem = fold_convert (tem_type, tem);
12586 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12587 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12589 tem_type = unsigned_type_for (TREE_TYPE (tem));
12590 tem = fold_convert (tem_type, tem);
12597 return fold_convert (type,
12598 fold_build2 (BIT_AND_EXPR,
12599 TREE_TYPE (tem), tem,
12600 fold_convert (TREE_TYPE (tem),
12604 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12605 already handled above. */
12606 if (TREE_CODE (arg0) == BIT_AND_EXPR
12607 && integer_onep (TREE_OPERAND (arg0, 1))
12608 && integer_zerop (op2)
12609 && integer_pow2p (arg1))
12611 tree tem = TREE_OPERAND (arg0, 0);
12613 if (TREE_CODE (tem) == RSHIFT_EXPR
12614 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12615 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12616 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12617 return fold_build2 (BIT_AND_EXPR, type,
12618 TREE_OPERAND (tem, 0), arg1);
12621 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12622 is probably obsolete because the first operand should be a
12623 truth value (that's why we have the two cases above), but let's
12624 leave it in until we can confirm this for all front-ends. */
12625 if (integer_zerop (op2)
12626 && TREE_CODE (arg0) == NE_EXPR
12627 && integer_zerop (TREE_OPERAND (arg0, 1))
12628 && integer_pow2p (arg1)
12629 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12630 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12631 arg1, OEP_ONLY_CONST))
12632 return pedantic_non_lvalue (fold_convert (type,
12633 TREE_OPERAND (arg0, 0)));
12635 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12636 if (integer_zerop (op2)
12637 && truth_value_p (TREE_CODE (arg0))
12638 && truth_value_p (TREE_CODE (arg1)))
12639 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12640 fold_convert (type, arg0),
12643 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12644 if (integer_onep (op2)
12645 && truth_value_p (TREE_CODE (arg0))
12646 && truth_value_p (TREE_CODE (arg1)))
12648 /* Only perform transformation if ARG0 is easily inverted. */
12649 tem = fold_truth_not_expr (arg0);
12651 return fold_build2 (TRUTH_ORIF_EXPR, type,
12652 fold_convert (type, tem),
12656 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12657 if (integer_zerop (arg1)
12658 && truth_value_p (TREE_CODE (arg0))
12659 && truth_value_p (TREE_CODE (op2)))
12661 /* Only perform transformation if ARG0 is easily inverted. */
12662 tem = fold_truth_not_expr (arg0);
12664 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12665 fold_convert (type, tem),
12669 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12670 if (integer_onep (arg1)
12671 && truth_value_p (TREE_CODE (arg0))
12672 && truth_value_p (TREE_CODE (op2)))
12673 return fold_build2 (TRUTH_ORIF_EXPR, type,
12674 fold_convert (type, arg0),
12680 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12681 of fold_ternary on them. */
12682 gcc_unreachable ();
/* BIT_FIELD_REF on a constant vector: extract the selected element
   when the offset/size describe a whole, in-range element.  */
12684 case BIT_FIELD_REF:
12685 if ((TREE_CODE (arg0) == VECTOR_CST
12686 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
12687 && type == TREE_TYPE (TREE_TYPE (arg0))
12688 && host_integerp (arg1, 1)
12689 && host_integerp (op2, 1))
12691 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12692 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
12695 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12696 && (idx % width) == 0
12697 && (idx = idx / width)
12698 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
12700 tree elements = NULL_TREE;
12702 if (TREE_CODE (arg0) == VECTOR_CST)
12703 elements = TREE_VECTOR_CST_ELTS (arg0);
12706 unsigned HOST_WIDE_INT idx;
/* For a constant CONSTRUCTOR, build the element list by consing,
   which reverses it — the walk below relies on that order.  */
12709 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
12710 elements = tree_cons (NULL_TREE, value, elements);
12712 while (idx-- > 0 && elements)
12713 elements = TREE_CHAIN (elements);
12715 return TREE_VALUE (elements);
/* Element not represented in the list: it is an implicit zero.  */
12717 return fold_convert (type, integer_zero_node);
12724 } /* switch (code) */
12727 /* Perform constant folding and related simplification of EXPR.
12728 The related simplifications include x*1 => x, x*0 => 0, etc.,
12729 and application of the associative law.
12730 NOP_EXPR conversions may be removed freely (as long as we
12731 are careful not to change the type of the overall expression).
12732 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12733 but we can constant-fold them if they have constant operands. */
/* Under fold checking the public name `fold' maps to the worker
   fold_1; a checksumming wrapper (below) provides the real entry.  */
12735 #ifdef ENABLE_FOLD_CHECKING
12736 # define fold(x) fold_1 (x)
12737 static tree fold_1 (tree);
12743 const tree t = expr;
12744 enum tree_code code = TREE_CODE (t);
12745 enum tree_code_class kind = TREE_CODE_CLASS (code);
12748 /* Return right away if a constant. */
12749 if (kind == tcc_constant)
12752 /* CALL_EXPR-like objects with variable numbers of operands are
12753 treated specially. */
12754 if (kind == tcc_vl_exp)
12756 if (code == CALL_EXPR)
12758 tem = fold_call_expr (expr, false);
12759 return tem ? tem : expr;
12764 if (IS_EXPR_CODE_CLASS (kind)
12765 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12767 tree type = TREE_TYPE (t);
12768 tree op0, op1, op2;
/* Dispatch on the operand count to the unary/binary/ternary folder;
   on failure (NULL) return the original expression unchanged.  */
12770 switch (TREE_CODE_LENGTH (code))
12773 op0 = TREE_OPERAND (t, 0);
12774 tem = fold_unary (code, type, op0);
12775 return tem ? tem : expr;
12777 op0 = TREE_OPERAND (t, 0);
12778 op1 = TREE_OPERAND (t, 1);
12779 tem = fold_binary (code, type, op0, op1);
12780 return tem ? tem : expr;
12782 op0 = TREE_OPERAND (t, 0);
12783 op1 = TREE_OPERAND (t, 1);
12784 op2 = TREE_OPERAND (t, 2);
12785 tem = fold_ternary (code, type, op0, op1, op2);
12786 return tem ? tem : expr;
/* CONST_DECL (per surrounding code) folds to its initial value.  */
12795 return fold (DECL_INITIAL (t));
12799 } /* switch (code) */
12802 #ifdef ENABLE_FOLD_CHECKING
12805 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12806 static void fold_check_failed (tree, tree);
12807 void print_fold_checksum (tree);
12809 /* When --enable-checking=fold, compute a digest of expr before
12810 and after actual fold call to see if fold did not accidentally
12811 change original expr. */
12817 struct md5_ctx ctx;
12818 unsigned char checksum_before[16], checksum_after[16];
12821 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12822 md5_init_ctx (&ctx);
12823 fold_checksum_tree (expr, &ctx, ht);
12824 md5_finish_ctx (&ctx, checksum_before);
/* Run the real folder.  */
12827 ret = fold_1 (expr);
12829 md5_init_ctx (&ctx);
12830 fold_checksum_tree (expr, &ctx, ht);
12831 md5_finish_ctx (&ctx, checksum_after);
/* Any digest difference means fold_1 mutated its input tree.  */
12834 if (memcmp (checksum_before, checksum_after, 16))
12835 fold_check_failed (expr, ret);
/* Print the md5 checksum of EXPR to stderr as 32 hex digits followed
   by a newline (debugging aid for the fold checker).  */
12841 print_fold_checksum (tree expr)
12843 struct md5_ctx ctx;
12844 unsigned char checksum[16], cnt;
12847 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12848 md5_init_ctx (&ctx);
12849 fold_checksum_tree (expr, &ctx, ht);
12850 md5_finish_ctx (&ctx, checksum);
12852 for (cnt = 0; cnt < 16; ++cnt)
12853 fprintf (stderr, "%02x", checksum[cnt]);
12854 putc ('\n', stderr);
/* Report (fatally, via internal_error) that fold modified the tree it
   was given.  The arguments exist for debugger inspection only.  */
12858 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12860 internal_error ("fold check: original tree changed by fold");
/* Accumulate into CTX an md5 digest of EXPR and everything reachable
   from it, using hash table HT to visit each shared node only once.
   Fields that fold is allowed to modify (assembler names, cached type
   values, etc.) are masked out by hashing a scrubbed stack copy.  */
12864 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12867 enum tree_code code;
12868 struct tree_function_decl buf;
/* BUF must be large enough to hold a copy of any node we scrub.  */
12873 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12874 <= sizeof (struct tree_function_decl))
12875 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12878 slot = htab_find_slot (ht, expr, INSERT);
12882 code = TREE_CODE (expr);
12883 if (TREE_CODE_CLASS (code) == tcc_declaration
12884 && DECL_ASSEMBLER_NAME_SET_P (expr))
12886 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12887 memcpy ((char *) &buf, expr, tree_size (expr));
12888 expr = (tree) &buf;
12889 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12891 else if (TREE_CODE_CLASS (code) == tcc_type
12892 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12893 || TYPE_CACHED_VALUES_P (expr)
12894 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12896 /* Allow these fields to be modified. */
12897 memcpy ((char *) &buf, expr, tree_size (expr));
12898 expr = (tree) &buf;
12899 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12900 TYPE_POINTER_TO (expr) = NULL;
12901 TYPE_REFERENCE_TO (expr) = NULL;
12902 if (TYPE_CACHED_VALUES_P (expr))
12904 TYPE_CACHED_VALUES_P (expr) = 0;
12905 TYPE_CACHED_VALUES (expr) = NULL;
/* Hash the node itself, then recurse into type, chain, and the
   class-specific sub-trees below.  */
12908 md5_process_bytes (expr, tree_size (expr), ctx);
12909 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12910 if (TREE_CODE_CLASS (code) != tcc_type
12911 && TREE_CODE_CLASS (code) != tcc_declaration
12912 && code != TREE_LIST
12913 && code != SSA_NAME)
12914 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12915 switch (TREE_CODE_CLASS (code))
12921 md5_process_bytes (TREE_STRING_POINTER (expr),
12922 TREE_STRING_LENGTH (expr), ctx);
12925 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12926 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12929 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12935 case tcc_exceptional:
/* TREE_LIST: hash purpose/value, then iterate down the chain
   without growing the C stack (goto instead of recursion).  */
12939 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12940 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12941 expr = TREE_CHAIN (expr);
12942 goto recursive_label;
12945 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12946 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12952 case tcc_expression:
12953 case tcc_reference:
12954 case tcc_comparison:
12957 case tcc_statement:
12959 len = TREE_OPERAND_LENGTH (expr);
12960 for (i = 0; i < len; ++i)
12961 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12963 case tcc_declaration:
12964 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12965 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12966 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12968 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12969 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12970 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12971 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12972 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12974 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12975 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12977 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12979 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12980 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12981 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12985 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12986 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12987 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12988 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12989 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12990 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12991 if (INTEGRAL_TYPE_P (expr)
12992 || SCALAR_FLOAT_TYPE_P (expr))
12994 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12995 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12997 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12998 if (TREE_CODE (expr) == RECORD_TYPE
12999 || TREE_CODE (expr) == UNION_TYPE
13000 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13001 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13002 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13009 /* Helper function for outputting the checksum of a tree T. When
13010 debugging with gdb, you can "define mynext" to be "next" followed
13011 by "call debug_fold_checksum (op0)", then just trace down till the
/* GDB helper: print T's md5 checksum to stderr as 16 space-separated
   decimal bytes (see the usage comment above).  */
13015 debug_fold_checksum (tree t)
13018 unsigned char checksum[16];
13019 struct md5_ctx ctx;
13020 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13022 md5_init_ctx (&ctx);
13023 fold_checksum_tree (t, &ctx, ht);
13024 md5_finish_ctx (&ctx, checksum);
13027 for (i = 0; i < 16; i++)
13028 fprintf (stderr, "%d ", checksum[i]);
13030 fprintf (stderr, "\n");
13035 /* Fold a unary tree expression with code CODE of type TYPE with an
13036 operand OP0. Return a folded expression if successful. Otherwise,
13037 return a tree expression with code CODE of type TYPE with an
13041 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
/* Under fold checking, checksum OP0 before and after to verify the
   folder does not mutate its operand.  */
13044 #ifdef ENABLE_FOLD_CHECKING
13045 unsigned char checksum_before[16], checksum_after[16];
13046 struct md5_ctx ctx;
13049 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13050 md5_init_ctx (&ctx);
13051 fold_checksum_tree (op0, &ctx, ht);
13052 md5_finish_ctx (&ctx, checksum_before);
/* Try to fold; if that fails, build a plain CODE node instead.  */
13056 tem = fold_unary (code, type, op0);
13058 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13060 #ifdef ENABLE_FOLD_CHECKING
13061 md5_init_ctx (&ctx);
13062 fold_checksum_tree (op0, &ctx, ht);
13063 md5_finish_ctx (&ctx, checksum_after);
13066 if (memcmp (checksum_before, checksum_after, 16))
13067 fold_check_failed (op0, tem);
13072 /* Fold a binary tree expression with code CODE of type TYPE with
13073 operands OP0 and OP1. Return a folded expression if successful.
13074 Otherwise, return a tree expression with code CODE of type TYPE
13075 with operands OP0 and OP1. */
13078 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
/* Under fold checking, checksum both operands before and after to
   verify the folder does not mutate them.  */
13082 #ifdef ENABLE_FOLD_CHECKING
13083 unsigned char checksum_before_op0[16],
13084 checksum_before_op1[16],
13085 checksum_after_op0[16],
13086 checksum_after_op1[16];
13087 struct md5_ctx ctx;
13090 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13091 md5_init_ctx (&ctx);
13092 fold_checksum_tree (op0, &ctx, ht);
13093 md5_finish_ctx (&ctx, checksum_before_op0);
13096 md5_init_ctx (&ctx);
13097 fold_checksum_tree (op1, &ctx, ht);
13098 md5_finish_ctx (&ctx, checksum_before_op1);
/* Try to fold; if that fails, build a plain CODE node instead.  */
13102 tem = fold_binary (code, type, op0, op1);
13104 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13106 #ifdef ENABLE_FOLD_CHECKING
13107 md5_init_ctx (&ctx);
13108 fold_checksum_tree (op0, &ctx, ht);
13109 md5_finish_ctx (&ctx, checksum_after_op0);
13112 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13113 fold_check_failed (op0, tem);
13115 md5_init_ctx (&ctx);
13116 fold_checksum_tree (op1, &ctx, ht);
13117 md5_finish_ctx (&ctx, checksum_after_op1);
13120 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13121 fold_check_failed (op1, tem);
13126 /* Fold a ternary tree expression with code CODE of type TYPE with
13127 operands OP0, OP1, and OP2. Return a folded expression if
13128 successful. Otherwise, return a tree expression with code CODE of
13129 type TYPE with operands OP0, OP1, and OP2. */
13132 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
/* Under fold checking, checksum all three operands before and after
   to verify the folder does not mutate them.  */
13136 #ifdef ENABLE_FOLD_CHECKING
13137 unsigned char checksum_before_op0[16],
13138 checksum_before_op1[16],
13139 checksum_before_op2[16],
13140 checksum_after_op0[16],
13141 checksum_after_op1[16],
13142 checksum_after_op2[16];
13143 struct md5_ctx ctx;
13146 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13147 md5_init_ctx (&ctx);
13148 fold_checksum_tree (op0, &ctx, ht);
13149 md5_finish_ctx (&ctx, checksum_before_op0);
13152 md5_init_ctx (&ctx);
13153 fold_checksum_tree (op1, &ctx, ht);
13154 md5_finish_ctx (&ctx, checksum_before_op1);
13157 md5_init_ctx (&ctx);
13158 fold_checksum_tree (op2, &ctx, ht);
13159 md5_finish_ctx (&ctx, checksum_before_op2);
/* Variable-length codes (CALL_EXPR) must not come through here.  */
13163 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13164 tem = fold_ternary (code, type, op0, op1, op2);
13166 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13168 #ifdef ENABLE_FOLD_CHECKING
13169 md5_init_ctx (&ctx);
13170 fold_checksum_tree (op0, &ctx, ht);
13171 md5_finish_ctx (&ctx, checksum_after_op0);
13174 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13175 fold_check_failed (op0, tem);
13177 md5_init_ctx (&ctx);
13178 fold_checksum_tree (op1, &ctx, ht);
13179 md5_finish_ctx (&ctx, checksum_after_op1);
13182 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13183 fold_check_failed (op1, tem);
13185 md5_init_ctx (&ctx);
13186 fold_checksum_tree (op2, &ctx, ht);
13187 md5_finish_ctx (&ctx, checksum_after_op2);
13190 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13191 fold_check_failed (op2, tem);
13196 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13197 arguments in ARGARRAY, and a null static chain.
13198 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13199 of type TYPE from the given operands as constructed by build_call_array. */
13202 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
/* Under fold checking, checksum FN and the whole argument list before
   and after to verify the folder does not mutate them.  */
13205 #ifdef ENABLE_FOLD_CHECKING
13206 unsigned char checksum_before_fn[16],
13207 checksum_before_arglist[16],
13208 checksum_after_fn[16],
13209 checksum_after_arglist[16];
13210 struct md5_ctx ctx;
13214 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13215 md5_init_ctx (&ctx);
13216 fold_checksum_tree (fn, &ctx, ht);
13217 md5_finish_ctx (&ctx, checksum_before_fn);
13220 md5_init_ctx (&ctx);
13221 for (i = 0; i < nargs; i++)
13222 fold_checksum_tree (argarray[i], &ctx, ht);
13223 md5_finish_ctx (&ctx, checksum_before_arglist);
13227 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13229 #ifdef ENABLE_FOLD_CHECKING
13230 md5_init_ctx (&ctx);
13231 fold_checksum_tree (fn, &ctx, ht);
13232 md5_finish_ctx (&ctx, checksum_after_fn);
13235 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13236 fold_check_failed (fn, tem);
13238 md5_init_ctx (&ctx);
13239 for (i = 0; i < nargs; i++)
13240 fold_checksum_tree (argarray[i], &ctx, ht);
13241 md5_finish_ctx (&ctx, checksum_after_arglist);
13244 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13245 fold_check_failed (NULL_TREE, tem);
13250 /* Perform constant folding and related simplification of initializer
13251 expression EXPR. These behave identically to "fold_buildN" but ignore
13252 potential run-time traps and exceptions that fold must preserve. */
/* Save the FP-trap/rounding flags, then clear them and set
   folding_initializer, so initializer folding ignores run-time traps
   and rounding modes.  (No comments inside the macro bodies: a line
   between backslash-continuations would break them.)  */
13254 #define START_FOLD_INIT \
13255 int saved_signaling_nans = flag_signaling_nans;\
13256 int saved_trapping_math = flag_trapping_math;\
13257 int saved_rounding_math = flag_rounding_math;\
13258 int saved_trapv = flag_trapv;\
13259 int saved_folding_initializer = folding_initializer;\
13260 flag_signaling_nans = 0;\
13261 flag_trapping_math = 0;\
13262 flag_rounding_math = 0;\
13264 folding_initializer = 1;
/* Restore every flag saved by START_FOLD_INIT.  */
13266 #define END_FOLD_INIT \
13267 flag_signaling_nans = saved_signaling_nans;\
13268 flag_trapping_math = saved_trapping_math;\
13269 flag_rounding_math = saved_rounding_math;\
13270 flag_trapv = saved_trapv;\
13271 folding_initializer = saved_folding_initializer;
/* fold_build1 for initializers: runs between START_FOLD_INIT and
   END_FOLD_INIT so trap/rounding flags are ignored.  */
13274 fold_build1_initializer (enum tree_code code, tree type, tree op)
13279 result = fold_build1 (code, type, op);
13286 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13291 result = fold_build2 (code, type, op0, op1);
13298 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13304 result = fold_build3 (code, type, op0, op1, op2);
13311 fold_build_call_array_initializer (tree type, tree fn,
13312 int nargs, tree *argarray)
13317 result = fold_build_call_array (type, fn, nargs, argarray);
13323 #undef START_FOLD_INIT
13324 #undef END_FOLD_INIT
13326 /* Determine if first argument is a multiple of second argument. Return 0 if
13327 it is not, or we cannot easily determine it to be.
13329 An example of the sort of thing we care about (at this point; this routine
13330 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13331 fold cases do now) is discovering that
13333 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13339 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13341 This code also handles discovering that
13343 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13345 is a multiple of 8 so we don't have to worry about dealing with a
13346 possible remainder.
13348 Note that we *look* inside a SAVE_EXPR only to determine how it was
13349 calculated; it is not safe for fold to do much of anything else with the
13350 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13351 at run time. For example, the latter example above *cannot* be implemented
13352 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13353 evaluation time of the original SAVE_EXPR is not necessarily the same at
13354 the time the new expression is evaluated. The only optimization of this
13355 sort that would be valid is changing
13357 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13361 SAVE_EXPR (I) * SAVE_EXPR (J)
13363 (where the same SAVE_EXPR (J) is used in the original and the
13364 transformed version). */
/* Worker: return nonzero when TOP is known to be a multiple of BOTTOM
   (see the block comment above for examples).  Only INTEGER_TYPE is
   handled; everything unknown conservatively answers 0.  */
13367 multiple_of_p (tree type, tree top, tree bottom)
/* Structurally identical trees are trivially multiples of each other.  */
13369   if (operand_equal_p (top, bottom, 0))
13372   if (TREE_CODE (type) != INTEGER_TYPE)
13375   switch (TREE_CODE (top))
13378       /* Bitwise and provides a power of two multiple.  If the mask is
13379 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
13380       if (!integer_pow2p (bottom))
13385       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13386 	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom))
13390       return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13391 	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Shift by a small non-negative constant: fold the shift amount into an
   explicit constant and recurse on it.  */
13394       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13398 	  op1 = TREE_OPERAND (top, 1);
13399 	  /* const_binop may not detect overflow correctly,
13400 	     so check for it explicitly here.  */
13401 	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13402 	      > TREE_INT_CST_LOW (op1)
13403 	      && TREE_INT_CST_HIGH (op1) == 0
13404 	      && 0 != (t1 = fold_convert (type,
13405 					  const_binop (LSHIFT_EXPR,
13408 	      && !TREE_OVERFLOW (t1))
13409 	    return multiple_of_p (type, t1, bottom);
13414       /* Can't handle conversions from non-integral or wider integral type.  */
13415       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13416 	  || (TYPE_PRECISION (type)
13417 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13420       /* .. fall through ...  */
13423       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Both operands constant: TOP is a multiple of BOTTOM iff
   TOP % BOTTOM == 0.  Reject a zero BOTTOM, and for unsigned TYPE
   reject negative operands (the modulo below would be misleading).  */
13426       if (TREE_CODE (bottom) != INTEGER_CST
13427 	  || integer_zerop (bottom)
13428 	  || (TYPE_UNSIGNED (type)
13429 	      && (tree_int_cst_sgn (top) < 0
13430 		  || tree_int_cst_sgn (bottom) < 0)))
13432       return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13440 /* Return true if `t' is known to be non-negative. If the return
13441 value is based on the assumption that signed overflow is undefined,
13442 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13443 *STRICT_OVERFLOW_P. */
/* Worker: return true when T is provably >= 0.  Sets *STRICT_OVERFLOW_P
   when the proof relies on signed overflow being undefined; callers
   (e.g. tree_expr_nonnegative_p) turn that into a warning.  */
13446 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13448   if (t == error_mark_node)
/* Every value of an unsigned type is non-negative.  */
13451   if (TYPE_UNSIGNED (TREE_TYPE (t)))
13454   switch (TREE_CODE (t))
13457       /* Query VRP to see if it has recorded any information about
13458 	 the range of this object.  */
13459       return ssa_name_nonnegative_p (t);
13462       /* We can't return 1 if flag_wrapv is set because
13463 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
13464       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13466       if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
13468 	  *strict_overflow_p = true;
13474       return tree_int_cst_sgn (t) >= 0;
13477       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13479     case POINTER_PLUS_EXPR:
13481       if (FLOAT_TYPE_P (TREE_TYPE (t)))
13482 	return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13484 		&& tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13485 						  strict_overflow_p));
13487       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13488 	 both unsigned and at least 2 bits shorter than the result.  */
13489       if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13490 	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13491 	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13493 	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13494 	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13495 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13496 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
/* Widened by at least one bit beyond the wider operand, so the sum
   cannot wrap into the sign bit.  */
13498 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
13499 				       TYPE_PRECISION (inner2)) + 1;
13500 	      return prec < TYPE_PRECISION (TREE_TYPE (t));
13506       if (FLOAT_TYPE_P (TREE_TYPE (t)))
13508 	  /* x * x for floating point x is always non-negative.  */
13509 	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
13511 	  return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13513 		  && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13514 						    strict_overflow_p));
13517       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13518 	 both unsigned and their total bits is shorter than the result.  */
13519       if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13520 	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13521 	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13523 	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13524 	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13525 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13526 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13527 	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13528 		   < TYPE_PRECISION (TREE_TYPE (t));
13534       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13536 	      || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13537 						strict_overflow_p));
13543     case TRUNC_DIV_EXPR:
13544     case CEIL_DIV_EXPR:
13545     case FLOOR_DIV_EXPR:
13546     case ROUND_DIV_EXPR:
13547       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13549 	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13550 						strict_overflow_p));
13552     case TRUNC_MOD_EXPR:
13553     case CEIL_MOD_EXPR:
13554     case FLOOR_MOD_EXPR:
13555     case ROUND_MOD_EXPR:
13557     case NON_LVALUE_EXPR:
13559     case FIX_TRUNC_EXPR:
13560       return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13561 					    strict_overflow_p);
13563     case COMPOUND_EXPR:
13565     case GIMPLE_MODIFY_STMT:
/* For an assignment/compound, the value is that of the RHS.  */
13566       return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13567 					    strict_overflow_p);
13570       return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13571 					    strict_overflow_p);
/* Conditional: both arms must be non-negative.  */
13574       return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13576 	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13577 						strict_overflow_p));
13581 	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13582 	tree outer_type = TREE_TYPE (t);
13584 	if (TREE_CODE (outer_type) == REAL_TYPE)
13586 	    if (TREE_CODE (inner_type) == REAL_TYPE)
13587 	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13588 						    strict_overflow_p);
13589 	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
13591 		if (TYPE_UNSIGNED (inner_type))
13593 		return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13594 						      strict_overflow_p);
13597 	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13599 	    if (TREE_CODE (inner_type) == REAL_TYPE)
13600 	      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
13601 						    strict_overflow_p);
/* int-to-int: a strict widening of an unsigned value stays
   non-negative.  */
13602 	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
13603 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13604 		     && TYPE_UNSIGNED (inner_type);
13611 	tree temp = TARGET_EXPR_SLOT (t);
13612 	t = TARGET_EXPR_INITIAL (t);
13614 	/* If the initializer is non-void, then it's a normal expression
13615 	   that will be assigned to the slot.  */
13616 	if (!VOID_TYPE_P (t))
13617 	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13619 	/* Otherwise, the initializer sets the slot in some way.  One common
13620 	   way is an assignment statement at the end of the initializer.  */
13623 	if (TREE_CODE (t) == BIND_EXPR)
13624 	  t = expr_last (BIND_EXPR_BODY (t));
13625 	else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13626 		 || TREE_CODE (t) == TRY_CATCH_EXPR)
13627 	  t = expr_last (TREE_OPERAND (t, 0));
13628 	else if (TREE_CODE (t) == STATEMENT_LIST)
13633 	if ((TREE_CODE (t) == MODIFY_EXPR
13634 	     || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13635 	    && GENERIC_TREE_OPERAND (t, 0) == temp)
13636 	  return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13637 						strict_overflow_p);
/* Built-in calls: many math builtins have a known result sign.  */
13644 	tree fndecl = get_callee_fndecl (t);
13645 	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13646 	  switch (DECL_FUNCTION_CODE (fndecl))
/* These builtins are always non-negative.  */
13648 	    CASE_FLT_FN (BUILT_IN_ACOS):
13649 	    CASE_FLT_FN (BUILT_IN_ACOSH):
13650 	    CASE_FLT_FN (BUILT_IN_CABS):
13651 	    CASE_FLT_FN (BUILT_IN_COSH):
13652 	    CASE_FLT_FN (BUILT_IN_ERFC):
13653 	    CASE_FLT_FN (BUILT_IN_EXP):
13654 	    CASE_FLT_FN (BUILT_IN_EXP10):
13655 	    CASE_FLT_FN (BUILT_IN_EXP2):
13656 	    CASE_FLT_FN (BUILT_IN_FABS):
13657 	    CASE_FLT_FN (BUILT_IN_FDIM):
13658 	    CASE_FLT_FN (BUILT_IN_HYPOT):
13659 	    CASE_FLT_FN (BUILT_IN_POW10):
13660 	    CASE_INT_FN (BUILT_IN_FFS):
13661 	    CASE_INT_FN (BUILT_IN_PARITY):
13662 	    CASE_INT_FN (BUILT_IN_POPCOUNT):
13663 	    case BUILT_IN_BSWAP32:
13664 	    case BUILT_IN_BSWAP64:
13668 	    CASE_FLT_FN (BUILT_IN_SQRT):
13669 	      /* sqrt(-0.0) is -0.0.  */
13670 	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
13672 	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13673 						    strict_overflow_p);
/* These preserve the sign of their first argument.  */
13675 	    CASE_FLT_FN (BUILT_IN_ASINH):
13676 	    CASE_FLT_FN (BUILT_IN_ATAN):
13677 	    CASE_FLT_FN (BUILT_IN_ATANH):
13678 	    CASE_FLT_FN (BUILT_IN_CBRT):
13679 	    CASE_FLT_FN (BUILT_IN_CEIL):
13680 	    CASE_FLT_FN (BUILT_IN_ERF):
13681 	    CASE_FLT_FN (BUILT_IN_EXPM1):
13682 	    CASE_FLT_FN (BUILT_IN_FLOOR):
13683 	    CASE_FLT_FN (BUILT_IN_FMOD):
13684 	    CASE_FLT_FN (BUILT_IN_FREXP):
13685 	    CASE_FLT_FN (BUILT_IN_LCEIL):
13686 	    CASE_FLT_FN (BUILT_IN_LDEXP):
13687 	    CASE_FLT_FN (BUILT_IN_LFLOOR):
13688 	    CASE_FLT_FN (BUILT_IN_LLCEIL):
13689 	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
13690 	    CASE_FLT_FN (BUILT_IN_LLRINT):
13691 	    CASE_FLT_FN (BUILT_IN_LLROUND):
13692 	    CASE_FLT_FN (BUILT_IN_LRINT):
13693 	    CASE_FLT_FN (BUILT_IN_LROUND):
13694 	    CASE_FLT_FN (BUILT_IN_MODF):
13695 	    CASE_FLT_FN (BUILT_IN_NEARBYINT):
13696 	    CASE_FLT_FN (BUILT_IN_RINT):
13697 	    CASE_FLT_FN (BUILT_IN_ROUND):
13698 	    CASE_FLT_FN (BUILT_IN_SCALB):
13699 	    CASE_FLT_FN (BUILT_IN_SCALBLN):
13700 	    CASE_FLT_FN (BUILT_IN_SCALBN):
13701 	    CASE_FLT_FN (BUILT_IN_SIGNBIT):
13702 	    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13703 	    CASE_FLT_FN (BUILT_IN_SINH):
13704 	    CASE_FLT_FN (BUILT_IN_TANH):
13705 	    CASE_FLT_FN (BUILT_IN_TRUNC):
13706 	      /* True if the 1st argument is nonnegative.  */
13707 	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13708 						    strict_overflow_p);
13710 	    CASE_FLT_FN (BUILT_IN_FMAX):
13711 	      /* True if the 1st OR 2nd arguments are nonnegative.  */
13712 	      return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13714 		      || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13715 							 strict_overflow_p)));
13717 	    CASE_FLT_FN (BUILT_IN_FMIN):
13718 	      /* True if the 1st AND 2nd arguments are nonnegative.  */
13719 	      return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13721 		      && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13722 							 strict_overflow_p)));
13724 	    CASE_FLT_FN (BUILT_IN_COPYSIGN):
13725 	      /* True if the 2nd argument is nonnegative.  */
13726 	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13727 						    strict_overflow_p);
13729 	    CASE_FLT_FN (BUILT_IN_POWI):
13730 	      /* True if the 1st argument is nonnegative or the second
13731 		 argument is an even integer.  */
13732 	      if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
13734 		  tree arg1 = CALL_EXPR_ARG (t, 1);
13735 		  if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
13738 	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13739 						    strict_overflow_p);
13741 	    CASE_FLT_FN (BUILT_IN_POW):
13742 	      /* True if the 1st argument is nonnegative or the second
13743 		 argument is an even integer valued real.  */
13744 	      if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
13749 		  c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
13750 		  n = real_to_integer (&c);
13753 		      REAL_VALUE_TYPE cint;
13754 		      real_from_integer (&cint, VOIDmode, n,
13755 					 n < 0 ? -1 : 0, 0);
13756 		      if (real_identical (&c, &cint))
13760 	      return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13761 						    strict_overflow_p);
13768       /* ... fall through ...  */
13772 	tree type = TREE_TYPE (t);
13773 	if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13774 	    && truth_value_p (TREE_CODE (t)))
13775 	  /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13776 	     have a signed:1 type (where the value is -1 and 0).  */
13781   /* We don't know sign of `t', so be conservative and return false.  */
13785 /* Return true if `t' is known to be non-negative. Handle warnings
13786 about undefined signed overflow. */
13789 tree_expr_nonnegative_p (tree t)
13791 bool ret, strict_overflow_p;
13793 strict_overflow_p = false;
13794 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13795 if (strict_overflow_p)
13796 fold_overflow_warning (("assuming signed overflow does not occur when "
13797 "determining that expression is always "
13799 WARN_STRICT_OVERFLOW_MISC);
13803 /* Return true when T is an address and is known to be nonzero.
13804 For floating point we further ensure that T is not denormal.
13805 Similar logic is present in nonzero_address in rtlanal.h.
13807 If the return value is based on the assumption that signed overflow
13808 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13809 change *STRICT_OVERFLOW_P. */
/* Worker: return true when T is provably nonzero; sets
   *STRICT_OVERFLOW_P when the proof relies on signed overflow being
   undefined.  Only integral and pointer types are handled.  */
13812 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13814   tree type = TREE_TYPE (t);
13815   bool sub_strict_overflow_p;
13817   /* Doing something useful for floating point would need more work.  */
13818   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
13821   switch (TREE_CODE (t))
13824       /* Query VRP to see if it has recorded any information about
13825 	 the range of this object.  */
13826       return ssa_name_nonzero_p (t);
13829       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13830 					strict_overflow_p);
/* An integer constant is nonzero iff it is not literally zero.  */
13833       return !integer_zerop (t);
13835     case POINTER_PLUS_EXPR:
13837       if (TYPE_OVERFLOW_UNDEFINED (type))
13839 	  /* With the presence of negative values it is hard
13840 	     to say something.  */
13841 	  sub_strict_overflow_p = false;
13842 	  if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13843 					      &sub_strict_overflow_p)
13844 	      || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13845 						 &sub_strict_overflow_p))
13847 	  /* One of operands must be positive and the other non-negative.  */
13848 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
13849 	     overflows, on a twos-complement machine the sum of two
13850 	     nonnegative numbers can never be zero.  */
13851 	  return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13853 		  || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13854 						strict_overflow_p));
/* A product is nonzero when both factors are, but only when overflow
   is undefined (otherwise a wrapping product can be zero).  */
13859       if (TYPE_OVERFLOW_UNDEFINED (type))
13861 	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13863 	      && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13864 					    strict_overflow_p))
13866 	      *strict_overflow_p = true;
13874 	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13875 	tree outer_type = TREE_TYPE (t);
/* A widening (non-narrowing) conversion preserves nonzero-ness.  */
13877 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13878 		&& tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13879 					      strict_overflow_p));
/* The address of a declared object or constant is nonzero, unless the
   symbol is weak and may resolve to NULL at link time.  */
13885 	tree base = get_base_address (TREE_OPERAND (t, 0));
13890 	/* Weak declarations may link to NULL.  */
13891 	if (VAR_OR_FUNCTION_DECL_P (base))
13892 	  return !DECL_WEAK (base);
13894 	/* Constants are never weak.  */
13895 	if (CONSTANT_CLASS_P (base))
/* Conditional: nonzero when both arms are nonzero.  */
13902       sub_strict_overflow_p = false;
13903       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13904 				     &sub_strict_overflow_p)
13905 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13906 					&sub_strict_overflow_p))
13908 	  if (sub_strict_overflow_p)
13909 	    *strict_overflow_p = true;
13915       sub_strict_overflow_p = false;
13916       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13917 				     &sub_strict_overflow_p)
13918 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13919 					&sub_strict_overflow_p))
13921 	  if (sub_strict_overflow_p)
13922 	    *strict_overflow_p = true;
13927       sub_strict_overflow_p = false;
13928       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13929 				     &sub_strict_overflow_p))
13931 	  if (sub_strict_overflow_p)
13932 	    *strict_overflow_p = true;
13934 	  /* When both operands are nonzero, then MAX must be too.  */
13935 	  if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13936 					 strict_overflow_p))
13939 	  /* MAX where operand 0 is positive is positive.  */
13940 	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13941 						strict_overflow_p);
13943       /* MAX where operand 1 is positive is positive.  */
13944       else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13945 					  &sub_strict_overflow_p)
13946 	       && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13947 						 &sub_strict_overflow_p))
13949 	  if (sub_strict_overflow_p)
13950 	    *strict_overflow_p = true;
13955     case COMPOUND_EXPR:
13957     case GIMPLE_MODIFY_STMT:
/* The value of an assignment/compound is its RHS.  */
13959       return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13960 					strict_overflow_p);
13963     case NON_LVALUE_EXPR:
13964       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13965 					strict_overflow_p);
13968       return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13970 	      || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13971 					    strict_overflow_p));
/* alloca never returns a null pointer.  */
13974       return alloca_call_p (t);
13982 /* Return true when T is an address and is known to be nonzero.
13983 Handle warnings about undefined signed overflow. */
13986 tree_expr_nonzero_p (tree t)
13988 bool ret, strict_overflow_p;
13990 strict_overflow_p = false;
13991 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
13992 if (strict_overflow_p)
13993 fold_overflow_warning (("assuming signed overflow does not occur when "
13994 "determining that expression is always "
13996 WARN_STRICT_OVERFLOW_MISC);
14000 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14001 attempt to fold the expression to a constant without modifying TYPE,
14004 If the expression could be simplified to a constant, then return
14005 the constant. If the expression would not be simplified to a
14006 constant, then return NULL_TREE. */
14009 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14011 tree tem = fold_binary (code, type, op0, op1);
14012 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14015 /* Given the components of a unary expression CODE, TYPE and OP0,
14016 attempt to fold the expression to a constant without modifying
14019 If the expression could be simplified to a constant, then return
14020 the constant. If the expression would not be simplified to a
14021 constant, then return NULL_TREE. */
14024 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14026 tree tem = fold_unary (code, type, op0);
14027 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14030 /* If EXP represents referencing an element in a constant string
14031 (either via pointer arithmetic or array indexing), return the
14032 tree representing the value accessed, otherwise return NULL. */
/* Fold a read of one element from a constant string, reached either
   through *p arithmetic or through array indexing; returns the element
   as an INTEGER_CST of the access type, or NULL on failure.  */
14035 fold_read_from_constant_string (tree exp)
14037   if ((TREE_CODE (exp) == INDIRECT_REF
14038        || TREE_CODE (exp) == ARRAY_REF)
14039       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14041       tree exp1 = TREE_OPERAND (exp, 0);
14045       if (TREE_CODE (exp) == INDIRECT_REF)
14046 	string = string_constant (exp1, &index);
14049 	  tree low_bound = array_ref_low_bound (exp);
14050 	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14052 	  /* Optimize the special-case of a zero lower bound.
14054 	     We convert the low_bound to sizetype to avoid some problems
14055 	     with constant folding.  (E.g. suppose the lower bound is 1,
14056 	     and its mode is QI.  Without the conversion, (ARRAY
14057 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14058 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
14059 	  if (! integer_zerop (low_bound))
14060 	    index = size_diffop (index, fold_convert (sizetype, low_bound))
/* Only fold a constant, in-bounds access to a single-byte element whose
   mode matches the string's element mode.  */
14066       && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14067       && TREE_CODE (string) == STRING_CST
14068       && TREE_CODE (index) == INTEGER_CST
14069       && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14070       && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14072       && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14073     return build_int_cst_type (TREE_TYPE (exp),
14074 			       (TREE_STRING_POINTER (string)
14075 				[TREE_INT_CST_LOW (index)]));
14080 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14081 an integer constant or real constant.
14083 TYPE is the type of the result. */
14086 fold_negate_const (tree arg0, tree type)
14088 tree t = NULL_TREE;
14090 switch (TREE_CODE (arg0))
14094 unsigned HOST_WIDE_INT low;
14095 HOST_WIDE_INT high;
14096 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14097 TREE_INT_CST_HIGH (arg0),
14099 t = force_fit_type_double (type, low, high, 1,
14100 (overflow | TREE_OVERFLOW (arg0))
14101 && !TYPE_UNSIGNED (type));
14106 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14110 gcc_unreachable ();
14116 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14117 an integer constant or real constant.
14119 TYPE is the type of the result. */
14122 fold_abs_const (tree arg0, tree type)
14124 tree t = NULL_TREE;
14126 switch (TREE_CODE (arg0))
14129 /* If the value is unsigned, then the absolute value is
14130 the same as the ordinary value. */
14131 if (TYPE_UNSIGNED (type))
14133 /* Similarly, if the value is non-negative. */
14134 else if (INT_CST_LT (integer_minus_one_node, arg0))
14136 /* If the value is negative, then the absolute value is
14140 unsigned HOST_WIDE_INT low;
14141 HOST_WIDE_INT high;
14142 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14143 TREE_INT_CST_HIGH (arg0),
14145 t = force_fit_type_double (type, low, high, -1,
14146 overflow | TREE_OVERFLOW (arg0));
14151 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14152 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14158 gcc_unreachable ();
14164 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14165 constant. TYPE is the type of the result. */
14168 fold_not_const (tree arg0, tree type)
14170 tree t = NULL_TREE;
14172 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14174 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14175 ~TREE_INT_CST_HIGH (arg0), 0,
14176 TREE_OVERFLOW (arg0));
14181 /* Given CODE, a relational operator, the target type, TYPE and two
14182 constant operands OP0 and OP1, return the result of the
14183 relational operation. If the result is not a compile time
14184 constant, then return NULL_TREE. */
/* Evaluate a comparison CODE over the constants OP0 and OP1, producing
   a boolean constant of TYPE, or NULL_TREE when the result is not a
   compile-time constant.  */
14187 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14189   int result, invert;
14191   /* From here on, the only cases we handle are when the result is
14192      known to be a constant.  */
14194   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14196       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14197       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14199       /* Handle the cases where either operand is a NaN.  */
14200       if (real_isnan (c0) || real_isnan (c1))
14210 	    case UNORDERED_EXPR:
/* Trapping comparisons with a NaN may raise an exception; don't fold
   them away.  */
14224 	      if (flag_trapping_math)
14230 	      gcc_unreachable ();
14233 	  return constant_boolean_node (result, type);
14236       return constant_boolean_node (real_compare (code, c0, c1), type);
14239   /* Handle equality/inequality of complex constants.  */
14240   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14242       tree rcond = fold_relational_const (code, type,
14243 					  TREE_REALPART (op0),
14244 					  TREE_REALPART (op1));
14245       tree icond = fold_relational_const (code, type,
14246 					  TREE_IMAGPART (op0),
14247 					  TREE_IMAGPART (op1));
14248       if (code == EQ_EXPR)
14249 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14250       else if (code == NE_EXPR)
14251 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14256   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14258      To compute GT, swap the arguments and do LT.
14259      To compute GE, do LT and invert the result.
14260      To compute LE, swap the arguments, do LT and invert the result.
14261      To compute NE, do EQ and invert the result.
14263      Therefore, the code below must handle only EQ and LT.  */
14265   if (code == LE_EXPR || code == GT_EXPR)
14270       code = swap_tree_comparison (code);
14273   /* Note that it is safe to invert for real values here because we
14274      have already handled the one case that it matters.  */
14277   if (code == NE_EXPR || code == GE_EXPR)
14280       code = invert_tree_comparison (code, false);
14283   /* Compute a result for LT or EQ if args permit;
14284      Otherwise return T.  */
14285   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14287       if (code == EQ_EXPR)
14288 	result = tree_int_cst_equal (op0, op1);
14289       else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14290 	result = INT_CST_LT_UNSIGNED (op0, op1);
14292 	result = INT_CST_LT (op0, op1);
14299   return constant_boolean_node (result, type);
14302 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14303 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14307 fold_build_cleanup_point_expr (tree type, tree expr)
14309 /* If the expression does not have side effects then we don't have to wrap
14310 it with a cleanup point expression. */
14311 if (!TREE_SIDE_EFFECTS (expr))
14314 /* If the expression is a return, check to see if the expression inside the
14315 return has no side effects or the right hand side of the modify expression
14316 inside the return. If either don't have side effects set we don't need to
14317 wrap the expression in a cleanup point expression. Note we don't check the
14318 left hand side of the modify because it should always be a return decl. */
14319 if (TREE_CODE (expr) == RETURN_EXPR)
14321 tree op = TREE_OPERAND (expr, 0);
14322 if (!op || !TREE_SIDE_EFFECTS (op))
14324 op = TREE_OPERAND (op, 1);
14325 if (!TREE_SIDE_EFFECTS (op))
14329 return build1 (CLEANUP_POINT_EXPR, type, expr);
14332 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14333 of an indirection through OP0, or NULL_TREE if no simplification is
/* Shared worker for build_fold_indirect_ref and fold_indirect_ref:
   simplify an indirection of type TYPE through pointer OP0, returning
   NULL_TREE when no simplification applies.  */
14337 fold_indirect_ref_1 (tree type, tree op0)
14343   subtype = TREE_TYPE (sub);
14344   if (!POINTER_TYPE_P (subtype))
/* Dereferencing a take-address: peel off the ADDR_EXPR.  */
14347   if (TREE_CODE (sub) == ADDR_EXPR)
14349       tree op = TREE_OPERAND (sub, 0);
14350       tree optype = TREE_TYPE (op);
14351       /* *&CONST_DECL -> to the value of the const decl.  */
14352       if (TREE_CODE (op) == CONST_DECL)
14353 	return DECL_INITIAL (op);
14354       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
14355       if (type == optype)
14357 	  tree fop = fold_read_from_constant_string (op);
14363       /* *(foo *)&fooarray => fooarray[0] */
14364       else if (TREE_CODE (optype) == ARRAY_TYPE
14365 	       && type == TREE_TYPE (optype))
14367 	  tree type_domain = TYPE_DOMAIN (optype);
14368 	  tree min_val = size_zero_node;
14369 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
14370 	    min_val = TYPE_MIN_VALUE (type_domain);
14371 	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14373       /* *(foo *)&complexfoo => __real__ complexfoo */
14374       else if (TREE_CODE (optype) == COMPLEX_TYPE
14375 	       && type == TREE_TYPE (optype))
14376 	return fold_build1 (REALPART_EXPR, type, op);
14377       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14378       else if (TREE_CODE (optype) == VECTOR_TYPE
14379 	       && type == TREE_TYPE (optype))
14381 	  tree part_width = TYPE_SIZE (type);
14382 	  tree index = bitsize_int (0);
14383 	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14387   /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14388   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14389       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14391       tree op00 = TREE_OPERAND (sub, 0);
14392       tree op01 = TREE_OPERAND (sub, 1);
14396       op00type = TREE_TYPE (op00);
14397       if (TREE_CODE (op00) == ADDR_EXPR
14398 	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14399 	  && type == TREE_TYPE (TREE_TYPE (op00type)))
/* The offset must be exactly one element: the imaginary part.  */
14401 	  tree size = TYPE_SIZE_UNIT (type);
14402 	  if (tree_int_cst_equal (size, op01))
14403 	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14407   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14408   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14409       && type == TREE_TYPE (TREE_TYPE (subtype)))
14412       tree min_val = size_zero_node;
14413       sub = build_fold_indirect_ref (sub);
14414       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14415       if (type_domain && TYPE_MIN_VALUE (type_domain))
14416 	min_val = TYPE_MIN_VALUE (type_domain);
14417       return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
14423 /* Builds an expression for an indirection through T, simplifying some
14427 build_fold_indirect_ref (tree t)
14429 tree type = TREE_TYPE (TREE_TYPE (t));
14430 tree sub = fold_indirect_ref_1 (type, t);
14435 return build1 (INDIRECT_REF, type, t);
14438 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14441 fold_indirect_ref (tree t)
14443 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
14451 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14452 whose result is ignored. The type of the returned tree need not be
14453 the same as the original expression. */
/* Strip T down to the sub-expressions that actually carry side effects;
   a side-effect-free T folds to integer_zero_node since its value is
   ignored.  */
14456 fold_ignored_result (tree t)
14458   if (!TREE_SIDE_EFFECTS (t))
14459     return integer_zero_node;
/* Repeatedly peel wrappers whose discarded operands have no side
   effects.  */
14462     switch (TREE_CODE_CLASS (TREE_CODE (t)))
14465 	t = TREE_OPERAND (t, 0);
14469       case tcc_comparison:
/* Keep whichever comparison operand still has side effects.  */
14470 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14471 	  t = TREE_OPERAND (t, 0);
14472 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14473 	  t = TREE_OPERAND (t, 1);
14478       case tcc_expression:
14479 	switch (TREE_CODE (t))
14481 	  case COMPOUND_EXPR:
14482 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14484 	    t = TREE_OPERAND (t, 0);
14488 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14489 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14491 	    t = TREE_OPERAND (t, 0);
14504 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
14505 This can only be applied to objects of a sizetype. */
/* Round VALUE up to the nearest multiple of DIVISOR (DIVISOR > 0);
   VALUE must be of a sizetype.  */
14508 round_up (tree value, int divisor)
14510   tree div = NULL_TREE;
14512   gcc_assert (divisor > 0);
14516   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14517      have to do anything.  Only do this when we are not given a const,
14518      because in that case, this check is more expensive than just
14520   if (TREE_CODE (value) != INTEGER_CST)
14522       div = build_int_cst (TREE_TYPE (value), divisor);
14524       if (multiple_of_p (TREE_TYPE (value), value, div))
14528   /* If divisor is a power of two, simplify this to bit manipulation.  */
14529   if (divisor == (divisor & -divisor))
/* Constant case: do the rounding arithmetic directly on the words.  */
14531       if (TREE_CODE (value) == INTEGER_CST)
14533 	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
14534 	  unsigned HOST_WIDE_INT high;
14537 	  if ((low & (divisor - 1)) == 0)
14540 	  overflow_p = TREE_OVERFLOW (value);
14541 	  high = TREE_INT_CST_HIGH (value);
14542 	  low &= ~(divisor - 1);
14551 	  return force_fit_type_double (TREE_TYPE (value), low, high,
/* Non-constant power of two: (value + divisor-1) & -divisor.  */
14558 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
14559 	  value = size_binop (PLUS_EXPR, value, t);
14560 	  t = build_int_cst (TREE_TYPE (value), -divisor);
14561 	  value = size_binop (BIT_AND_EXPR, value, t);
/* General case: ceil-divide, then multiply back.  */
14567       div = build_int_cst (TREE_TYPE (value), divisor);
14568       value = size_binop (CEIL_DIV_EXPR, value, div);
14569       value = size_binop (MULT_EXPR, value, div);
14575 /* Likewise, but round down. */
/* NOTE(review): mirror of round_up above, rounding VALUE down to a
   multiple of DIVISOR.  Same caveat: this excerpt is gapped.  */
14578 round_down (tree value, int divisor)
14580 tree div = NULL_TREE;
/* Rounding to a non-positive multiple is meaningless.  */
14582 gcc_assert (divisor > 0);
14586 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14587 have to do anything. Only do this when we are not given a const,
14588 because in that case, this check is more expensive than just
14590 if (TREE_CODE (value) != INTEGER_CST)
14592 div = build_int_cst (TREE_TYPE (value), divisor);
14594 if (multiple_of_p (TREE_TYPE (value), value, div))
14598 /* If divisor is a power of two, simplify this to bit manipulation. */
14599 if (divisor == (divisor & -divisor))
/* Power-of-two case: value &= -divisor clears the low bits, which is
   exactly rounding down.  */
14603 t = build_int_cst (TREE_TYPE (value), -divisor);
14604 value = size_binop (BIT_AND_EXPR, value, t);
/* General divisor: value = floor (value / divisor) * divisor.  */
14609 div = build_int_cst (TREE_TYPE (value), divisor);
14610 value = size_binop (FLOOR_DIV_EXPR, value, div);
14611 value = size_binop (MULT_EXPR, value, div);
14617 /* Returns the pointer to the base of the object addressed by EXP and
14618 extracts the information about the offset of the access, storing it
14619 to PBITPOS and POFFSET. */
/* NOTE(review): excerpt is gapped — the return type, braces and the
   non-ADDR_EXPR path's assignments (other than *poffset) are elided.  */
14622 split_address_to_core_and_offset (tree exp,
14623 HOST_WIDE_INT *pbitpos, tree *poffset)
14626 enum machine_mode mode;
14627 int unsignedp, volatilep;
14628 HOST_WIDE_INT bitsize;
14630 if (TREE_CODE (exp) == ADDR_EXPR)
/* &object: peel the ADDR_EXPR and let get_inner_reference find the
   innermost base plus the constant (PBITPOS) and variable (POFFSET)
   parts of the offset; re-fold the address of that base.  */
14632 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14633 poffset, &mode, &unsignedp, &volatilep,
14635 core = fold_addr_expr (core);
/* Otherwise (path partly elided here) there is no separate variable
   offset to report.  */
14641 *poffset = NULL_TREE;
14647 /* Returns true if addresses of E1 and E2 differ by a constant, false
14648 otherwise. If they do, E1 - E2 is stored in *DIFF. */
/* NOTE(review): excerpt is gapped — return statements and braces between
   the visible statements are elided.  */
14651 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14654 HOST_WIDE_INT bitpos1, bitpos2;
14655 tree toffset1, toffset2, tdiff, type;
/* Decompose both addresses into core object + bit position + variable
   offset, using the helper above.  */
14657 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14658 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
/* The difference is only a well-defined byte constant when both bit
   positions are byte-aligned and both addresses share the same base.  */
14660 if (bitpos1 % BITS_PER_UNIT != 0
14661 || bitpos2 % BITS_PER_UNIT != 0
14662 || !operand_equal_p (core1, core2, 0))
14665 if (toffset1 && toffset2)
/* Both have variable offsets: their folded difference must itself be a
   constant fitting a HOST_WIDE_INT.  */
14667 type = TREE_TYPE (toffset1);
14668 if (type != TREE_TYPE (toffset2))
14669 toffset2 = fold_convert (type, toffset2);
14671 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14672 if (!cst_and_fits_in_hwi (tdiff))
14675 *diff = int_cst_value (tdiff);
14677 else if (toffset1 || toffset2)
14679 /* If only one of the offsets is non-constant, the difference cannot
/* Finally fold in the byte difference of the constant bit positions.  */
14686 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14690 /* Simplify the floating point expression EXP when the sign of the
14691 result is not significant. Return NULL_TREE if no simplification
14695 fold_strip_sign_ops (tree exp)
14699 switch (TREE_CODE (exp))
14703 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14704 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
14708 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
14710 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14711 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14712 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
14713 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
14714 arg0 ? arg0 : TREE_OPERAND (exp, 0),
14715 arg1 ? arg1 : TREE_OPERAND (exp, 1));
14718 case COMPOUND_EXPR:
14719 arg0 = TREE_OPERAND (exp, 0);
14720 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14722 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
14726 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14727 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
14729 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
14730 arg0 ? arg0 : TREE_OPERAND (exp, 1),
14731 arg1 ? arg1 : TREE_OPERAND (exp, 2));
14736 const enum built_in_function fcode = builtin_mathfn_code (exp);
14739 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14740 /* Strip copysign function call, return the 1st argument. */
14741 arg0 = CALL_EXPR_ARG (exp, 0);
14742 arg1 = CALL_EXPR_ARG (exp, 1);
14743 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
14746 /* Strip sign ops from the argument of "odd" math functions. */
14747 if (negate_mathfn_p (fcode))
14749 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
14751 return build_call_expr (get_callee_fndecl (exp), 1, arg0);