1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
52 #include "coretypes.h"
64 #include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
/* NOTE(review): the enumerator list and the closing `};' of this enum
   are missing from this excerpt -- restore from upstream fold-const.c
   before building.  */
enum comparison_code {
/* Forward declarations for the static helpers defined later in this
   file.
   NOTE(review): several prototypes below are missing their continuation
   lines in this excerpt (decode_field_reference, merge_ranges,
   fold_binary_op_with_conditional_arg, fold_mathfn_compare) -- restore
   from upstream fold-const.c before building.  */
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
				 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

/* The low half of X, as an unsigned half-word digit.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
/* The high half of X, shifted down into the low half.  */
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
/* The radix of the 4-digit representation: 2**(HOST_BITS_PER_WIDE_INT/2).  */
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
168 /* Unpack a two-word integer into 4 words.
169 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
170 WORDS points to the array of HOST_WIDE_INTs. */
173 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
175 words[0] = LOWPART (low);
176 words[1] = HIGHPART (low);
177 words[2] = LOWPART (hi);
178 words[3] = HIGHPART (hi);
181 /* Pack an array of 4 words into a two-word integer.
182 WORDS points to the array of words.
183 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
186 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
189 *low = words[0] + words[1] * BASE;
190 *hi = words[2] + words[3] * BASE;
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

/* NOTE(review): this excerpt is missing interleaved lines of the
   original (return type, braces, some declarations and statements);
   restore from upstream fold-const.c before building.  */
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
  /* Remember the inputs so overflow can be detected by comparison.  */
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
  prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      if (prec < HOST_BITS_PER_WIDE_INT)
	l1 &= ~((HOST_WIDE_INT) (-1) << prec);

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
		<< (prec - HOST_BITS_PER_WIDE_INT - 1)))
	h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
  else if (prec == HOST_BITS_PER_WIDE_INT)
      if ((HOST_WIDE_INT)l1 < 0)
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	  l1 |= (HOST_WIDE_INT)(-1) << prec;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if,
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is >0 and signed overflow occurs
	or OVERFLOWABLE is <0 and any overflow occurs
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

/* NOTE(review): the return type line, braces and part of the overflow
   condition are missing from this excerpt; restore from upstream
   fold-const.c before building.  */
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
		       HOST_WIDE_INT high, int overflowable,
  int sign_extended_type;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* Truncate/extend in place; OVERFLOW records whether the value changed.  */
  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
	  || (overflowable > 0 && sign_extended_type))
	  tree t = make_node (INTEGER_CST);
	  TREE_INT_CST_LOW (t) = low;
	  TREE_INT_CST_HIGH (t) = high;
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t) = 1;

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

/* NOTE(review): the return type, trailing UNSIGNED_P parameter line,
   braces and the stores to *LV/*HV are missing from this excerpt;
   restore from upstream fold-const.c before building.  */
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
  unsigned HOST_WIDE_INT l;

  /* (l < l1) is the carry out of the low-word addition.  */
  h = h1 + h2 + (l < l1);

    /* Unsigned overflow: the high word wrapped.  */
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;

  /* Signed overflow per the OVERFLOW_SUM_SIGN sign test.  */
  return OVERFLOW_SUM_SIGN (h1, h2, h);
341 /* Negate a doubleword integer with doubleword result.
342 Return nonzero if the operation overflows, assuming it's signed.
343 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
344 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
347 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
348 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
354 return (*hv & h1) < 0;
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

/* NOTE(review): the return type, UNSIGNED_P parameter line, braces and
   several statements are missing from this excerpt; restore from
   upstream fold-const.c before building.  */
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  /* Split each operand into 4 half-word digits (see encode).  */
  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  /* Schoolbook multiplication of the 4-digit representations.  */
  for (i = 0; i < 4; i++)
      for (j = 0; j < 4; j++)
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);

  /* Low 4 digits are the result; the high 4 digits are the part that
     overflowed a doubleword.  */
  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
    neg_double (l2, h2, &neglow, &neghigh);
    add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    neg_double (l1, h1, &neglow, &neghigh);
    add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

/* NOTE(review): the return type, braces and some statements are missing
   from this excerpt; restore from upstream fold-const.c before
   building.  */
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
  unsigned HOST_WIDE_INT signmask;

  /* A negative count means shift right instead.  */
    rshift_double (l1, h1, -count, prec, lv, hv, arith);

  if (SHIFT_COUNT_TRUNCATED)

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
  else if (count >= HOST_BITS_PER_WIDE_INT)
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      /* The double right-shift below avoids undefined behavior when
	 COUNT == 0 (a shift by HOST_BITS_PER_WIDE_INT would be UB).  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
  else if (prec >= HOST_BITS_PER_WIDE_INT)
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

/* NOTE(review): the return type, trailing parameter line, braces and
   some statements are missing from this excerpt; restore from upstream
   fold-const.c before building.  */
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
  unsigned HOST_WIDE_INT signmask;

  /* Replicated sign bit of H1, used to fill vacated bits when shifting
     arithmetically.  */
    ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))

  if (SHIFT_COUNT_TRUNCATED)

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
  else if (count >= HOST_BITS_PER_WIDE_INT)
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
	     /* Double shift avoids UB when COUNT == 0.  */
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

/* NOTE(review): the return type, braces, count normalization and the
   final stores are missing from this excerpt; restore from upstream
   fold-const.c before building.  */
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  /* A rotate is the OR of a left shift and the complementary
     right shift of the same value.  */
  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

/* NOTE(review): the return type, braces, count normalization and the
   final stores are missing from this excerpt; restore from upstream
   fold-const.c before building.  */
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  /* A right rotate is the OR of a right shift and the complementary
     left shift of the same value.  */
  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

/* NOTE(review): this excerpt is missing many interleaved lines of the
   original (return type, braces, declarations, several statements and
   switch labels); restore from upstream fold-const.c before building.  */
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;

  /* Division by zero: flag overflow and divide by 1 instead so the
     code below still produces a defined result.  */
  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
      /* (minimum integer) / (-1) is the only overflow case.  */
      if (neg_double (lnum, hnum, &lnum, &hnum)
	  && ((HOST_WIDE_INT) lden & hden) == -1)
      neg_double (lden, hden, &lden, &hden);

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      /* This unsigned division rounds toward zero.  */
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
	{			/* scale divisor and dividend */
	  for (i = 0; i <= 4 - 1; i++)
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	  for (i = 0; i <= 4 - 1; i++)
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;

      /* Main loop: compute one quotient digit per iteration
	 (Knuth's Algorithm D).  */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  for (j = 0; j <= den_hi_sig; j++)
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
	      num [num_hi_sig] += carry;

	  /* Store the quotient digit.  */

  decode (quo, lquo, hquo);

  /* If result is negative, make it so.  */
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */

    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	  /* Quotient rounds toward -infinity: subtract one.  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,

    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	  /* Quotient rounds toward +infinity: add one.  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,

    case ROUND_MOD_EXPR:	/* round to closest integer */
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	    /* Round the quotient away from zero.  */
	    add_double (*lquo, *hquo,
			(HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

/* NOTE(review): the return type, braces and the `return NULL_TREE;'
   statement after the remainder test are missing from this excerpt;
   restore from upstream fold-const.c before building.  */
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
      /* Re-interpret the offset in the signed variant of the pointer
	 type so that negative offsets survive the division.  */
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)

  return build_int_cst_wide (type, quol, quoh);
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
935 /* Start deferring overflow warnings. We could use a stack here to
936 permit nested calls, but at present it is not necessary. */
939 fold_defer_overflow_warnings (void)
941 ++fold_deferring_overflow_warnings;
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

/* NOTE(review): the return type, braces, local declarations and a few
   statements (including the `else' before expr_location) are missing
   from this excerpt; restore from upstream fold-const.c before
   building.  */
fold_undefer_overflow_warnings (bool issue, tree stmt, int code)
  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
      /* Still deferred at an outer level: just lower the deferred
	 severity if this one is stricter.  */
      if (fold_deferred_overflow_warning != NULL
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = code;

  /* Take ownership of (and clear) the pending warning message.  */
  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))

  if (stmt == NULL_TREE || !expr_has_location (stmt))
    locus = input_location;
    locus = expr_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
991 /* Stop deferring overflow warnings, ignoring any deferred
995 fold_undefer_and_ignore_overflow_warnings (void)
997 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
1000 /* Whether we are deferring overflow warnings. */
1003 fold_deferring_overflow_warnings_p (void)
1005 return fold_deferring_overflow_warnings > 0;
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

/* NOTE(review): the return type and braces are missing from this
   excerpt; restore from upstream fold-const.c before building.  */
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
  /* This rule only applies when signed overflow really is undefined.  */
  gcc_assert (!flag_wrapv && !flag_trapv);
  if (fold_deferring_overflow_warnings > 0)
      /* Keep only the strictest (lowest-numbered) deferred warning.  */
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

/* NOTE(review): the return type, braces, `switch' line and the default
   return are missing from this excerpt; restore from upstream
   fold-const.c before building.  */
negate_mathfn_p (enum built_in_function code)
    /* These functions are odd, so a negation can always be moved
       through the call.  */
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):

    /* The rint family is only odd when rounding cannot depend on the
       sign of the argument, i.e. when -frounding-math is not in
       effect.  */
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;
/* Check whether we may negate an integer constant T without causing
   overflow.  */

/* NOTE(review): the return type, braces and a few statements are
   missing from this excerpt; restore from upstream fold-const.c before
   building.  */
may_negate_without_overflow_p (const_tree t)
  unsigned HOST_WIDE_INT val;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
      if (TREE_INT_CST_LOW (t) != 0)
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  /* Only the most negative value -- sign bit alone -- overflows on
     negation.  */
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

/* NOTE(review): the return type, braces, several case labels and some
   statements are missing from this excerpt; restore from upstream
   fold-const.c before building.  */
negate_expr_p (tree t)
  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
      if (TYPE_OVERFLOW_WRAPS (type))

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

      /* A complex constant is negatable if both parts are.  */
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

      return negate_expr_p (TREE_OPERAND (t, 0));

      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

      if (TYPE_UNSIGNED (TREE_TYPE (t)))

      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	  tree tem = strip_float_extensions (t);
	  return negate_expr_p (tem);

      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));

      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	  tree op1 = TREE_OPERAND (t, 1);
	  /* Shift count equals precision-1, i.e. a sign-bit extraction.  */
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
1226 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1227 simplification is possible.
1228 If negate_expr_p would return true for T, NULL_TREE will never be
/* NOTE(review): this excerpt elides many lines (case labels, closing
   braces, default returns); comments below describe only the visible
   code.  Each transformation here mirrors a predicate case in
   negate_expr_p above.  */
1232 fold_negate_expr (tree t)
1234 tree type = TREE_TYPE (t);
1237 switch (TREE_CODE (t))
1239 /* Convert - (~A) to A + 1. */
1241 if (INTEGRAL_TYPE_P (type))
1242 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1243 build_int_cst (type, 1));
/* Integer constant: negate, but keep the result only when overflow
   state is unchanged or the type does not trap on overflow.  */
1247 tem = fold_negate_const (t, type);
1248 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1249 || !TYPE_OVERFLOW_TRAPS (type))
1254 tem = fold_negate_const (t, type);
1255 /* Two's complement FP formats, such as c4x, may overflow. */
1256 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1262 tree rpart = negate_expr (TREE_REALPART (t));
1263 tree ipart = negate_expr (TREE_IMAGPART (t));
/* Build a complex constant only if both halves folded to constants.  */
1265 if ((TREE_CODE (rpart) == REAL_CST
1266 && TREE_CODE (ipart) == REAL_CST)
1267 || (TREE_CODE (rpart) == INTEGER_CST
1268 && TREE_CODE (ipart) == INTEGER_CST))
1269 return build_complex (type, rpart, ipart);
1274 if (negate_expr_p (t))
1275 return fold_build2 (COMPLEX_EXPR, type,
1276 fold_negate_expr (TREE_OPERAND (t, 0)),
1277 fold_negate_expr (TREE_OPERAND (t, 1)));
1281 if (negate_expr_p (t))
1282 return fold_build1 (CONJ_EXPR, type,
1283 fold_negate_expr (TREE_OPERAND (t, 0)));
/* Double negation cancels: - (- A) -> A.  */
1287 return TREE_OPERAND (t, 0);
1290 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1291 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1293 /* -(A + B) -> (-B) - A. */
1294 if (negate_expr_p (TREE_OPERAND (t, 1))
1295 && reorder_operands_p (TREE_OPERAND (t, 0),
1296 TREE_OPERAND (t, 1)))
1298 tem = negate_expr (TREE_OPERAND (t, 1));
1299 return fold_build2 (MINUS_EXPR, type,
1300 tem, TREE_OPERAND (t, 0));
1303 /* -(A + B) -> (-A) - B. */
1304 if (negate_expr_p (TREE_OPERAND (t, 0)))
1306 tem = negate_expr (TREE_OPERAND (t, 0));
1307 return fold_build2 (MINUS_EXPR, type,
1308 tem, TREE_OPERAND (t, 1));
1314 /* - (A - B) -> B - A */
1315 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1316 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1317 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1318 return fold_build2 (MINUS_EXPR, type,
1319 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1323 if (TYPE_UNSIGNED (type))
/* Multiplication/division-like node: push the negation into whichever
   operand negates cleanly (operand 1 preferred, then operand 0).  */
1329 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1331 tem = TREE_OPERAND (t, 1);
1332 if (negate_expr_p (tem))
1333 return fold_build2 (TREE_CODE (t), type,
1334 TREE_OPERAND (t, 0), negate_expr (tem));
1335 tem = TREE_OPERAND (t, 0);
1336 if (negate_expr_p (tem))
1337 return fold_build2 (TREE_CODE (t), type,
1338 negate_expr (tem), TREE_OPERAND (t, 1));
1342 case TRUNC_DIV_EXPR:
1343 case ROUND_DIV_EXPR:
1344 case FLOOR_DIV_EXPR:
1346 case EXACT_DIV_EXPR:
1347 /* In general we can't negate A / B, because if A is INT_MIN and
1348 B is 1, we may turn this into INT_MIN / -1 which is undefined
1349 and actually traps on some architectures. But if overflow is
1350 undefined, we can negate, because - (INT_MIN / 1) is an
1352 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1354 const char * const warnmsg = G_("assuming signed overflow does not "
1355 "occur when negating a division");
1356 tem = TREE_OPERAND (t, 1);
1357 if (negate_expr_p (tem))
/* Emit a -Wstrict-overflow style note unless the divisor is a
   constant other than 1 (then negation cannot overflow).  */
1359 if (INTEGRAL_TYPE_P (type)
1360 && (TREE_CODE (tem) != INTEGER_CST
1361 || integer_onep (tem)))
1362 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1363 return fold_build2 (TREE_CODE (t), type,
1364 TREE_OPERAND (t, 0), negate_expr (tem));
1366 tem = TREE_OPERAND (t, 0);
1367 if (negate_expr_p (tem))
1369 if (INTEGRAL_TYPE_P (type)
1370 && (TREE_CODE (tem) != INTEGER_CST
1371 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1372 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1373 return fold_build2 (TREE_CODE (t), type,
1374 negate_expr (tem), TREE_OPERAND (t, 1));
1380 /* Convert -((double)float) into (double)(-float). */
1381 if (TREE_CODE (type) == REAL_TYPE)
1383 tem = strip_float_extensions (t);
1384 if (tem != t && negate_expr_p (tem))
1385 return fold_convert (type, negate_expr (tem));
1390 /* Negate -f(x) as f(-x). */
1391 if (negate_mathfn_p (builtin_mathfn_code (t))
1392 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1396 fndecl = get_callee_fndecl (t);
1397 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1398 return build_call_expr (fndecl, 1, arg);
1403 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1404 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1406 tree op1 = TREE_OPERAND (t, 1);
/* Only when the shift count is exactly precision - 1, i.e. the
   shift extracts the sign bit.  */
1407 if (TREE_INT_CST_HIGH (op1) == 0
1408 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1409 == TREE_INT_CST_LOW (op1))
/* Redo the shift in the opposite signedness, then convert back.  */
1411 tree ntype = TYPE_UNSIGNED (type)
1412 ? signed_type_for (type)
1413 : unsigned_type_for (type);
1414 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1415 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1416 return fold_convert (type, temp);
1428 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1429 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1430 return NULL_TREE. */
1433 negate_expr (tree t)
/* Remember the original type before STRIP_SIGN_NOPS peels conversions,
   so the result can be converted back to it.  */
1440 type = TREE_TYPE (t);
1441 STRIP_SIGN_NOPS (t);
1443 tem = fold_negate_expr (t);
/* No fold applied (NOTE(review): the NULL check is elided here) —
   fall back to an explicit NEGATE_EXPR node.  */
1445 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1446 return fold_convert (type, tem);
1449 /* Split a tree IN into a constant, literal and variable parts that could be
1450 combined with CODE to make IN. "constant" means an expression with
1451 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1452 commutative arithmetic operation. Store the constant part into *CONP,
1453 the literal in *LITP and return the variable part. If a part isn't
1454 present, set it to null. If the tree does not decompose in this way,
1455 return the entire tree as the variable part and the other parts as null.
1457 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1458 case, we negate an operand that was subtracted. Except if it is a
1459 literal for which we use *MINUS_LITP instead.
1461 If NEGATE_P is true, we are negating all of IN, again except a literal
1462 for which we use *MINUS_LITP instead.
1464 If IN is itself a literal or constant, return it as appropriate.
1466 Note that we do not guarantee that any of the three values will be the
1467 same type as IN, but they will have the same signedness and mode. */
/* NOTE(review): the output-pointer initializations and several branch
   bodies are elided in this excerpt.  */
1470 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1471 tree *minus_litp, int negate_p)
1479 /* Strip any conversions that don't change the machine mode or signedness. */
1480 STRIP_SIGN_NOPS (in);
1482 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1484 else if (TREE_CODE (in) == code
1485 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1486 /* We can associate addition and subtraction together (even
1487 though the C standard doesn't say so) for integers because
1488 the value is not affected. For reals, the value might be
1489 affected, so we can't. */
1490 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1491 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1493 tree op0 = TREE_OPERAND (in, 0);
1494 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p: operand 1 of a MINUS_EXPR is implicitly negated; the
   neg_*_p flags record which extracted part inherits that negation.  */
1495 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1496 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1498 /* First see if either of the operands is a literal, then a constant. */
1499 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1500 *litp = op0, op0 = 0;
1501 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1502 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1504 if (op0 != 0 && TREE_CONSTANT (op0))
1505 *conp = op0, op0 = 0;
1506 else if (op1 != 0 && TREE_CONSTANT (op1))
1507 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1509 /* If we haven't dealt with either operand, this is not a case we can
1510 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1511 if (op0 != 0 && op1 != 0)
1516 var = op1, neg_var_p = neg1_p;
1518 /* Now do any needed negations. */
1520 *minus_litp = *litp, *litp = 0;
1522 *conp = negate_expr (*conp);
1524 var = negate_expr (var);
1526 else if (TREE_CONSTANT (in))
/* NEGATE_P handling: a negated literal moves between *LITP and
   *MINUS_LITP; other parts are negated explicitly.  */
1534 *minus_litp = *litp, *litp = 0;
1535 else if (*minus_litp)
1536 *litp = *minus_litp, *minus_litp = 0;
1537 *conp = negate_expr (*conp);
1538 var = negate_expr (var);
1544 /* Re-associate trees split by the above function. T1 and T2 are either
1545 expressions to associate or null. Return the new expression, if any. If
1546 we build an operation, do it in TYPE and with CODE. */
/* NOTE(review): the early returns for T1/T2 being null are elided in
   this excerpt.  */
1549 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1556 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1557 try to fold this since we will have infinite recursion. But do
1558 deal with any NEGATE_EXPRs. */
1559 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1560 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1562 if (code == PLUS_EXPR)
/* a + (-b) -> a - b, in either operand order; a + 0 -> a.  */
1564 if (TREE_CODE (t1) == NEGATE_EXPR)
1565 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1566 fold_convert (type, TREE_OPERAND (t1, 0)));
1567 else if (TREE_CODE (t2) == NEGATE_EXPR)
1568 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1569 fold_convert (type, TREE_OPERAND (t2, 0)));
1570 else if (integer_zerop (t2))
1571 return fold_convert (type, t1);
1573 else if (code == MINUS_EXPR)
1575 if (integer_zerop (t2))
1576 return fold_convert (type, t1);
/* Unfolded build2 here, to avoid recursing back into fold.  */
1579 return build2 (code, type, fold_convert (type, t1),
1580 fold_convert (type, t2));
/* Safe to fold: neither operand can re-trigger this association.  */
1583 return fold_build2 (code, type, fold_convert (type, t1),
1584 fold_convert (type, t2));
1587 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1588 for use in int_const_binop, size_binop and size_diffop. */
/* Both types must be integer or pointer types (NOTE(review): the
   failing returns are elided here), then they match when signedness,
   precision and machine mode all agree.  */
1591 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
1593 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1595 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1610 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1611 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1612 && TYPE_MODE (type1) == TYPE_MODE (type2);
1616 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1617 to produce a new constant. Return NULL_TREE if we don't know how
1618 to evaluate CODE at compile-time.
1620 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): many case labels, break statements and the switch
   scaffolding are elided in this excerpt.  Each operand is a
   double-word value held as a (low, high) pair of HOST_WIDE_INTs.  */
1623 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1625 unsigned HOST_WIDE_INT int1l, int2l;
1626 HOST_WIDE_INT int1h, int2h;
1627 unsigned HOST_WIDE_INT low;
/* garbagel/garbageh receive the unused half of a div/mod result.  */
1629 unsigned HOST_WIDE_INT garbagel;
1630 HOST_WIDE_INT garbageh;
1632 tree type = TREE_TYPE (arg1);
1633 int uns = TYPE_UNSIGNED (type);
1635 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1638 int1l = TREE_INT_CST_LOW (arg1);
1639 int1h = TREE_INT_CST_HIGH (arg1);
1640 int2l = TREE_INT_CST_LOW (arg2);
1641 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise operations act independently on each half.  */
1646 low = int1l | int2l, hi = int1h | int2h;
1650 low = int1l ^ int2l, hi = int1h ^ int2h;
1654 low = int1l & int2l, hi = int1h & int2h;
1660 /* It's unclear from the C standard whether shifts can overflow.
1661 The following code ignores overflow; perhaps a C standard
1662 interpretation ruling is needed. */
1663 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1670 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1675 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction is negate-then-add; overflow detected on the sum sign.  */
1679 neg_double (int2l, int2h, &low, &hi);
1680 add_double (int1l, int1h, low, hi, &low, &hi);
1681 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1685 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1688 case TRUNC_DIV_EXPR:
1689 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1690 case EXACT_DIV_EXPR:
1691 /* This is a shortcut for a common special case. */
1692 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1693 && !TREE_OVERFLOW (arg1)
1694 && !TREE_OVERFLOW (arg2)
1695 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
/* Both operands fit in one nonnegative word: use host division.  */
1697 if (code == CEIL_DIV_EXPR)
1700 low = int1l / int2l, hi = 0;
1704 /* ... fall through ... */
1706 case ROUND_DIV_EXPR:
/* Division by zero: not foldable (NOTE(review): return elided).  */
1707 if (int2h == 0 && int2l == 0)
1709 if (int2h == 0 && int2l == 1)
1711 low = int1l, hi = int1h;
1714 if (int1l == int2l && int1h == int2h
1715 && ! (int1l == 0 && int1h == 0))
1720 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1721 &low, &hi, &garbagel, &garbageh);
1724 case TRUNC_MOD_EXPR:
1725 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1726 /* This is a shortcut for a common special case. */
1727 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1728 && !TREE_OVERFLOW (arg1)
1729 && !TREE_OVERFLOW (arg2)
1730 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1732 if (code == CEIL_MOD_EXPR)
1734 low = int1l % int2l, hi = 0;
1738 /* ... fall through ... */
1740 case ROUND_MOD_EXPR:
1741 if (int2h == 0 && int2l == 0)
/* For MOD the quotient halves are the discarded "garbage" outputs.  */
1743 overflow = div_and_round_double (code, uns,
1744 int1l, int1h, int2l, int2h,
1745 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compare as unsigned or signed double-words, then pick.  */
1751 low = (((unsigned HOST_WIDE_INT) int1h
1752 < (unsigned HOST_WIDE_INT) int2h)
1753 || (((unsigned HOST_WIDE_INT) int1h
1754 == (unsigned HOST_WIDE_INT) int2h)
1757 low = (int1h < int2h
1758 || (int1h == int2h && int1l < int2l));
1760 if (low == (code == MIN_EXPR))
1761 low = int1l, hi = int1h;
1763 low = int2l, hi = int2h;
1772 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1774 /* Propagate overflow flags ourselves. */
1775 if (((!uns || is_sizetype) && overflow)
1776 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1779 TREE_OVERFLOW (t) = 1;
/* Otherwise let force_fit_type_double truncate and set overflow.  */
1783 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1784 ((!uns || is_sizetype) && overflow)
1785 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1790 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1791 constant. We assume ARG1 and ARG2 have the same data type, or at least
1792 are the same kind of constant and the same machine mode. Return zero if
1793 combining the constants is not allowed in the current operating mode.
1795 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): several declarations, case labels and NULL returns are
   elided in this excerpt.  Dispatches on the constant kind: integer,
   real, then complex.  */
1798 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1800 /* Sanity check for the recursive cases. */
1807 if (TREE_CODE (arg1) == INTEGER_CST)
1808 return int_const_binop (code, arg1, arg2, notrunc);
1810 if (TREE_CODE (arg1) == REAL_CST)
1812 enum machine_mode mode;
1815 REAL_VALUE_TYPE value;
1816 REAL_VALUE_TYPE result;
1820 /* The following codes are handled by real_arithmetic. */
1835 d1 = TREE_REAL_CST (arg1);
1836 d2 = TREE_REAL_CST (arg2);
1838 type = TREE_TYPE (arg1);
1839 mode = TYPE_MODE (type);
1841 /* Don't perform operation if we honor signaling NaNs and
1842 either operand is a NaN. */
1843 if (HONOR_SNANS (mode)
1844 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1847 /* Don't perform operation if it would raise a division
1848 by zero exception. */
1849 if (code == RDIV_EXPR
1850 && REAL_VALUES_EQUAL (d2, dconst0)
1851 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1854 /* If either operand is a NaN, just return it. Otherwise, set up
1855 for floating-point trap; we return an overflow. */
1856 if (REAL_VALUE_ISNAN (d1))
1858 else if (REAL_VALUE_ISNAN (d2))
/* Compute in infinite precision, then round to the target mode;
   INEXACT records whether real_arithmetic lost precision.  */
1861 inexact = real_arithmetic (&value, code, &d1, &d2);
1862 real_convert (&result, mode, &value);
1864 /* Don't constant fold this floating point operation if
1865 the result has overflowed and flag_trapping_math. */
1866 if (flag_trapping_math
1867 && MODE_HAS_INFINITIES (mode)
1868 && REAL_VALUE_ISINF (result)
1869 && !REAL_VALUE_ISINF (d1)
1870 && !REAL_VALUE_ISINF (d2))
1873 /* Don't constant fold this floating point operation if the
1874 result may dependent upon the run-time rounding mode and
1875 flag_rounding_math is set, or if GCC's software emulation
1876 is unable to accurately represent the result. */
1877 if ((flag_rounding_math
1878 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1879 && !flag_unsafe_math_optimizations))
1880 && (inexact || !real_identical (&result, &value)))
1883 t = build_real (type, result);
1885 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1889 if (TREE_CODE (arg1) == COMPLEX_CST)
1891 tree type = TREE_TYPE (arg1);
1892 tree r1 = TREE_REALPART (arg1);
1893 tree i1 = TREE_IMAGPART (arg1);
1894 tree r2 = TREE_REALPART (arg2);
1895 tree i2 = TREE_IMAGPART (arg2);
/* Addition/subtraction fold component-wise.  */
1902 real = const_binop (code, r1, r2, notrunc);
1903 imag = const_binop (code, i1, i2, notrunc);
/* Multiplication: (r1 r2 - i1 i2) + (r1 i2 + i1 r2) i.  */
1907 real = const_binop (MINUS_EXPR,
1908 const_binop (MULT_EXPR, r1, r2, notrunc),
1909 const_binop (MULT_EXPR, i1, i2, notrunc),
1911 imag = const_binop (PLUS_EXPR,
1912 const_binop (MULT_EXPR, r1, i2, notrunc),
1913 const_binop (MULT_EXPR, i1, r2, notrunc),
/* Division: multiply by the conjugate and divide by |arg2|^2.  */
1920 = const_binop (PLUS_EXPR,
1921 const_binop (MULT_EXPR, r2, r2, notrunc),
1922 const_binop (MULT_EXPR, i2, i2, notrunc),
1925 = const_binop (PLUS_EXPR,
1926 const_binop (MULT_EXPR, r1, r2, notrunc),
1927 const_binop (MULT_EXPR, i1, i2, notrunc),
1930 = const_binop (MINUS_EXPR,
1931 const_binop (MULT_EXPR, i1, r2, notrunc),
1932 const_binop (MULT_EXPR, r1, i2, notrunc),
1935 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1936 code = TRUNC_DIV_EXPR;
1938 real = const_binop (code, t1, magsquared, notrunc);
1939 imag = const_binop (code, t2, magsquared, notrunc);
1948 return build_complex (type, real, imag);
1954 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1955 indicates which particular sizetype to create. */
/* KIND indexes the global sizetype_tab table of size types.  */
1958 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1960 return build_int_cst (sizetype_tab[(int) kind], number);
1963 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1964 is a tree code. The type of the result is taken from the operands.
1965 Both must be equivalent integer types, ala int_binop_types_match_p.
1966 If the operands are constant, so is the result. */
1969 size_binop (enum tree_code code, tree arg0, tree arg1)
1971 tree type = TREE_TYPE (arg0);
1973 if (arg0 == error_mark_node || arg1 == error_mark_node)
1974 return error_mark_node;
/* Operand types must match per int_binop_types_match_p.  */
1976 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1979 /* Handle the special case of two integer constants faster. */
1980 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1982 /* And some specific cases even faster than that. */
/* Identity shortcuts (x + 0, 0 + x, x - 0, 1 * x); the returns are
   elided in this excerpt, only the guards survive.  */
1983 if (code == PLUS_EXPR)
1985 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1987 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1990 else if (code == MINUS_EXPR)
1992 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1995 else if (code == MULT_EXPR)
1997 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2001 /* Handle general case of two integer constants. */
2002 return int_const_binop (code, arg0, arg1, 0);
/* Non-constant operands: build and fold a regular binary node.  */
2005 return fold_build2 (code, type, arg0, arg1);
2008 /* Given two values, either both of sizetype or both of bitsizetype,
2009 compute the difference between the two values. Return the value
2010 in signed type corresponding to the type of the operands. */
2013 size_diffop (tree arg0, tree arg1)
2015 tree type = TREE_TYPE (arg0);
2018 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2021 /* If the type is already signed, just do the simple thing. */
2022 if (!TYPE_UNSIGNED (type))
2023 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the (unsigned) operand type.  */
2025 if (type == sizetype)
2027 else if (type == bitsizetype)
2028 ctype = sbitsizetype;
2030 ctype = signed_type_for (type);
2032 /* If either operand is not a constant, do the conversions to the signed
2033 type and subtract. The hardware will do the right thing with any
2034 overflow in the subtraction. */
2035 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2036 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2037 fold_convert (ctype, arg1));
2039 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2040 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2041 overflow) and negate (which can't either). Special-case a result
2042 of zero while we're here. */
2043 if (tree_int_cst_equal (arg0, arg1))
2044 return build_int_cst (ctype, 0);
2045 else if (tree_int_cst_lt (arg1, arg0))
2046 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2048 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2049 fold_convert (ctype, size_binop (MINUS_EXPR,
2053 /* A subroutine of fold_convert_const handling conversions of an
2054 INTEGER_CST to another integer type. */
2057 fold_convert_const_int_from_int (tree type, tree arg1)
2061 /* Given an integer constant, make new constant with new type,
2062 appropriately sign-extended or truncated. */
/* The overflowable argument suppresses overflow reporting for
   pointer sources; the last argument ORs in overflow when a negative
   unsigned-source value lands in a narrower-signedness target, plus
   any pre-existing overflow on ARG1.  */
2063 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2064 TREE_INT_CST_HIGH (arg1),
2065 /* Don't set the overflow when
2066 converting a pointer */
2067 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2068 (TREE_INT_CST_HIGH (arg1) < 0
2069 && (TYPE_UNSIGNED (type)
2070 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2071 | TREE_OVERFLOW (arg1));
2076 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2077 to an integer type. */
/* NOTE(review): the overflow flag declaration, switch scaffolding and
   some returns are elided in this excerpt.  */
2080 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
2085 /* The following code implements the floating point to integer
2086 conversion rules required by the Java Language Specification,
2087 that IEEE NaNs are mapped to zero and values that overflow
2088 the target precision saturate, i.e. values greater than
2089 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2090 are mapped to INT_MIN. These semantics are allowed by the
2091 C and C++ standards that simply state that the behavior of
2092 FP-to-integer conversion is unspecified upon overflow. */
2094 HOST_WIDE_INT high, low;
2096 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2100 case FIX_TRUNC_EXPR:
2101 real_trunc (&r, VOIDmode, &x);
2108 /* If R is NaN, return zero and show we have an overflow. */
2109 if (REAL_VALUE_ISNAN (r))
2116 /* See if R is less than the lower bound or greater than the
/* Saturate at TYPE_MIN_VALUE when R underflows the target type.  */
2121 tree lt = TYPE_MIN_VALUE (type);
2122 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2123 if (REAL_VALUES_LESS (r, l))
2126 high = TREE_INT_CST_HIGH (lt);
2127 low = TREE_INT_CST_LOW (lt);
/* Saturate at TYPE_MAX_VALUE when R overflows the target type.  */
2133 tree ut = TYPE_MAX_VALUE (type);
2136 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2137 if (REAL_VALUES_LESS (u, r))
2140 high = TREE_INT_CST_HIGH (ut);
2141 low = TREE_INT_CST_LOW (ut);
/* In range: do the actual conversion into (low, high).  */
2147 REAL_VALUE_TO_INT (&low, &high, r);
2149 t = force_fit_type_double (type, low, high, -1,
2150 overflow | TREE_OVERFLOW (arg1));
2154 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2155 to another floating point type. */
2158 fold_convert_const_real_from_real (tree type, tree arg1)
2160 REAL_VALUE_TYPE value;
/* Round the source value to the target mode, then carry over the
   source's overflow flag.  */
2163 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2164 t = build_real (type, value);
2166 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2170 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2171 type TYPE. If no simplification can be done return NULL_TREE. */
/* Dispatch on target-type kind, then on the constant's kind; each
   combination has a dedicated helper above.  */
2174 fold_convert_const (enum tree_code code, tree type, tree arg1)
2176 if (TREE_TYPE (arg1) == type)
2179 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2181 if (TREE_CODE (arg1) == INTEGER_CST)
2182 return fold_convert_const_int_from_int (type, arg1);
2183 else if (TREE_CODE (arg1) == REAL_CST)
2184 return fold_convert_const_int_from_real (code, type, arg1);
2186 else if (TREE_CODE (type) == REAL_TYPE)
2188 if (TREE_CODE (arg1) == INTEGER_CST)
2189 return build_real_from_int_cst (type, arg1);
2190 if (TREE_CODE (arg1) == REAL_CST)
2191 return fold_convert_const_real_from_real (type, arg1);
2196 /* Construct a vector of zero elements of vector type TYPE. */
2199 build_zero_vector (tree type)
/* Fold integer zero to the element type once, then replicate it for
   every vector lane via a TREE_LIST.  */
2204 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2205 units = TYPE_VECTOR_SUBPARTS (type);
2208 for (i = 0; i < units; i++)
2209 list = tree_cons (NULL_TREE, elem, list);
2210 return build_vector (type, list);
2213 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2216 fold_convertible_p (const_tree type, const_tree arg)
2218 tree orig = TREE_TYPE (arg);
/* Error marks anywhere mean no conversion (return elided here).  */
2223 if (TREE_CODE (arg) == ERROR_MARK
2224 || TREE_CODE (type) == ERROR_MARK
2225 || TREE_CODE (orig) == ERROR_MARK)
2228 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2231 switch (TREE_CODE (type))
2233 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2234 case POINTER_TYPE: case REFERENCE_TYPE:
/* Scalar targets accept integral, pointer or offset sources.  */
2236 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2237 || TREE_CODE (orig) == OFFSET_TYPE)
/* Vector targets additionally require identical total size.  */
2239 return (TREE_CODE (orig) == VECTOR_TYPE
2240 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2243 return TREE_CODE (type) == TREE_CODE (orig);
2247 /* Convert expression ARG to type TYPE. Used by the middle-end for
2248 simple conversions in preference to calling the front-end's convert. */
/* NOTE(review): several case labels, local declarations and break
   statements are elided in this excerpt.  Structure parallels
   fold_convertible_p above: dispatch on the target type's code.  */
2251 fold_convert (tree type, tree arg)
2253 tree orig = TREE_TYPE (arg);
2259 if (TREE_CODE (arg) == ERROR_MARK
2260 || TREE_CODE (type) == ERROR_MARK
2261 || TREE_CODE (orig) == ERROR_MARK)
2262 return error_mark_node;
2264 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2265 return fold_build1 (NOP_EXPR, type, arg);
2267 switch (TREE_CODE (type))
2269 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2270 case POINTER_TYPE: case REFERENCE_TYPE:
/* Constant source: try to fold the conversion at compile time.  */
2272 if (TREE_CODE (arg) == INTEGER_CST)
2274 tem = fold_convert_const (NOP_EXPR, type, arg);
2275 if (tem != NULL_TREE)
2278 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2279 || TREE_CODE (orig) == OFFSET_TYPE)
2280 return fold_build1 (NOP_EXPR, type, arg);
/* Complex source: take the real part, then convert that.  */
2281 if (TREE_CODE (orig) == COMPLEX_TYPE)
2283 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2284 return fold_convert (type, tem);
2286 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2287 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2288 return fold_build1 (NOP_EXPR, type, arg);
/* Target is a REAL_TYPE: int constants use FLOAT_EXPR, real
   constants use NOP_EXPR folding.  */
2291 if (TREE_CODE (arg) == INTEGER_CST)
2293 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2294 if (tem != NULL_TREE)
2297 else if (TREE_CODE (arg) == REAL_CST)
2299 tem = fold_convert_const (NOP_EXPR, type, arg);
2300 if (tem != NULL_TREE)
2304 switch (TREE_CODE (orig))
2307 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2308 case POINTER_TYPE: case REFERENCE_TYPE:
2309 return fold_build1 (FLOAT_EXPR, type, arg);
2312 return fold_build1 (NOP_EXPR, type, arg);
2315 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2316 return fold_convert (type, tem);
/* Target is a COMPLEX_TYPE.  */
2323 switch (TREE_CODE (orig))
2326 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2327 case POINTER_TYPE: case REFERENCE_TYPE:
/* Scalar -> complex: real part is ARG, imaginary part is zero.  */
2329 return build2 (COMPLEX_EXPR, type,
2330 fold_convert (TREE_TYPE (type), arg),
2331 fold_convert (TREE_TYPE (type), integer_zero_node));
2336 if (TREE_CODE (arg) == COMPLEX_EXPR)
2338 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2339 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2340 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* General complex source: wrap in save_expr since it is used twice.  */
2343 arg = save_expr (arg);
2344 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2345 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2346 rpart = fold_convert (TREE_TYPE (type), rpart);
2347 ipart = fold_convert (TREE_TYPE (type), ipart);
2348 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* Target is a VECTOR_TYPE: only same-size view conversions.  */
2356 if (integer_zerop (arg))
2357 return build_zero_vector (type);
2358 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2359 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2360 || TREE_CODE (orig) == VECTOR_TYPE);
2361 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
/* Target is VOID_TYPE: keep only side effects of ARG.  */
2364 tem = fold_ignored_result (arg);
2365 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2367 return fold_build1 (NOP_EXPR, type, tem);
2374 /* Return false if expr can be assumed not to be an lvalue, true
2378 maybe_lvalue_p (tree x)
2380 /* We only need to wrap lvalue tree codes. */
/* NOTE(review): most of the case-label list and the true/false
   returns are elided in this excerpt.  */
2381 switch (TREE_CODE (x))
2392 case ALIGN_INDIRECT_REF:
2393 case MISALIGNED_INDIRECT_REF:
2395 case ARRAY_RANGE_REF:
2401 case PREINCREMENT_EXPR:
2402 case PREDECREMENT_EXPR:
2404 case TRY_CATCH_EXPR:
2405 case WITH_CLEANUP_EXPR:
2408 case GIMPLE_MODIFY_STMT:
2417 /* Assume the worst for front-end tree codes. */
2418 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2426 /* Return an expr equal to X but certainly not valid as an lvalue. */
2431 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* Only wrap when X might actually be an lvalue.  */
2436 if (! maybe_lvalue_p (x))
2438 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2441 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2442 Zero means allow extended lvalues. */
2444 int pedantic_lvalues;
2446 /* When pedantic, return an expr equal to X but certainly not valid as a
2447 pedantic lvalue. Otherwise, return X. */
2450 pedantic_non_lvalue (tree x)
/* In non-pedantic mode X is returned unchanged (return elided).  */
2452 if (pedantic_lvalues)
2453 return non_lvalue (x);
2458 /* Given a tree comparison code, return the code that is the logical inverse
2459 of the given code. It is not safe to do this for floating-point
2460 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2461 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
/* NOTE(review): the switch scaffolding and several case labels are
   elided in this excerpt.  */
2464 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* Trapping math plus NaNs: inversion would change trap behavior.  */
2466 if (honor_nans && flag_trapping_math)
/* With NaNs, the inverse of an ordered compare is the unordered one.  */
2476 return honor_nans ? UNLE_EXPR : LE_EXPR;
2478 return honor_nans ? UNLT_EXPR : LT_EXPR;
2480 return honor_nans ? UNGE_EXPR : GE_EXPR;
2482 return honor_nans ? UNGT_EXPR : GT_EXPR;
2496 return UNORDERED_EXPR;
2497 case UNORDERED_EXPR:
2498 return ORDERED_EXPR;
2504 /* Similar, but return the comparison that results if the operands are
2505 swapped. This is safe for floating-point. */
/* NOTE(review): the entire switch body is elided here except one
   case label.  */
2508 swap_tree_comparison (enum tree_code code)
2515 case UNORDERED_EXPR:
2541 /* Convert a comparison tree code from an enum tree_code representation
2542 into a compcode bit-based encoding. This function is the inverse of
2543 compcode_to_comparison. */
/* NOTE(review): the switch scaffolding and the ordered-compare cases
   are elided in this excerpt.  */
2545 static enum comparison_code
2546 comparison_to_compcode (enum tree_code code)
2563 return COMPCODE_ORD;
2564 case UNORDERED_EXPR:
2565 return COMPCODE_UNORD;
2567 return COMPCODE_UNLT;
2569 return COMPCODE_UNEQ;
2571 return COMPCODE_UNLE;
2573 return COMPCODE_UNGT;
2575 return COMPCODE_LTGT;
2577 return COMPCODE_UNGE;
2583 /* Convert a compcode bit-based encoding of a comparison operator back
2584 to GCC's enum tree_code representation. This function is the
2585 inverse of comparison_to_compcode. */
/* NOTE(review): most case labels are elided in this excerpt.  */
2587 static enum tree_code
2588 compcode_to_comparison (enum comparison_code code)
2605 return ORDERED_EXPR;
2606 case COMPCODE_UNORD:
2607 return UNORDERED_EXPR;
2625 /* Return a tree for the comparison which is the combination of
2626 doing the AND or OR (depending on CODE) of the two operations LCODE
2627 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2628 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2629 if this makes the transformation invalid. */
2632 combine_comparisons (enum tree_code code, enum tree_code lcode,
2633 enum tree_code rcode, tree truth_type,
2634 tree ll_arg, tree lr_arg)
2636 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2637 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2638 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2639 enum comparison_code compcode;
/* In the compcode bit encoding, AND of comparisons is bitwise AND and
   OR is bitwise OR.  */
2643 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2644 compcode = lcompcode & rcompcode;
2647 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2648 compcode = lcompcode | rcompcode;
/* Without NaNs (guard elided here), drop the unordered bit.  */
2657 /* Eliminate unordered comparisons, as well as LTGT and ORD
2658 which are not used unless the mode has NaNs. */
2659 compcode &= ~COMPCODE_UNORD;
2660 if (compcode == COMPCODE_LTGT)
2661 compcode = COMPCODE_NE;
2662 else if (compcode == COMPCODE_ORD)
2663 compcode = COMPCODE_TRUE;
2665 else if (flag_trapping_math)
2667 /* Check that the original operation and the optimized ones will trap
2668 under the same condition. */
/* A comparison traps (raises INVALID on NaN) unless it is EQ, ORD,
   or already tolerates unordered operands.  */
2669 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2670 && (lcompcode != COMPCODE_EQ)
2671 && (lcompcode != COMPCODE_ORD);
2672 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2673 && (rcompcode != COMPCODE_EQ)
2674 && (rcompcode != COMPCODE_ORD);
2675 bool trap = (compcode & COMPCODE_UNORD) == 0
2676 && (compcode != COMPCODE_EQ)
2677 && (compcode != COMPCODE_ORD);
2679 /* In a short-circuited boolean expression the LHS might be
2680 such that the RHS, if evaluated, will never trap. For
2681 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2682 if neither x nor y is NaN. (This is a mixed blessing: for
2683 example, the expression above will never trap, hence
2684 optimizing it to x < y would be invalid). */
2685 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2686 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2689 /* If the comparison was short-circuited, and only the RHS
2690 trapped, we may now generate a spurious trap. */
2692 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2695 /* If we changed the conditions that cause a trap, we lose. */
2696 if ((ltrap || rtrap) != trap)
/* Degenerate combinations fold to a constant boolean.  */
2700 if (compcode == COMPCODE_TRUE)
2701 return constant_boolean_node (true, truth_type);
2702 else if (compcode == COMPCODE_FALSE)
2703 return constant_boolean_node (false, truth_type);
2705 return fold_build2 (compcode_to_comparison (compcode),
2706 truth_type, ll_arg, lr_arg);
2709 /* Return nonzero if CODE is a tree code that represents a truth value. */
2712 truth_value_p (enum tree_code code)
/* True iff CODE is a comparison or one of the TRUTH_* logical codes,
   i.e. an operation whose result is a truth value (0 or 1).  */
2714 return (TREE_CODE_CLASS (code) == tcc_comparison
2715 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2716 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2717 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2720 /* Return nonzero if two operands (typically of the same tree node)
2721 are necessarily equal. If either argument has side-effects this
2722 function returns zero. FLAGS modifies behavior as follows:
2724 If OEP_ONLY_CONST is set, only return nonzero for constants.
2725 This function tests whether the operands are indistinguishable;
2726 it does not test whether they are equal using C's == operation.
2727 The distinction is important for IEEE floating point, because
2728 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2729 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2731 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2732 even though it may hold multiple values during a function.
2733 This is because a GCC tree node guarantees that nothing else is
2734 executed between the evaluation of its "operands" (which may often
2735 be evaluated in arbitrary order). Hence if the operands themselves
2736 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2737 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2738 unset means assuming isochronic (or instantaneous) tree equivalence.
2739 Unless comparing arbitrary expression trees, such as from different
2740 statements, this flag can usually be left unset.
2742 If OEP_PURE_SAME is set, then pure functions with identical arguments
2743 are considered the same. It is used when the caller has other ways
2744 to ensure that global memory is unchanged in between. */
2747 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
/* Structural equality test for two operands; see the block comment above
   for the OEP_ONLY_CONST / OEP_PURE_SAME semantics of FLAGS.
   NOTE(review): several guard returns and case labels are elided in this
   extract; the visible logic is annotated below.  */
2749 /* If either is ERROR_MARK, they aren't equal. */
2750 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2753 /* If both types don't have the same signedness, then we can't consider
2754 them equal. We must check this before the STRIP_NOPS calls
2755 because they may change the signedness of the arguments. */
2756 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2759 /* If both types don't have the same precision, then it is not safe
2761 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2767 /* In case both args are comparisons but with different comparison
2768 code, try to swap the comparison operands of one arg to produce
2769 a match and compare that variant. */
2770 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2771 && COMPARISON_CLASS_P (arg0)
2772 && COMPARISON_CLASS_P (arg1))
2774 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2776 if (TREE_CODE (arg0) == swap_code)
2777 return operand_equal_p (TREE_OPERAND (arg0, 0),
2778 TREE_OPERAND (arg1, 1), flags)
2779 && operand_equal_p (TREE_OPERAND (arg0, 1),
2780 TREE_OPERAND (arg1, 0), flags);
2783 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2784 /* This is needed for conversions and for COMPONENT_REF.
2785 Might as well play it safe and always test this. */
2786 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2787 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2788 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2791 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2792 We don't care about side effects in that case because the SAVE_EXPR
2793 takes care of that for us. In all other cases, two expressions are
2794 equal if they have no side effects. If we have two identical
2795 expressions with side effects that should be treated the same due
2796 to the only side effects being identical SAVE_EXPR's, that will
2797 be detected in the recursive calls below. */
2798 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2799 && (TREE_CODE (arg0) == SAVE_EXPR
2800 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2803 /* Next handle constant cases, those for which we can return 1 even
2804 if ONLY_CONST is set. */
2805 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2806 switch (TREE_CODE (arg0))
2809 return tree_int_cst_equal (arg0, arg1);
/* REAL_CST: identical bit patterns are equal; otherwise two zeros of
   differing sign may still compare equal when signed zeros are not
   honored for this mode.  */
2812 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2813 TREE_REAL_CST (arg1)))
2817 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2819 /* If we do not distinguish between signed and unsigned zero,
2820 consider them equal. */
2821 if (real_zerop (arg0) && real_zerop (arg1))
/* VECTOR_CST: walk both element lists in lock step.  */
2830 v1 = TREE_VECTOR_CST_ELTS (arg0);
2831 v2 = TREE_VECTOR_CST_ELTS (arg1);
2834 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2837 v1 = TREE_CHAIN (v1);
2838 v2 = TREE_CHAIN (v2);
/* COMPLEX_CST: equal iff both real and imaginary parts are equal.  */
2845 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2847 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
/* STRING_CST: equal length and identical bytes.  */
2851 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2852 && ! memcmp (TREE_STRING_POINTER (arg0),
2853 TREE_STRING_POINTER (arg1),
2854 TREE_STRING_LENGTH (arg0)));
2857 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2863 if (flags & OEP_ONLY_CONST)
2866 /* Define macros to test an operand from arg0 and arg1 for equality and a
2867 variant that allows null and views null as being different from any
2868 non-null value. In the latter case, if either is null, they both
2869 must be; otherwise, do the normal comparison. */
2870 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2871 TREE_OPERAND (arg1, N), flags)
2873 #define OP_SAME_WITH_NULL(N) \
2874 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2875 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2877 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2880 /* Two conversions are equal only if signedness and modes match. */
2881 switch (TREE_CODE (arg0))
2885 case FIX_TRUNC_EXPR:
2886 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2887 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2897 case tcc_comparison:
2899 if (OP_SAME (0) && OP_SAME (1))
2902 /* For commutative ops, allow the other order. */
2903 return (commutative_tree_code (TREE_CODE (arg0))
2904 && operand_equal_p (TREE_OPERAND (arg0, 0),
2905 TREE_OPERAND (arg1, 1), flags)
2906 && operand_equal_p (TREE_OPERAND (arg0, 1),
2907 TREE_OPERAND (arg1, 0), flags));
2910 /* If either of the pointer (or reference) expressions we are
2911 dereferencing contain a side effect, these cannot be equal. */
2912 if (TREE_SIDE_EFFECTS (arg0)
2913 || TREE_SIDE_EFFECTS (arg1))
2916 switch (TREE_CODE (arg0))
2919 case ALIGN_INDIRECT_REF:
2920 case MISALIGNED_INDIRECT_REF:
2926 case ARRAY_RANGE_REF:
2927 /* Operands 2 and 3 may be null.
2928 Compare the array index by value if it is constant first as we
2929 may have different types but same value here. */
2931 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2932 TREE_OPERAND (arg1, 1))
2934 && OP_SAME_WITH_NULL (2)
2935 && OP_SAME_WITH_NULL (3));
2938 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2939 may be NULL when we're called to compare MEM_EXPRs. */
2940 return OP_SAME_WITH_NULL (0)
2942 && OP_SAME_WITH_NULL (2);
2945 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2951 case tcc_expression:
2952 switch (TREE_CODE (arg0))
2955 case TRUTH_NOT_EXPR:
2958 case TRUTH_ANDIF_EXPR:
2959 case TRUTH_ORIF_EXPR:
2960 return OP_SAME (0) && OP_SAME (1);
2962 case TRUTH_AND_EXPR:
2964 case TRUTH_XOR_EXPR:
2965 if (OP_SAME (0) && OP_SAME (1))
2968 /* Otherwise take into account this is a commutative operation. */
2969 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2970 TREE_OPERAND (arg1, 1), flags)
2971 && operand_equal_p (TREE_OPERAND (arg0, 1),
2972 TREE_OPERAND (arg1, 0), flags));
2979 switch (TREE_CODE (arg0))
2982 /* If the CALL_EXPRs call different functions, then they
2983 clearly can not be equal. */
2984 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
/* Side-effecting calls only compare equal when OEP_PURE_SAME allows
   const/pure calls to be treated as equal.  */
2989 unsigned int cef = call_expr_flags (arg0);
2990 if (flags & OEP_PURE_SAME)
2991 cef &= ECF_CONST | ECF_PURE;
2998 /* Now see if all the arguments are the same. */
3000 const_call_expr_arg_iterator iter0, iter1;
3002 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3003 a1 = first_const_call_expr_arg (arg1, &iter1);
3005 a0 = next_const_call_expr_arg (&iter0),
3006 a1 = next_const_call_expr_arg (&iter1))
3007 if (! operand_equal_p (a0, a1, flags))
3010 /* If we get here and both argument lists are exhausted
3011 then the CALL_EXPRs are equal. */
3012 return ! (a0 || a1);
3018 case tcc_declaration:
3019 /* Consider __builtin_sqrt equal to sqrt. */
3020 return (TREE_CODE (arg0) == FUNCTION_DECL
3021 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3022 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3023 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3030 #undef OP_SAME_WITH_NULL
3033 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3034 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3036 When in doubt, return 0. */
3039 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
/* Nonzero if ARG0 could have been produced by shorten_compare narrowing
   ARG1 while ARG1 was compared against OTHER; see block comment above.  */
3041 int unsignedp1, unsignedpo;
3042 tree primarg0, primarg1, primother;
3043 unsigned int correct_width;
/* Trivially equal operands need no shortening analysis.  */
3045 if (operand_equal_p (arg0, arg1, 0))
3048 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3049 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3052 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3053 and see if the inner values are the same. This removes any
3054 signedness comparison, which doesn't matter here. */
3055 primarg0 = arg0, primarg1 = arg1;
3056 STRIP_NOPS (primarg0);
3057 STRIP_NOPS (primarg1);
3058 if (operand_equal_p (primarg0, primarg1, 0))
3061 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3062 actual comparison operand, ARG0.
3064 First throw away any conversions to wider types
3065 already present in the operands. */
3067 primarg1 = get_narrower (arg1, &unsignedp1);
3068 primother = get_narrower (other, &unsignedpo);
3070 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3071 if (unsignedp1 == unsignedpo
3072 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3073 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3075 tree type = TREE_TYPE (arg0);
3077 /* Make sure shorter operand is extended the right way
3078 to match the longer operand. */
3079 primarg1 = fold_convert (signed_or_unsigned_type_for
3080 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3082 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3089 /* See if ARG is an expression that is either a comparison or is performing
3090 arithmetic on comparisons. The comparisons must only be comparing
3091 two different values, which will be stored in *CVAL1 and *CVAL2; if
3092 they are nonzero it means that some operands have already been found.
3093 No variables may be used anywhere else in the expression except in the
3094 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3095 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3097 If this is true, return 1. Otherwise, return zero. */
3100 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
/* See block comment above: recognizes expressions built only from
   comparisons of at most two distinct values, recording the values in
   *CVAL1/*CVAL2.  NOTE(review): some case labels and the switch head are
   elided in this extract.  */
3102 enum tree_code code = TREE_CODE (arg);
3103 enum tree_code_class class = TREE_CODE_CLASS (code);
3105 /* We can handle some of the tcc_expression cases here. */
3106 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3108 else if (class == tcc_expression
3109 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3110 || code == COMPOUND_EXPR))
3113 else if (class == tcc_expression && code == SAVE_EXPR
3114 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3116 /* If we've already found a CVAL1 or CVAL2, this expression is
3117 too complex to handle. */
3118 if (*cval1 || *cval2)
/* Unary codes recurse on the sole operand; binary codes require both
   operands to qualify.  */
3128 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3131 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3132 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3133 cval1, cval2, save_p));
3138 case tcc_expression:
3139 if (code == COND_EXPR)
3140 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3141 cval1, cval2, save_p)
3142 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3143 cval1, cval2, save_p)
3144 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3145 cval1, cval2, save_p));
3148 case tcc_comparison:
3149 /* First see if we can handle the first operand, then the second. For
3150 the second operand, we know *CVAL1 can't be zero. It must be that
3151 one side of the comparison is each of the values; test for the
3152 case where this isn't true by failing if the two operands
/* A comparison of a value against itself cannot be decomposed into two
   distinct values, so reject it.  */
3155 if (operand_equal_p (TREE_OPERAND (arg, 0),
3156 TREE_OPERAND (arg, 1), 0))
/* Record or match operand 0 against the previously seen values.  */
3160 *cval1 = TREE_OPERAND (arg, 0);
3161 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3163 else if (*cval2 == 0)
3164 *cval2 = TREE_OPERAND (arg, 0);
3165 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for operand 1.  */
3170 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3172 else if (*cval2 == 0)
3173 *cval2 = TREE_OPERAND (arg, 1);
3174 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3186 /* ARG is a tree that is known to contain just arithmetic operations and
3187 comparisons. Evaluate the operations in the tree substituting NEW0 for
3188 any occurrence of OLD0 as an operand of a comparison and likewise for
3192 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
/* Rebuilds ARG, substituting NEW0 for OLD0 and NEW1 for OLD1 wherever
   they appear as comparison operands; see block comment above.
   NOTE(review): some case labels are elided in this extract.  */
3194 tree type = TREE_TYPE (arg);
3195 enum tree_code code = TREE_CODE (arg);
3196 enum tree_code_class class = TREE_CODE_CLASS (code);
3198 /* We can handle some of the tcc_expression cases here. */
3199 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3201 else if (class == tcc_expression
3202 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary/binary codes: rebuild with recursively substituted operands.  */
3208 return fold_build1 (code, type,
3209 eval_subst (TREE_OPERAND (arg, 0),
3210 old0, new0, old1, new1));
3213 return fold_build2 (code, type,
3214 eval_subst (TREE_OPERAND (arg, 0),
3215 old0, new0, old1, new1),
3216 eval_subst (TREE_OPERAND (arg, 1),
3217 old0, new0, old1, new1));
3219 case tcc_expression:
3223 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3226 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3229 return fold_build3 (code, type,
3230 eval_subst (TREE_OPERAND (arg, 0),
3231 old0, new0, old1, new1),
3232 eval_subst (TREE_OPERAND (arg, 1),
3233 old0, new0, old1, new1),
3234 eval_subst (TREE_OPERAND (arg, 2),
3235 old0, new0, old1, new1));
3239 /* Fall through - ??? */
3241 case tcc_comparison:
3243 tree arg0 = TREE_OPERAND (arg, 0);
3244 tree arg1 = TREE_OPERAND (arg, 1);
3246 /* We need to check both for exact equality and tree equality. The
3247 former will be true if the operand has a side-effect. In that
3248 case, we know the operand occurred exactly once. */
3250 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3252 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3255 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3257 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3260 return fold_build2 (code, type, arg0, arg1);
3268 /* Return a tree for the case when the result of an expression is RESULT
3269 converted to TYPE and OMITTED was previously an operand of the expression
3270 but is now not needed (e.g., we folded OMITTED * 0).
3272 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3273 the conversion of RESULT to TYPE. */
3276 omit_one_operand (tree type, tree result, tree omitted)
/* Converts RESULT to TYPE; if OMITTED has side effects it must still be
   evaluated, so wrap it in a COMPOUND_EXPR ahead of the result.  */
3278 tree t = fold_convert (type, result);
3280 if (TREE_SIDE_EFFECTS (omitted))
3281 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3283 return non_lvalue (t);
3286 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3289 pedantic_omit_one_operand (tree type, tree result, tree omitted)
/* Same contract as omit_one_operand, but uses pedantic_non_lvalue for
   the no-side-effect path.  */
3291 tree t = fold_convert (type, result);
3293 if (TREE_SIDE_EFFECTS (omitted))
3294 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3296 return pedantic_non_lvalue (t);
3299 /* Return a tree for the case when the result of an expression is RESULT
3300 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3301 of the expression but are now not needed.
3303 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3304 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3305 evaluated before OMITTED2. Otherwise, if neither has side effects,
3306 just do the conversion of RESULT to TYPE. */
3309 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
/* Converts RESULT to TYPE while preserving the evaluation (and order:
   OMITTED1 before OMITTED2) of any side-effecting omitted operands.  */
3311 tree t = fold_convert (type, result);
/* Wrap innermost-last so OMITTED1 ends up evaluated first.  */
3313 if (TREE_SIDE_EFFECTS (omitted2))
3314 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3315 if (TREE_SIDE_EFFECTS (omitted1))
3316 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3318 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3322 /* Return a simplified tree node for the truth-negation of ARG. This
3323 never alters ARG itself. We assume that ARG is an operation that
3324 returns a truth value (0 or 1).
3326 FIXME: one would think we would fold the result, but it causes
3327 problems with the dominator optimizer. */
3330 fold_truth_not_expr (tree arg)
/* Builds the truth-negation of ARG without altering ARG; see block
   comment above.  NOTE(review): some case labels and the fallback return
   are elided in this extract.  */
3332 tree type = TREE_TYPE (arg);
3333 enum tree_code code = TREE_CODE (arg);
3335 /* If this is a comparison, we can simply invert it, except for
3336 floating-point non-equality comparisons, in which case we just
3337 enclose a TRUTH_NOT_EXPR around what we have. */
3339 if (TREE_CODE_CLASS (code) == tcc_comparison)
3341 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
/* With trapping math, inverting an ordered FP comparison could turn a
   trapping comparison into a quiet one (or vice versa), so bail out for
   the inequality codes.  */
3342 if (FLOAT_TYPE_P (op_type)
3343 && flag_trapping_math
3344 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3345 && code != NE_EXPR && code != EQ_EXPR)
3349 code = invert_tree_comparison (code,
3350 HONOR_NANS (TYPE_MODE (op_type)));
3351 if (code == ERROR_MARK)
3354 return build2 (code, type,
3355 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constants: !0 -> 1, !nonzero -> 0.  */
3362 return constant_boolean_node (integer_zerop (arg), type);
/* De Morgan: !(a & b) == !a | !b, and dually.  */
3364 case TRUTH_AND_EXPR:
3365 return build2 (TRUTH_OR_EXPR, type,
3366 invert_truthvalue (TREE_OPERAND (arg, 0)),
3367 invert_truthvalue (TREE_OPERAND (arg, 1)));
3370 return build2 (TRUTH_AND_EXPR, type,
3371 invert_truthvalue (TREE_OPERAND (arg, 0)),
3372 invert_truthvalue (TREE_OPERAND (arg, 1)));
3374 case TRUTH_XOR_EXPR:
3375 /* Here we can invert either operand. We invert the first operand
3376 unless the second operand is a TRUTH_NOT_EXPR in which case our
3377 result is the XOR of the first operand with the inside of the
3378 negation of the second operand. */
3380 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3381 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3382 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3384 return build2 (TRUTH_XOR_EXPR, type,
3385 invert_truthvalue (TREE_OPERAND (arg, 0)),
3386 TREE_OPERAND (arg, 1));
3388 case TRUTH_ANDIF_EXPR:
3389 return build2 (TRUTH_ORIF_EXPR, type,
3390 invert_truthvalue (TREE_OPERAND (arg, 0)),
3391 invert_truthvalue (TREE_OPERAND (arg, 1)));
3393 case TRUTH_ORIF_EXPR:
3394 return build2 (TRUTH_ANDIF_EXPR, type,
3395 invert_truthvalue (TREE_OPERAND (arg, 0)),
3396 invert_truthvalue (TREE_OPERAND (arg, 1)));
3398 case TRUTH_NOT_EXPR:
3399 return TREE_OPERAND (arg, 0);
/* COND_EXPR: invert both arms, leaving void (throw) arms untouched.  */
3403 tree arg1 = TREE_OPERAND (arg, 1);
3404 tree arg2 = TREE_OPERAND (arg, 2);
3405 /* A COND_EXPR may have a throw as one operand, which
3406 then has void type. Just leave void operands
3408 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3409 VOID_TYPE_P (TREE_TYPE (arg1))
3410 ? arg1 : invert_truthvalue (arg1),
3411 VOID_TYPE_P (TREE_TYPE (arg2))
3412 ? arg2 : invert_truthvalue (arg2));
/* COMPOUND_EXPR: keep the side-effect half, invert the value half.  */
3416 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3417 invert_truthvalue (TREE_OPERAND (arg, 1)));
3419 case NON_LVALUE_EXPR:
3420 return invert_truthvalue (TREE_OPERAND (arg, 0));
3423 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3424 return build1 (TRUTH_NOT_EXPR, type, arg);
/* Conversions: push the negation inside the conversion.  */
3428 return build1 (TREE_CODE (arg), type,
3429 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* (x & 1) is a truth value; !(x & 1) == ((x & 1) == 0).  Other masks
   are not truth values, so give up.  */
3432 if (!integer_onep (TREE_OPERAND (arg, 1)))
3434 return build2 (EQ_EXPR, type, arg,
3435 build_int_cst (type, 0));
3438 return build1 (TRUTH_NOT_EXPR, type, arg);
3440 case CLEANUP_POINT_EXPR:
3441 return build1 (CLEANUP_POINT_EXPR, type,
3442 invert_truthvalue (TREE_OPERAND (arg, 0)));
3451 /* Return a simplified tree node for the truth-negation of ARG. This
3452 never alters ARG itself. We assume that ARG is an operation that
3453 returns a truth value (0 or 1).
3455 FIXME: one would think we would fold the result, but it causes
3456 problems with the dominator optimizer. */
3459 invert_truthvalue (tree arg)
/* Public entry point: tries fold_truth_not_expr first, and falls back to
   an explicit TRUTH_NOT_EXPR when no simplification applies.  */
3463 if (TREE_CODE (arg) == ERROR_MARK)
3466 tem = fold_truth_not_expr (arg);
3468 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3473 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3474 operands are another bit-wise operation with a common input. If so,
3475 distribute the bit operations to save an operation and possibly two if
3476 constants are involved. For example, convert
3477 (A | B) & (A | C) into A | (B & C)
3478 Further simplification will occur if B and C are constants.
3480 If this optimization cannot be done, 0 will be returned. */
3483 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* See block comment above: rewrites (A op B) CODE (A op C) as
   A op (B CODE C) when both operands share a common input A.  */
/* Both operands must use the same inner bit-op, different from CODE,
   and that inner op must be BIT_AND or BIT_IOR.  */
3488 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3489 || TREE_CODE (arg0) == code
3490 || (TREE_CODE (arg0) != BIT_AND_EXPR
3491 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Try all four placements of the common operand.  */
3494 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3496 common = TREE_OPERAND (arg0, 0);
3497 left = TREE_OPERAND (arg0, 1);
3498 right = TREE_OPERAND (arg1, 1);
3500 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3502 common = TREE_OPERAND (arg0, 0);
3503 left = TREE_OPERAND (arg0, 1);
3504 right = TREE_OPERAND (arg1, 0);
3506 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3508 common = TREE_OPERAND (arg0, 1);
3509 left = TREE_OPERAND (arg0, 0);
3510 right = TREE_OPERAND (arg1, 1);
3512 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3514 common = TREE_OPERAND (arg0, 1);
3515 left = TREE_OPERAND (arg0, 0);
3516 right = TREE_OPERAND (arg1, 0);
3521 return fold_build2 (TREE_CODE (arg0), type, common,
3522 fold_build2 (code, type, left, right));
3525 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3526 with code CODE. This optimization is unsafe. */
3528 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
/* ARG0 and ARG1 are RDIV_EXPRs (or MULT_EXPRs per the mul0/mul1 flags);
   combines them under CODE.  Unsafe for strict FP semantics, as noted in
   the comment above.  */
3530 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3531 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3533 /* (A / C) +- (B / C) -> (A +- B) / C. */
3535 && operand_equal_p (TREE_OPERAND (arg0, 1),
3536 TREE_OPERAND (arg1, 1), 0))
3537 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3538 fold_build2 (code, type,
3539 TREE_OPERAND (arg0, 0),
3540 TREE_OPERAND (arg1, 0)),
3541 TREE_OPERAND (arg0, 1));
3543 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3544 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3545 TREE_OPERAND (arg1, 0), 0)
3546 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3547 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
3549 REAL_VALUE_TYPE r0, r1;
3550 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3551 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
/* For a division operand, fold the constant into its reciprocal so the
   result can be expressed as a single multiplication.  */
3553 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3555 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3556 real_arithmetic (&r0, code, &r0, &r1);
3557 return fold_build2 (MULT_EXPR, type,
3558 TREE_OPERAND (arg0, 0),
3559 build_real (type, r0));
3565 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3566 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3569 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
/* Builds a BIT_FIELD_REF of TYPE covering BITSIZE bits of INNER at
   BITPOS; UNSIGNEDP selects the field's signedness.  NOTE(review): the
   remaining parameter(s) and a guard are elided in this extract.  */
/* If the whole integral/pointer object is requested, a plain conversion
   suffices -- no bit-field extraction needed.  */
3576 tree size = TYPE_SIZE (TREE_TYPE (inner));
3577 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3578 || POINTER_TYPE_P (TREE_TYPE (inner)))
3579 && host_integerp (size, 0)
3580 && tree_low_cst (size, 0) == bitsize)
3581 return fold_convert (type, inner);
3584 result = build3 (BIT_FIELD_REF, type, inner,
3585 size_int (bitsize), bitsize_int (bitpos));
3587 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3592 /* Optimize a bit-field compare.
3594 There are two cases: First is a compare against a constant and the
3595 second is a comparison of two items where the fields are at the same
3596 bit position relative to the start of a chunk (byte, halfword, word)
3597 large enough to contain it. In these cases we can avoid the shift
3598 implicit in bitfield extractions.
3600 For constants, we emit a compare of the shifted constant with the
3601 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3602 compared. For two fields at the same position, we do the ANDs with the
3603 similar mask and compare the result of the ANDs.
3605 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3606 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3607 are the left and right operands of the comparison, respectively.
3609 If the optimization described above can be done, we return the resulting
3610 tree. Otherwise we return zero. */
3613 optimize_bit_field_compare (enum tree_code code, tree compare_type,
/* Optimizes a bit-field comparison (CODE is NE_EXPR or EQ_EXPR); see the
   block comment above for the two handled cases.  NOTE(review): the
   LHS/RHS parameter line and several guards are elided in this extract.  */
3616 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3617 tree type = TREE_TYPE (lhs);
3618 tree signed_type, unsigned_type;
3619 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3620 enum machine_mode lmode, rmode, nmode;
3621 int lunsignedp, runsignedp;
3622 int lvolatilep = 0, rvolatilep = 0;
3623 tree linner, rinner = NULL_TREE;
3627 /* Get all the information about the extractions being done. If the bit size
3628 is the same as the size of the underlying object, we aren't doing an
3629 extraction at all and so can do nothing. We also don't want to
3630 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3631 then will no longer be able to replace it. */
3632 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3633 &lunsignedp, &lvolatilep, false);
3634 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3635 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3640 /* If this is not a constant, we can only do something if bit positions,
3641 sizes, and signedness are the same. */
3642 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3643 &runsignedp, &rvolatilep, false);
3645 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3646 || lunsignedp != runsignedp || offset != 0
3647 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3651 /* See if we can find a mode to refer to this field. We should be able to,
3652 but fail if we can't. */
3653 nmode = get_best_mode (lbitsize, lbitpos,
3654 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3655 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3656 TYPE_ALIGN (TREE_TYPE (rinner))),
3657 word_mode, lvolatilep || rvolatilep)
3658 if (nmode == VOIDmode)
3661 /* Set signed and unsigned types of the precision of this mode for the
3663 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3664 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3666 /* Compute the bit position and size for the new reference and our offset
3667 within it. If the new reference is the same size as the original, we
3668 won't optimize anything, so return zero. */
3669 nbitsize = GET_MODE_BITSIZE (nmode);
3670 nbitpos = lbitpos & ~ (nbitsize - 1);
3672 if (nbitsize == lbitsize)
/* Big-endian targets count bit positions from the other end of the
   containing unit.  */
3675 if (BYTES_BIG_ENDIAN)
3676 lbitpos = nbitsize - lbitsize - lbitpos;
3678 /* Make the mask to be used against the extracted field. */
3679 mask = build_int_cst_type (unsigned_type, -1);
3680 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3681 mask = const_binop (RSHIFT_EXPR, mask,
3682 size_int (nbitsize - lbitsize - lbitpos), 0);
3685 /* If not comparing with constant, just rework the comparison
3687 return fold_build2 (code, compare_type,
3688 fold_build2 (BIT_AND_EXPR, unsigned_type,
3689 make_bit_field_ref (linner,
3694 fold_build2 (BIT_AND_EXPR, unsigned_type,
3695 make_bit_field_ref (rinner,
3701 /* Otherwise, we are handling the constant case. See if the constant is too
3702 big for the field. Warn and return a tree for 0 (false) if so. We do
3703 this not only for its own sake, but to avoid having to test for this
3704 error case below. If we didn't, we might generate wrong code.
3706 For unsigned fields, the constant shifted right by the field length should
3707 be all zero. For signed fields, the high-order bits should agree with
3712 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3713 fold_convert (unsigned_type, rhs),
3714 size_int (lbitsize), 0)))
3716 warning (0, "comparison is always %d due to width of bit-field",
3718 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed-field overflow check: the bits above the field must all match
   the sign bit.  */
3723 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3724 size_int (lbitsize - 1), 0);
3725 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3727 warning (0, "comparison is always %d due to width of bit-field",
3729 return constant_boolean_node (code == NE_EXPR, compare_type);
3733 /* Single-bit compares should always be against zero. */
3734 if (lbitsize == 1 && ! integer_zerop (rhs))
3736 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3737 rhs = build_int_cst (type, 0);
3740 /* Make a new bitfield reference, shift the constant over the
3741 appropriate number of bits and mask it with the computed mask
3742 (in case this was a signed field). If we changed it, make a new one. */
3743 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3746 TREE_SIDE_EFFECTS (lhs) = 1;
3747 TREE_THIS_VOLATILE (lhs) = 1;
3750 rhs = const_binop (BIT_AND_EXPR,
3751 const_binop (LSHIFT_EXPR,
3752 fold_convert (unsigned_type, rhs),
3753 size_int (lbitpos), 0),
3756 return build2 (code, compare_type,
3757 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3761 /* Subroutine for fold_truthop: decode a field reference.
3763 If EXP is a comparison reference, we return the innermost reference.
3765 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3766 set to the starting bit number.
3768 If the innermost field can be completely contained in a mode-sized
3769 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3771 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3772 otherwise it is not changed.
3774 *PUNSIGNEDP is set to the signedness of the field.
3776 *PMASK is set to the mask used. This is either contained in a
3777 BIT_AND_EXPR or derived from the width of the field.
3779 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3781 Return 0 if this is not a component reference or is one that we can't
3782 do anything with. */
3785 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3786 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3787 int *punsignedp, int *pvolatilep,
3788 tree *pmask, tree *pand_mask)
3790 tree outer_type = 0;
3792 tree mask, inner, offset;
3794 unsigned int precision;
3796 /* All the optimizations using this function assume integer fields.
3797 There are problems with FP fields since the type_for_size call
3798 below can fail for, e.g., XFmode. */
3799 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3802 /* We are interested in the bare arrangement of bits, so strip everything
3803 that doesn't affect the machine mode. However, record the type of the
3804 outermost expression if it may matter below. */
3805 if (TREE_CODE (exp) == NOP_EXPR
3806 || TREE_CODE (exp) == CONVERT_EXPR
3807 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3808 outer_type = TREE_TYPE (exp);
3811 if (TREE_CODE (exp) == BIT_AND_EXPR)
3813 and_mask = TREE_OPERAND (exp, 1);
3814 exp = TREE_OPERAND (exp, 0);
3815 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3816 if (TREE_CODE (and_mask) != INTEGER_CST)
3820 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3821 punsignedp, pvolatilep, false);
3822 if ((inner == exp && and_mask == 0)
3823 || *pbitsize < 0 || offset != 0
3824 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3827 /* If the number of bits in the reference is the same as the bitsize of
3828 the outer type, then the outer type gives the signedness. Otherwise
3829 (in case of a small bitfield) the signedness is unchanged. */
3830 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3831 *punsignedp = TYPE_UNSIGNED (outer_type);
3833 /* Compute the mask to access the bitfield. */
3834 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3835 precision = TYPE_PRECISION (unsigned_type);
3837 mask = build_int_cst_type (unsigned_type, -1);
3839 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3840 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3842 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3844 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3845 fold_convert (unsigned_type, and_mask), mask);
3848 *pand_mask = and_mask;
3852 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3856 all_ones_mask_p (tree mask, int size)
3858 tree type = TREE_TYPE (mask);
3859 unsigned int precision = TYPE_PRECISION (type);
3862 tmask = build_int_cst_type (signed_type_for (type), -1);
3865 tree_int_cst_equal (mask,
3866 const_binop (RSHIFT_EXPR,
3867 const_binop (LSHIFT_EXPR, tmask,
3868 size_int (precision - size),
3870 size_int (precision - size), 0));
3873 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3874 represents the sign bit of EXP's type. If EXP represents a sign
3875 or zero extension, also test VAL against the unextended type.
3876 The return value is the (sub)expression whose sign bit is VAL,
3877 or NULL_TREE otherwise. */
3880 sign_bit_p (tree exp, tree val)
3882 unsigned HOST_WIDE_INT mask_lo, lo;
3883 HOST_WIDE_INT mask_hi, hi;
3887 /* Tree EXP must have an integral type. */
3888 t = TREE_TYPE (exp);
3889 if (! INTEGRAL_TYPE_P (t))
3892 /* Tree VAL must be an integer constant. */
3893 if (TREE_CODE (val) != INTEGER_CST
3894 || TREE_OVERFLOW (val))
3897 width = TYPE_PRECISION (t);
3898 if (width > HOST_BITS_PER_WIDE_INT)
3900 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3903 mask_hi = ((unsigned HOST_WIDE_INT) -1
3904 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3910 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3913 mask_lo = ((unsigned HOST_WIDE_INT) -1
3914 >> (HOST_BITS_PER_WIDE_INT - width));
3917 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3918 treat VAL as if it were unsigned. */
3919 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3920 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3923 /* Handle extension from a narrower type. */
3924 if (TREE_CODE (exp) == NOP_EXPR
3925 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3926 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3931 /* Subroutine for fold_truthop: determine if an operand is simple enough
3932 to be evaluated unconditionally. */
3935 simple_operand_p (tree exp)
3937 /* Strip any conversions that don't change the machine mode. */
3940 return (CONSTANT_CLASS_P (exp)
3941 || TREE_CODE (exp) == SSA_NAME
3943 && ! TREE_ADDRESSABLE (exp)
3944 && ! TREE_THIS_VOLATILE (exp)
3945 && ! DECL_NONLOCAL (exp)
3946 /* Don't regard global variables as simple. They may be
3947 allocated in ways unknown to the compiler (shared memory,
3948 #pragma weak, etc). */
3949 && ! TREE_PUBLIC (exp)
3950 && ! DECL_EXTERNAL (exp)
3951 /* Loading a static variable is unduly expensive, but global
3952 registers aren't expensive. */
3953 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3956 /* The following functions are subroutines to fold_range_test and allow it to
3957 try to change a logical combination of comparisons into a range test.
3960 X == 2 || X == 3 || X == 4 || X == 5
3964 (unsigned) (X - 2) <= 3
3966 We describe each set of comparisons as being either inside or outside
3967 a range, using a variable named like IN_P, and then describe the
3968 range with a lower and upper bound. If one of the bounds is omitted,
3969 it represents either the highest or lowest value of the type.
3971 In the comments below, we represent a range by two numbers in brackets
3972 preceded by a "+" to designate being inside that range, or a "-" to
3973 designate being outside that range, so the condition can be inverted by
3974 flipping the prefix. An omitted bound is represented by a "-". For
3975 example, "- [-, 10]" means being outside the range starting at the lowest
3976 possible value and ending at 10, in other words, being greater than 10.
3977 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3980 We set up things so that the missing bounds are handled in a consistent
3981 manner so neither a missing bound nor "true" and "false" need to be
3982 handled using a special case. */
3984 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3985 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3986 and UPPER1_P are nonzero if the respective argument is an upper bound
3987 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3988 must be specified for a comparison. ARG1 will be converted to ARG0's
3989 type if both are specified. */
3992 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3993 tree arg1, int upper1_p)
3999 /* If neither arg represents infinity, do the normal operation.
4000 Else, if not a comparison, return infinity. Else handle the special
4001 comparison rules. Note that most of the cases below won't occur, but
4002 are handled for consistency. */
4004 if (arg0 != 0 && arg1 != 0)
4006 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4007 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4009 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4012 if (TREE_CODE_CLASS (code) != tcc_comparison)
4015 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4016 for neither. In real maths, we cannot assume open ended ranges are
4017 the same. But, this is computer arithmetic, where numbers are finite.
4018 We can therefore make the transformation of any unbounded range with
4019 the value Z, Z being greater than any representable number. This permits
4020 us to treat unbounded ranges as equal. */
4021 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4022 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4026 result = sgn0 == sgn1;
4029 result = sgn0 != sgn1;
4032 result = sgn0 < sgn1;
4035 result = sgn0 <= sgn1;
4038 result = sgn0 > sgn1;
4041 result = sgn0 >= sgn1;
4047 return constant_boolean_node (result, type);
4050 /* Given EXP, a logical expression, set the range it is testing into
4051 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4052 actually being tested. *PLOW and *PHIGH will be made of the same
4053 type as the returned expression. If EXP is not a comparison, we
4054 will most likely not be returning a useful value and range. Set
4055 *STRICT_OVERFLOW_P to true if the return value is only valid
4056 because signed overflow is undefined; otherwise, do not change
4057 *STRICT_OVERFLOW_P. */
4060 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4061 bool *strict_overflow_p)
4063 enum tree_code code;
4064 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4065 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4067 tree low, high, n_low, n_high;
4069 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4070 and see if we can refine the range. Some of the cases below may not
4071 happen, but it doesn't seem worth worrying about this. We "continue"
4072 the outer loop when we've changed something; otherwise we "break"
4073 the switch, which will "break" the while. */
4076 low = high = build_int_cst (TREE_TYPE (exp), 0);
4080 code = TREE_CODE (exp);
4081 exp_type = TREE_TYPE (exp);
4083 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4085 if (TREE_OPERAND_LENGTH (exp) > 0)
4086 arg0 = TREE_OPERAND (exp, 0);
4087 if (TREE_CODE_CLASS (code) == tcc_comparison
4088 || TREE_CODE_CLASS (code) == tcc_unary
4089 || TREE_CODE_CLASS (code) == tcc_binary)
4090 arg0_type = TREE_TYPE (arg0);
4091 if (TREE_CODE_CLASS (code) == tcc_binary
4092 || TREE_CODE_CLASS (code) == tcc_comparison
4093 || (TREE_CODE_CLASS (code) == tcc_expression
4094 && TREE_OPERAND_LENGTH (exp) > 1))
4095 arg1 = TREE_OPERAND (exp, 1);
4100 case TRUTH_NOT_EXPR:
4101 in_p = ! in_p, exp = arg0;
4104 case EQ_EXPR: case NE_EXPR:
4105 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4106 /* We can only do something if the range is testing for zero
4107 and if the second operand is an integer constant. Note that
4108 saying something is "in" the range we make is done by
4109 complementing IN_P since it will set in the initial case of
4110 being not equal to zero; "out" is leaving it alone. */
4111 if (low == 0 || high == 0
4112 || ! integer_zerop (low) || ! integer_zerop (high)
4113 || TREE_CODE (arg1) != INTEGER_CST)
4118 case NE_EXPR: /* - [c, c] */
4121 case EQ_EXPR: /* + [c, c] */
4122 in_p = ! in_p, low = high = arg1;
4124 case GT_EXPR: /* - [-, c] */
4125 low = 0, high = arg1;
4127 case GE_EXPR: /* + [c, -] */
4128 in_p = ! in_p, low = arg1, high = 0;
4130 case LT_EXPR: /* - [c, -] */
4131 low = arg1, high = 0;
4133 case LE_EXPR: /* + [-, c] */
4134 in_p = ! in_p, low = 0, high = arg1;
4140 /* If this is an unsigned comparison, we also know that EXP is
4141 greater than or equal to zero. We base the range tests we make
4142 on that fact, so we record it here so we can parse existing
4143 range tests. We test arg0_type since often the return type
4144 of, e.g. EQ_EXPR, is boolean. */
4145 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4147 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4149 build_int_cst (arg0_type, 0),
4153 in_p = n_in_p, low = n_low, high = n_high;
4155 /* If the high bound is missing, but we have a nonzero low
4156 bound, reverse the range so it goes from zero to the low bound
4158 if (high == 0 && low && ! integer_zerop (low))
4161 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4162 integer_one_node, 0);
4163 low = build_int_cst (arg0_type, 0);
4171 /* (-x) IN [a,b] -> x in [-b, -a] */
4172 n_low = range_binop (MINUS_EXPR, exp_type,
4173 build_int_cst (exp_type, 0),
4175 n_high = range_binop (MINUS_EXPR, exp_type,
4176 build_int_cst (exp_type, 0),
4178 low = n_low, high = n_high;
4184 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4185 build_int_cst (exp_type, 1));
4188 case PLUS_EXPR: case MINUS_EXPR:
4189 if (TREE_CODE (arg1) != INTEGER_CST)
4192 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4193 move a constant to the other side. */
4194 if (!TYPE_UNSIGNED (arg0_type)
4195 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4198 /* If EXP is signed, any overflow in the computation is undefined,
4199 so we don't worry about it so long as our computations on
4200 the bounds don't overflow. For unsigned, overflow is defined
4201 and this is exactly the right thing. */
4202 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4203 arg0_type, low, 0, arg1, 0);
4204 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4205 arg0_type, high, 1, arg1, 0);
4206 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4207 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4210 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4211 *strict_overflow_p = true;
4213 /* Check for an unsigned range which has wrapped around the maximum
4214 value thus making n_high < n_low, and normalize it. */
4215 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4217 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4218 integer_one_node, 0);
4219 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4220 integer_one_node, 0);
4222 /* If the range is of the form +/- [ x+1, x ], we won't
4223 be able to normalize it. But then, it represents the
4224 whole range or the empty set, so make it
4226 if (tree_int_cst_equal (n_low, low)
4227 && tree_int_cst_equal (n_high, high))
4233 low = n_low, high = n_high;
4238 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4239 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4242 if (! INTEGRAL_TYPE_P (arg0_type)
4243 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4244 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4247 n_low = low, n_high = high;
4250 n_low = fold_convert (arg0_type, n_low);
4253 n_high = fold_convert (arg0_type, n_high);
4256 /* If we're converting arg0 from an unsigned type, to exp,
4257 a signed type, we will be doing the comparison as unsigned.
4258 The tests above have already verified that LOW and HIGH
4261 So we have to ensure that we will handle large unsigned
4262 values the same way that the current signed bounds treat
4265 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4268 tree equiv_type = lang_hooks.types.type_for_mode
4269 (TYPE_MODE (arg0_type), 1);
4271 /* A range without an upper bound is, naturally, unbounded.
4272 Since convert would have cropped a very large value, use
4273 the max value for the destination type. */
4275 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4276 : TYPE_MAX_VALUE (arg0_type);
4278 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4279 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4280 fold_convert (arg0_type,
4282 build_int_cst (arg0_type, 1));
4284 /* If the low bound is specified, "and" the range with the
4285 range for which the original unsigned value will be
4289 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4290 1, n_low, n_high, 1,
4291 fold_convert (arg0_type,
4296 in_p = (n_in_p == in_p);
4300 /* Otherwise, "or" the range with the range of the input
4301 that will be interpreted as negative. */
4302 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4303 0, n_low, n_high, 1,
4304 fold_convert (arg0_type,
4309 in_p = (in_p != n_in_p);
4314 low = n_low, high = n_high;
4324 /* If EXP is a constant, we can evaluate whether this is true or false. */
4325 if (TREE_CODE (exp) == INTEGER_CST)
4327 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4329 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4335 *pin_p = in_p, *plow = low, *phigh = high;
4339 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4340 type, TYPE, return an expression to test if EXP is in (or out of, depending
4341 on IN_P) the range. Return 0 if the test couldn't be created. */
4344 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4346 tree etype = TREE_TYPE (exp);
4349 #ifdef HAVE_canonicalize_funcptr_for_compare
4350 /* Disable this optimization for function pointer expressions
4351 on targets that require function pointer canonicalization. */
4352 if (HAVE_canonicalize_funcptr_for_compare
4353 && TREE_CODE (etype) == POINTER_TYPE
4354 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4360 value = build_range_check (type, exp, 1, low, high);
4362 return invert_truthvalue (value);
4367 if (low == 0 && high == 0)
4368 return build_int_cst (type, 1);
4371 return fold_build2 (LE_EXPR, type, exp,
4372 fold_convert (etype, high));
4375 return fold_build2 (GE_EXPR, type, exp,
4376 fold_convert (etype, low));
4378 if (operand_equal_p (low, high, 0))
4379 return fold_build2 (EQ_EXPR, type, exp,
4380 fold_convert (etype, low));
4382 if (integer_zerop (low))
4384 if (! TYPE_UNSIGNED (etype))
4386 etype = unsigned_type_for (etype);
4387 high = fold_convert (etype, high);
4388 exp = fold_convert (etype, exp);
4390 return build_range_check (type, exp, 1, 0, high);
4393 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4394 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4396 unsigned HOST_WIDE_INT lo;
4400 prec = TYPE_PRECISION (etype);
4401 if (prec <= HOST_BITS_PER_WIDE_INT)
4404 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4408 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4409 lo = (unsigned HOST_WIDE_INT) -1;
4412 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4414 if (TYPE_UNSIGNED (etype))
4416 etype = signed_type_for (etype);
4417 exp = fold_convert (etype, exp);
4419 return fold_build2 (GT_EXPR, type, exp,
4420 build_int_cst (etype, 0));
4424 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4425 This requires wrap-around arithmetics for the type of the expression. */
4426 switch (TREE_CODE (etype))
4429 /* There is no requirement that LOW be within the range of ETYPE
4430 if the latter is a subtype. It must, however, be within the base
4431 type of ETYPE. So be sure we do the subtraction in that type. */
4432 if (TREE_TYPE (etype))
4433 etype = TREE_TYPE (etype);
4438 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4439 TYPE_UNSIGNED (etype));
4446 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4447 if (TREE_CODE (etype) == INTEGER_TYPE
4448 && !TYPE_OVERFLOW_WRAPS (etype))
4450 tree utype, minv, maxv;
4452 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4453 for the type in question, as we rely on this here. */
4454 utype = unsigned_type_for (etype);
4455 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4456 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4457 integer_one_node, 1);
4458 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4460 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4467 high = fold_convert (etype, high);
4468 low = fold_convert (etype, low);
4469 exp = fold_convert (etype, exp);
4471 value = const_binop (MINUS_EXPR, high, low, 0);
4474 if (POINTER_TYPE_P (etype))
4476 if (value != 0 && !TREE_OVERFLOW (value))
4478 low = fold_convert (sizetype, low);
4479 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4480 return build_range_check (type,
4481 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4482 1, build_int_cst (etype, 0), value);
4487 if (value != 0 && !TREE_OVERFLOW (value))
4488 return build_range_check (type,
4489 fold_build2 (MINUS_EXPR, etype, exp, low),
4490 1, build_int_cst (etype, 0), value);
4495 /* Return the predecessor of VAL in its type, handling the infinite case. */
4498 range_predecessor (tree val)
4500 tree type = TREE_TYPE (val);
4502 if (INTEGRAL_TYPE_P (type)
4503 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4506 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4509 /* Return the successor of VAL in its type, handling the infinite case. */
4512 range_successor (tree val)
4514 tree type = TREE_TYPE (val);
4516 if (INTEGRAL_TYPE_P (type)
4517 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4520 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4523 /* Given two ranges, see if we can merge them into one. Return 1 if we
4524 can, 0 if we can't. Set the output range into the specified parameters. */
4527 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4528 tree high0, int in1_p, tree low1, tree high1)
4536 int lowequal = ((low0 == 0 && low1 == 0)
4537 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4538 low0, 0, low1, 0)));
4539 int highequal = ((high0 == 0 && high1 == 0)
4540 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4541 high0, 1, high1, 1)));
4543 /* Make range 0 be the range that starts first, or ends last if they
4544 start at the same value. Swap them if it isn't. */
4545 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4548 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4549 high1, 1, high0, 1))))
4551 temp = in0_p, in0_p = in1_p, in1_p = temp;
4552 tem = low0, low0 = low1, low1 = tem;
4553 tem = high0, high0 = high1, high1 = tem;
4556 /* Now flag two cases, whether the ranges are disjoint or whether the
4557 second range is totally subsumed in the first. Note that the tests
4558 below are simplified by the ones above. */
4559 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4560 high0, 1, low1, 0));
4561 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4562 high1, 1, high0, 1));
4564 /* We now have four cases, depending on whether we are including or
4565 excluding the two ranges. */
4568 /* If they don't overlap, the result is false. If the second range
4569 is a subset it is the result. Otherwise, the range is from the start
4570 of the second to the end of the first. */
4572 in_p = 0, low = high = 0;
4574 in_p = 1, low = low1, high = high1;
4576 in_p = 1, low = low1, high = high0;
4579 else if (in0_p && ! in1_p)
4581 /* If they don't overlap, the result is the first range. If they are
4582 equal, the result is false. If the second range is a subset of the
4583 first, and the ranges begin at the same place, we go from just after
4584 the end of the second range to the end of the first. If the second
4585 range is not a subset of the first, or if it is a subset and both
4586 ranges end at the same place, the range starts at the start of the
4587 first range and ends just before the second range.
4588 Otherwise, we can't describe this as a single range. */
4590 in_p = 1, low = low0, high = high0;
4591 else if (lowequal && highequal)
4592 in_p = 0, low = high = 0;
4593 else if (subset && lowequal)
4595 low = range_successor (high1);
4600 /* We are in the weird situation where high0 > high1 but
4601 high1 has no successor. Punt. */
4605 else if (! subset || highequal)
4608 high = range_predecessor (low1);
4612 /* low0 < low1 but low1 has no predecessor. Punt. */
4620 else if (! in0_p && in1_p)
4622 /* If they don't overlap, the result is the second range. If the second
4623 is a subset of the first, the result is false. Otherwise,
4624 the range starts just after the first range and ends at the
4625 end of the second. */
4627 in_p = 1, low = low1, high = high1;
4628 else if (subset || highequal)
4629 in_p = 0, low = high = 0;
4632 low = range_successor (high0);
4637 /* high1 > high0 but high0 has no successor. Punt. */
4645 /* The case where we are excluding both ranges. Here the complex case
4646 is if they don't overlap. In that case, the only time we have a
4647 range is if they are adjacent. If the second is a subset of the
4648 first, the result is the first. Otherwise, the range to exclude
4649 starts at the beginning of the first range and ends at the end of the
4653 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4654 range_successor (high0),
4656 in_p = 0, low = low0, high = high1;
4659 /* Canonicalize - [min, x] into - [-, x]. */
4660 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4661 switch (TREE_CODE (TREE_TYPE (low0)))
4664 if (TYPE_PRECISION (TREE_TYPE (low0))
4665 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4669 if (tree_int_cst_equal (low0,
4670 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4674 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4675 && integer_zerop (low0))
4682 /* Canonicalize - [x, max] into - [x, -]. */
4683 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4684 switch (TREE_CODE (TREE_TYPE (high1)))
4687 if (TYPE_PRECISION (TREE_TYPE (high1))
4688 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4692 if (tree_int_cst_equal (high1,
4693 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4697 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4698 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4700 integer_one_node, 1)))
4707 /* The ranges might be also adjacent between the maximum and
4708 minimum values of the given type. For
4709 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4710 return + [x + 1, y - 1]. */
4711 if (low0 == 0 && high1 == 0)
4713 low = range_successor (high0);
4714 high = range_predecessor (low1);
4715 if (low == 0 || high == 0)
4725 in_p = 0, low = low0, high = high0;
4727 in_p = 0, low = low0, high = high1;
4730 *pin_p = in_p, *plow = low, *phigh = high;
4735 /* Subroutine of fold, looking inside expressions of the form
4736 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4737 of the COND_EXPR. This function is being used also to optimize
4738 A op B ? C : A, by reversing the comparison first.
4740 Return a folded expression whose code is not a COND_EXPR
4741 anymore, or NULL_TREE if no folding opportunity is found. */
4744 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4746 enum tree_code comp_code = TREE_CODE (arg0);
4747 tree arg00 = TREE_OPERAND (arg0, 0);
4748 tree arg01 = TREE_OPERAND (arg0, 1);
4749 tree arg1_type = TREE_TYPE (arg1);
4755 /* If we have A op 0 ? A : -A, consider applying the following
4758 A == 0? A : -A same as -A
4759 A != 0? A : -A same as A
4760 A >= 0? A : -A same as abs (A)
4761 A > 0? A : -A same as abs (A)
4762 A <= 0? A : -A same as -abs (A)
4763 A < 0? A : -A same as -abs (A)
4765 None of these transformations work for modes with signed
4766 zeros. If A is +/-0, the first two transformations will
4767 change the sign of the result (from +0 to -0, or vice
4768 versa). The last four will fix the sign of the result,
4769 even though the original expressions could be positive or
4770 negative, depending on the sign of A.
4772 Note that all these transformations are correct if A is
4773 NaN, since the two alternatives (A and -A) are also NaNs. */
4774 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4775 ? real_zerop (arg01)
4776 : integer_zerop (arg01))
4777 && ((TREE_CODE (arg2) == NEGATE_EXPR
4778 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4779 /* In the case that A is of the form X-Y, '-A' (arg2) may
4780 have already been folded to Y-X, check for that. */
4781 || (TREE_CODE (arg1) == MINUS_EXPR
4782 && TREE_CODE (arg2) == MINUS_EXPR
4783 && operand_equal_p (TREE_OPERAND (arg1, 0),
4784 TREE_OPERAND (arg2, 1), 0)
4785 && operand_equal_p (TREE_OPERAND (arg1, 1),
4786 TREE_OPERAND (arg2, 0), 0))))
4791 tem = fold_convert (arg1_type, arg1);
4792 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4795 return pedantic_non_lvalue (fold_convert (type, arg1));
4798 if (flag_trapping_math)
4803 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4804 arg1 = fold_convert (signed_type_for
4805 (TREE_TYPE (arg1)), arg1);
4806 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4807 return pedantic_non_lvalue (fold_convert (type, tem));
4810 if (flag_trapping_math)
4814 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4815 arg1 = fold_convert (signed_type_for
4816 (TREE_TYPE (arg1)), arg1);
4817 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4818 return negate_expr (fold_convert (type, tem));
4820 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4824 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4825 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4826 both transformations are correct when A is NaN: A != 0
4827 is then true, and A == 0 is false. */
4829 if (integer_zerop (arg01) && integer_zerop (arg2))
4831 if (comp_code == NE_EXPR)
4832 return pedantic_non_lvalue (fold_convert (type, arg1));
4833 else if (comp_code == EQ_EXPR)
4834 return build_int_cst (type, 0);
4837 /* Try some transformations of A op B ? A : B.
4839 A == B? A : B same as B
4840 A != B? A : B same as A
4841 A >= B? A : B same as max (A, B)
4842 A > B? A : B same as max (B, A)
4843 A <= B? A : B same as min (A, B)
4844 A < B? A : B same as min (B, A)
4846 As above, these transformations don't work in the presence
4847 of signed zeros. For example, if A and B are zeros of
4848 opposite sign, the first two transformations will change
4849 the sign of the result. In the last four, the original
4850 expressions give different results for (A=+0, B=-0) and
4851 (A=-0, B=+0), but the transformed expressions do not.
4853 The first two transformations are correct if either A or B
4854 is a NaN. In the first transformation, the condition will
4855 be false, and B will indeed be chosen. In the case of the
4856 second transformation, the condition A != B will be true,
4857 and A will be chosen.
4859 The conversions to max() and min() are not correct if B is
4860 a number and A is not. The conditions in the original
4861 expressions will be false, so all four give B. The min()
4862 and max() versions would give a NaN instead. */
4863 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4864 /* Avoid these transformations if the COND_EXPR may be used
4865 as an lvalue in the C++ front-end. PR c++/19199. */
4867 || (strcmp (lang_hooks.name, "GNU C++") != 0
4868 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4869 || ! maybe_lvalue_p (arg1)
4870 || ! maybe_lvalue_p (arg2)))
4872 tree comp_op0 = arg00;
4873 tree comp_op1 = arg01;
4874 tree comp_type = TREE_TYPE (comp_op0);
4876 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4877 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4887 return pedantic_non_lvalue (fold_convert (type, arg2));
4889 return pedantic_non_lvalue (fold_convert (type, arg1));
4894 /* In C++ a ?: expression can be an lvalue, so put the
4895 operand which will be used if they are equal first
4896 so that we can convert this back to the
4897 corresponding COND_EXPR. */
4898 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4900 comp_op0 = fold_convert (comp_type, comp_op0);
4901 comp_op1 = fold_convert (comp_type, comp_op1);
4902 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4903 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4904 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4905 return pedantic_non_lvalue (fold_convert (type, tem));
4912 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4914 comp_op0 = fold_convert (comp_type, comp_op0);
4915 comp_op1 = fold_convert (comp_type, comp_op1);
4916 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4917 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4918 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4919 return pedantic_non_lvalue (fold_convert (type, tem));
4923 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4924 return pedantic_non_lvalue (fold_convert (type, arg2));
4927 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4928 return pedantic_non_lvalue (fold_convert (type, arg1));
4931 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4936 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4937 we might still be able to simplify this. For example,
4938 if C1 is one less or one more than C2, this might have started
4939 out as a MIN or MAX and been transformed by this function.
4940 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4942 if (INTEGRAL_TYPE_P (type)
4943 && TREE_CODE (arg01) == INTEGER_CST
4944 && TREE_CODE (arg2) == INTEGER_CST)
4948 /* We can replace A with C1 in this case. */
4949 arg1 = fold_convert (type, arg01);
4950 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4953 /* If C1 is C2 + 1, this is min(A, C2). */
4954 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4956 && operand_equal_p (arg01,
4957 const_binop (PLUS_EXPR, arg2,
4958 build_int_cst (type, 1), 0),
4960 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4962 fold_convert (type, arg1),
4967 /* If C1 is C2 - 1, this is min(A, C2). */
4968 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4970 && operand_equal_p (arg01,
4971 const_binop (MINUS_EXPR, arg2,
4972 build_int_cst (type, 1), 0),
4974 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4976 fold_convert (type, arg1),
4981 /* If C1 is C2 - 1, this is max(A, C2). */
4982 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4984 && operand_equal_p (arg01,
4985 const_binop (MINUS_EXPR, arg2,
4986 build_int_cst (type, 1), 0),
4988 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4990 fold_convert (type, arg1),
4995 /* If C1 is C2 + 1, this is max(A, C2). */
4996 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4998 && operand_equal_p (arg01,
4999 const_binop (PLUS_EXPR, arg2,
5000 build_int_cst (type, 1), 0),
5002 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5004 fold_convert (type, arg1),
5018 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5019 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5022 /* EXP is some logical combination of boolean tests. See if we can
5023 merge it into some range test. Return the new tree if so. */
/* NOTE(review): interior lines of this excerpt appear elided (the embedded
   original line numbers jump); code below is kept byte-identical.  */
5026 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
/* Nonzero when the combination is an OR rather than an AND.  */
5028 int or_op = (code == TRUTH_ORIF_EXPR
5029 || code == TRUTH_OR_EXPR);
5030 int in0_p, in1_p, in_p;
5031 tree low0, low1, low, high0, high1, high;
5032 bool strict_overflow_p = false;
/* Decompose each operand into an equivalent "value in [low, high]" test;
   make_range also records whether that relied on undefined signed
   overflow via STRICT_OVERFLOW_P.  */
5033 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5034 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5036 const char * const warnmsg = G_("assuming signed overflow does not occur "
5037 "when simplifying range test");
5039 /* If this is an OR operation, invert both sides; we will invert
5040 again at the end. */
5042 in0_p = ! in0_p, in1_p = ! in1_p;
5044 /* If both expressions are the same, if we can merge the ranges, and we
5045 can build the range test, return it or it inverted. If one of the
5046 ranges is always true or always false, consider it to be the same
5047 expression as the other. */
5048 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5049 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5051 && 0 != (tem = (build_range_check (type,
5053 : rhs != 0 ? rhs : integer_zero_node,
5056 if (strict_overflow_p)
5057 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5058 return or_op ? invert_truthvalue (tem) : tem;
5061 /* On machines where the branch cost is expensive, if this is a
5062 short-circuited branch and the underlying object on both sides
5063 is the same, make a non-short-circuit operation. */
5064 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5065 && lhs != 0 && rhs != 0
5066 && (code == TRUTH_ANDIF_EXPR
5067 || code == TRUTH_ORIF_EXPR)
5068 && operand_equal_p (lhs, rhs, 0))
5070 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5071 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5072 which cases we can't do this. */
5073 if (simple_operand_p (lhs))
5074 return build2 (code == TRUTH_ANDIF_EXPR
5075 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5078 else if (lang_hooks.decls.global_bindings_p () == 0
5079 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Evaluate the common operand only once via SAVE_EXPR, then build both
   range checks against that saved value.  */
5081 tree common = save_expr (lhs);
5083 if (0 != (lhs = build_range_check (type, common,
5084 or_op ? ! in0_p : in0_p,
5086 && (0 != (rhs = build_range_check (type, common,
5087 or_op ? ! in1_p : in1_p,
5090 if (strict_overflow_p)
5091 fold_overflow_warning (warnmsg,
5092 WARN_STRICT_OVERFLOW_COMPARISON);
5093 return build2 (code == TRUTH_ANDIF_EXPR
5094 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5103 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5104 bit value. Arrange things so the extra bits will be set to zero if and
5105 only if C is signed-extended to its full width. If MASK is nonzero,
5106 it is an INTEGER_CST that should be AND'ed with the extra bits. */
/* NOTE(review): interior lines of this excerpt appear elided; code below is
   kept byte-identical.  */
5109 unextend (tree c, int p, int unsignedp, tree mask)
5111 tree type = TREE_TYPE (c);
5112 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Nothing to do when C already fills its mode or is unsigned.  */
5115 if (p == modesize || unsignedp)
5118 /* We work by getting just the sign bit into the low-order bit, then
5119 into the high-order bit, then sign-extend. We then XOR that value
/* Isolate the sign bit of the P-bit value in the low-order bit.  */
5121 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5122 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5124 /* We must use a signed type in order to get an arithmetic right shift.
5125 However, we must also avoid introducing accidental overflows, so that
5126 a subsequent call to integer_zerop will work. Hence we must
5127 do the type conversion here. At this point, the constant is either
5128 zero or one, and the conversion to a signed type can never overflow.
5129 We could get an overflow if this conversion is done anywhere else. */
5130 if (TYPE_UNSIGNED (type))
5131 temp = fold_convert (signed_type_for (type), temp);
/* Move the bit to the sign position, then arithmetic-shift it back down
   so it smears across the extra (high) bits.  */
5133 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5134 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5136 temp = const_binop (BIT_AND_EXPR, temp,
5137 fold_convert (TREE_TYPE (c), mask), 0);
5138 /* If necessary, convert the type back to match the type of C. */
5139 if (TYPE_UNSIGNED (type))
5140 temp = fold_convert (type, temp);
5142 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5145 /* Find ways of folding logical expressions of LHS and RHS:
5146 Try to merge two comparisons to the same innermost item.
5147 Look for range tests like "ch >= '0' && ch <= '9'".
5148 Look for combinations of simple terms on machines with expensive branches
5149 and evaluate the RHS unconditionally.
5151 For example, if we have p->a == 2 && p->b == 4 and we can make an
5152 object large enough to span both A and B, we can do this with a comparison
5153 against the object ANDed with the a mask.
5155 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5156 operations to do this with one comparison.
5158 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5159 function and the one above.
5161 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5162 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5164 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5167 We return the simplified tree or 0 if no optimization is possible. */
/* NOTE(review): interior lines of this excerpt appear elided (the embedded
   original line numbers jump); code below is kept byte-identical.  */
5170 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5172 /* If this is the "or" of two comparisons, we can do something if
5173 the comparisons are NE_EXPR. If this is the "and", we can do something
5174 if the comparisons are EQ_EXPR. I.e.,
5175 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5177 WANTED_CODE is this operation code. For single bit fields, we can
5178 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5179 comparison for one-bit fields. */
/* Naming scheme for the locals below: the first letter selects the
   comparison (l = LHS comparison, r = RHS comparison), the second letter
   selects its operand (l = left operand, r = right operand).  So ll_*
   describes the left operand of the left comparison, rr_* the right
   operand of the right comparison, etc.  */
5181 enum tree_code wanted_code;
5182 enum tree_code lcode, rcode;
5183 tree ll_arg, lr_arg, rl_arg, rr_arg;
5184 tree ll_inner, lr_inner, rl_inner, rr_inner;
5185 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5186 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5187 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5188 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5189 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5190 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5191 enum machine_mode lnmode, rnmode;
5192 tree ll_mask, lr_mask, rl_mask, rr_mask;
5193 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5194 tree l_const, r_const;
5195 tree lntype, rntype, result;
5196 int first_bit, end_bit;
5198 tree orig_lhs = lhs, orig_rhs = rhs;
5199 enum tree_code orig_code = code;
5201 /* Start by getting the comparison codes. Fail if anything is volatile.
5202 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5203 it were surrounded with a NE_EXPR. */
5205 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5208 lcode = TREE_CODE (lhs);
5209 rcode = TREE_CODE (rhs);
/* Canonicalize (x & 1) as (x & 1) != 0 on either side.  */
5211 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5213 lhs = build2 (NE_EXPR, truth_type, lhs,
5214 build_int_cst (TREE_TYPE (lhs), 0));
5218 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5220 rhs = build2 (NE_EXPR, truth_type, rhs,
5221 build_int_cst (TREE_TYPE (rhs), 0));
5225 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5226 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5229 ll_arg = TREE_OPERAND (lhs, 0);
5230 lr_arg = TREE_OPERAND (lhs, 1);
5231 rl_arg = TREE_OPERAND (rhs, 0);
5232 rr_arg = TREE_OPERAND (rhs, 1);
5234 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5235 if (simple_operand_p (ll_arg)
5236 && simple_operand_p (lr_arg))
/* Same operands in the same order: combine the comparison codes
   directly.  */
5239 if (operand_equal_p (ll_arg, rl_arg, 0)
5240 && operand_equal_p (lr_arg, rr_arg, 0))
5242 result = combine_comparisons (code, lcode, rcode,
5243 truth_type, ll_arg, lr_arg);
/* Same operands but swapped in the RHS comparison: swap its code
   first, then combine.  */
5247 else if (operand_equal_p (ll_arg, rr_arg, 0)
5248 && operand_equal_p (lr_arg, rl_arg, 0))
5250 result = combine_comparisons (code, lcode,
5251 swap_tree_comparison (rcode),
5252 truth_type, ll_arg, lr_arg);
/* From here on only the non-short-circuit variant matters.  */
5258 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5259 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5261 /* If the RHS can be evaluated unconditionally and its operands are
5262 simple, it wins to evaluate the RHS unconditionally on machines
5263 with expensive branches. In this case, this isn't a comparison
5264 that can be merged. Avoid doing this if the RHS is a floating-point
5265 comparison since those can trap. */
5267 if (BRANCH_COST >= 2
5268 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5269 && simple_operand_p (rl_arg)
5270 && simple_operand_p (rr_arg))
5272 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5273 if (code == TRUTH_OR_EXPR
5274 && lcode == NE_EXPR && integer_zerop (lr_arg)
5275 && rcode == NE_EXPR && integer_zerop (rr_arg)
5276 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5277 return build2 (NE_EXPR, truth_type,
5278 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5280 build_int_cst (TREE_TYPE (ll_arg), 0));
5282 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5283 if (code == TRUTH_AND_EXPR
5284 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5285 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5286 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5287 return build2 (EQ_EXPR, truth_type,
5288 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5290 build_int_cst (TREE_TYPE (ll_arg), 0));
5292 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
/* Avoid returning an unchanged tree, which would loop in fold.  */
5294 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5295 return build2 (code, truth_type, lhs, rhs);
5300 /* See if the comparisons can be merged. Then get all the parameters for
5303 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5304 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decompose all four comparison operands into (inner object, bit
   position/size, mode, signedness, mask) tuples.  */
5308 ll_inner = decode_field_reference (ll_arg,
5309 &ll_bitsize, &ll_bitpos, &ll_mode,
5310 &ll_unsignedp, &volatilep, &ll_mask,
5312 lr_inner = decode_field_reference (lr_arg,
5313 &lr_bitsize, &lr_bitpos, &lr_mode,
5314 &lr_unsignedp, &volatilep, &lr_mask,
5316 rl_inner = decode_field_reference (rl_arg,
5317 &rl_bitsize, &rl_bitpos, &rl_mode,
5318 &rl_unsignedp, &volatilep, &rl_mask,
5320 rr_inner = decode_field_reference (rr_arg,
5321 &rr_bitsize, &rr_bitpos, &rr_mode,
5322 &rr_unsignedp, &volatilep, &rr_mask,
5325 /* It must be true that the inner operation on the lhs of each
5326 comparison must be the same if we are to be able to do anything.
5327 Then see if we have constants. If not, the same must be true for
5329 if (volatilep || ll_inner == 0 || rl_inner == 0
5330 || ! operand_equal_p (ll_inner, rl_inner, 0))
5333 if (TREE_CODE (lr_arg) == INTEGER_CST
5334 && TREE_CODE (rr_arg) == INTEGER_CST)
5335 l_const = lr_arg, r_const = rr_arg;
5336 else if (lr_inner == 0 || rr_inner == 0
5337 || ! operand_equal_p (lr_inner, rr_inner, 0))
5340 l_const = r_const = 0;
5342 /* If either comparison code is not correct for our logical operation,
5343 fail. However, we can convert a one-bit comparison against zero into
5344 the opposite comparison against that bit being set in the field. */
5346 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5347 if (lcode != wanted_code)
5349 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5351 /* Make the left operand unsigned, since we are only interested
5352 in the value of one bit. Otherwise we are doing the wrong
5361 /* This is analogous to the code for l_const above. */
5362 if (rcode != wanted_code)
5364 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5373 /* See if we can find a mode that contains both fields being compared on
5374 the left. If we can't, fail. Otherwise, update all constants and masks
5375 to be relative to a field of that size. */
5376 first_bit = MIN (ll_bitpos, rl_bitpos);
5377 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5378 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5379 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5381 if (lnmode == VOIDmode)
5384 lnbitsize = GET_MODE_BITSIZE (lnmode);
5385 lnbitpos = first_bit & ~ (lnbitsize - 1);
5386 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5387 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5389 if (BYTES_BIG_ENDIAN)
5391 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5392 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
/* Shift the field masks into position within the wider field.  */
5395 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5396 size_int (xll_bitpos), 0);
5397 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5398 size_int (xrl_bitpos), 0);
5402 l_const = fold_convert (lntype, l_const);
5403 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5404 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
/* If the constant has bits outside the field's mask, the comparison
   is decided at compile time.  */
5405 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5406 fold_build1 (BIT_NOT_EXPR,
5410 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5412 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5417 r_const = fold_convert (lntype, r_const);
5418 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5419 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5420 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5421 fold_build1 (BIT_NOT_EXPR,
5425 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5427 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5431 /* If the right sides are not constant, do the same for it. Also,
5432 disallow this optimization if a size or signedness mismatch occurs
5433 between the left and right sides. */
5436 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5437 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5438 /* Make sure the two fields on the right
5439 correspond to the left without being swapped. */
5440 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5443 first_bit = MIN (lr_bitpos, rr_bitpos);
5444 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5445 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5446 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5448 if (rnmode == VOIDmode)
5451 rnbitsize = GET_MODE_BITSIZE (rnmode);
5452 rnbitpos = first_bit & ~ (rnbitsize - 1);
5453 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5454 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5456 if (BYTES_BIG_ENDIAN)
5458 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5459 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5462 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5463 size_int (xlr_bitpos), 0);
5464 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5465 size_int (xrr_bitpos), 0);
5467 /* Make a mask that corresponds to both fields being compared.
5468 Do this for both items being compared. If the operands are the
5469 same size and the bits being compared are in the same position
5470 then we can do this by masking both and comparing the masked
5472 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5473 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5474 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5476 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5477 ll_unsignedp || rl_unsignedp)
5478 if (! all_ones_mask_p (ll_mask, lnbitsize))
5479 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5481 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5482 lr_unsignedp || rr_unsignedp);
5483 if (! all_ones_mask_p (lr_mask, rnbitsize))
5484 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5486 return build2 (wanted_code, truth_type, lhs, rhs);
5489 /* There is still another way we can do something: If both pairs of
5490 fields being compared are adjacent, we may be able to make a wider
5491 field containing them both.
5493 Note that we still must mask the lhs/rhs expressions. Furthermore,
5494 the mask must be shifted to account for the shift done by
5495 make_bit_field_ref. */
5496 if ((ll_bitsize + ll_bitpos == rl_bitpos
5497 && lr_bitsize + lr_bitpos == rr_bitpos)
5498 || (ll_bitpos == rl_bitpos + rl_bitsize
5499 && lr_bitpos == rr_bitpos + rr_bitsize))
5503 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5504 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5505 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5506 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5508 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5509 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5510 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5511 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5513 /* Convert to the smaller type before masking out unwanted bits. */
5515 if (lntype != rntype)
5517 if (lnbitsize > rnbitsize)
5519 lhs = fold_convert (rntype, lhs);
5520 ll_mask = fold_convert (rntype, ll_mask);
5523 else if (lnbitsize < rnbitsize)
5525 rhs = fold_convert (lntype, rhs);
5526 lr_mask = fold_convert (lntype, lr_mask);
5531 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5532 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5534 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5535 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5537 return build2 (wanted_code, truth_type, lhs, rhs);
5543 /* Handle the case of comparisons with constants. If there is something in
5544 common between the masks, those bits of the constants must be the same.
5545 If not, the condition is always false. Test for this to avoid generating
5546 incorrect code below. */
5547 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5548 if (! integer_zerop (result)
5549 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5550 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5552 if (wanted_code == NE_EXPR)
5554 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5555 return constant_boolean_node (true, truth_type);
5559 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5560 return constant_boolean_node (false, truth_type);
5564 /* Construct the expression we will return. First get the component
5565 reference we will make. Unless the mask is all ones the width of
5566 that field, perform the mask operation. Then compare with the
5568 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5569 ll_unsignedp || rl_unsignedp);
5571 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5572 if (! all_ones_mask_p (ll_mask, lnbitsize))
5573 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5575 return build2 (wanted_code, truth_type, result,
5576 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5579 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
/* NOTE(review): interior lines of this excerpt appear elided; code below is
   kept byte-identical.  */
5583 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5586 enum tree_code op_code;
5587 tree comp_const = op1;
5589 int consts_equal, consts_lt;
5592 STRIP_SIGN_NOPS (arg0);
5594 op_code = TREE_CODE (arg0);
/* MINMAX_CONST is the constant operand of the MIN/MAX, COMP_CONST the
   constant we are comparing against; record their relationship.  */
5595 minmax_const = TREE_OPERAND (arg0, 1);
5596 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5597 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5598 inner = TREE_OPERAND (arg0, 0);
5600 /* If something does not permit us to optimize, return the original tree. */
5601 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5602 || TREE_CODE (comp_const) != INTEGER_CST
5603 || TREE_OVERFLOW (comp_const)
5604 || TREE_CODE (minmax_const) != INTEGER_CST
5605 || TREE_OVERFLOW (minmax_const))
5608 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5609 and GT_EXPR, doing the rest with recursive calls using logical
/* NE/LT/LE are folded as the inversion of the opposite comparison.  */
5613 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5615 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5618 return invert_truthvalue (tem);
/* GE is handled as (== || >) via a recursive decomposition.  */
5624 fold_build2 (TRUTH_ORIF_EXPR, type,
5625 optimize_minmax_comparison
5626 (EQ_EXPR, type, arg0, comp_const),
5627 optimize_minmax_comparison
5628 (GT_EXPR, type, arg0, comp_const));
5631 if (op_code == MAX_EXPR && consts_equal)
5632 /* MAX (X, 0) == 0 -> X <= 0 */
5633 return fold_build2 (LE_EXPR, type, inner, comp_const);
5635 else if (op_code == MAX_EXPR && consts_lt)
5636 /* MAX (X, 0) == 5 -> X == 5 */
5637 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5639 else if (op_code == MAX_EXPR)
5640 /* MAX (X, 0) == -1 -> false */
5641 return omit_one_operand (type, integer_zero_node, inner);
5643 else if (consts_equal)
5644 /* MIN (X, 0) == 0 -> X >= 0 */
5645 return fold_build2 (GE_EXPR, type, inner, comp_const);
5648 /* MIN (X, 0) == 5 -> false */
5649 return omit_one_operand (type, integer_zero_node, inner);
5652 /* MIN (X, 0) == -1 -> X == -1 */
5653 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5656 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5657 /* MAX (X, 0) > 0 -> X > 0
5658 MAX (X, 0) > 5 -> X > 5 */
5659 return fold_build2 (GT_EXPR, type, inner, comp_const);
5661 else if (op_code == MAX_EXPR)
5662 /* MAX (X, 0) > -1 -> true */
5663 return omit_one_operand (type, integer_one_node, inner);
5665 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5666 /* MIN (X, 0) > 0 -> false
5667 MIN (X, 0) > 5 -> false */
5668 return omit_one_operand (type, integer_zero_node, inner);
5671 /* MIN (X, 0) > -1 -> X > -1 */
5672 return fold_build2 (GT_EXPR, type, inner, comp_const);
5679 /* T is an integer expression that is being multiplied, divided, or taken a
5680 modulus (CODE says which and what kind of divide or modulus) by a
5681 constant C. See if we can eliminate that operation by folding it with
5682 other operations already in T. WIDE_TYPE, if non-null, is a type that
5683 should be used for the computation if wider than our type.
5685 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5686 (X * 2) + (Y * 4). We must, however, be assured that either the original
5687 expression would not overflow or that overflow is undefined for the type
5688 in the language in question.
5690 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5691 the machine has a multiply-accumulate insn or that this is part of an
5692 addressing calculation.
5694 If we return a non-null expression, it is an equivalent form of the
5695 original computation, but need not be in the original type.
5697 We set *STRICT_OVERFLOW_P to true if the return values depends on
5698 signed overflow being undefined. Otherwise we do not change
5699 *STRICT_OVERFLOW_P. */
/* NOTE(review): interior lines of this excerpt appear elided; code below is
   kept byte-identical.  Presumably the elided lines maintain the recursion
   depth counter referred to by the comment — TODO confirm against the full
   source.  */
5702 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5703 bool *strict_overflow_p)
5705 /* To avoid exponential search depth, refuse to allow recursion past
5706 three levels. Beyond that (1) it's highly unlikely that we'll find
5707 something interesting and (2) we've probably processed it before
5708 when we built the inner expression. */
/* Delegate the actual work to extract_muldiv_1.  */
5717 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5724 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5725 bool *strict_overflow_p)
5727 tree type = TREE_TYPE (t);
5728 enum tree_code tcode = TREE_CODE (t);
5729 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5730 > GET_MODE_SIZE (TYPE_MODE (type)))
5731 ? wide_type : type);
5733 int same_p = tcode == code;
5734 tree op0 = NULL_TREE, op1 = NULL_TREE;
5735 bool sub_strict_overflow_p;
5737 /* Don't deal with constants of zero here; they confuse the code below. */
5738 if (integer_zerop (c))
5741 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5742 op0 = TREE_OPERAND (t, 0);
5744 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5745 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5747 /* Note that we need not handle conditional operations here since fold
5748 already handles those cases. So just do arithmetic here. */
5752 /* For a constant, we can always simplify if we are a multiply
5753 or (for divide and modulus) if it is a multiple of our constant. */
5754 if (code == MULT_EXPR
5755 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5756 return const_binop (code, fold_convert (ctype, t),
5757 fold_convert (ctype, c), 0);
5760 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5761 /* If op0 is an expression ... */
5762 if ((COMPARISON_CLASS_P (op0)
5763 || UNARY_CLASS_P (op0)
5764 || BINARY_CLASS_P (op0)
5765 || VL_EXP_CLASS_P (op0)
5766 || EXPRESSION_CLASS_P (op0))
5767 /* ... and is unsigned, and its type is smaller than ctype,
5768 then we cannot pass through as widening. */
5769 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5770 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5771 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5772 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5773 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5774 /* ... or this is a truncation (t is narrower than op0),
5775 then we cannot pass through this narrowing. */
5776 || (GET_MODE_SIZE (TYPE_MODE (type))
5777 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5778 /* ... or signedness changes for division or modulus,
5779 then we cannot pass through this conversion. */
5780 || (code != MULT_EXPR
5781 && (TYPE_UNSIGNED (ctype)
5782 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5785 /* Pass the constant down and see if we can make a simplification. If
5786 we can, replace this expression with the inner simplification for
5787 possible later conversion to our or some other type. */
5788 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5789 && TREE_CODE (t2) == INTEGER_CST
5790 && !TREE_OVERFLOW (t2)
5791 && (0 != (t1 = extract_muldiv (op0, t2, code,
5793 ? ctype : NULL_TREE,
5794 strict_overflow_p))))
5799 /* If widening the type changes it from signed to unsigned, then we
5800 must avoid building ABS_EXPR itself as unsigned. */
5801 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5803 tree cstype = (*signed_type_for) (ctype);
5804 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5807 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5808 return fold_convert (ctype, t1);
5814 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5816 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5819 case MIN_EXPR: case MAX_EXPR:
5820 /* If widening the type changes the signedness, then we can't perform
5821 this optimization as that changes the result. */
5822 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5825 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5826 sub_strict_overflow_p = false;
5827 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5828 &sub_strict_overflow_p)) != 0
5829 && (t2 = extract_muldiv (op1, c, code, wide_type,
5830 &sub_strict_overflow_p)) != 0)
5832 if (tree_int_cst_sgn (c) < 0)
5833 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5834 if (sub_strict_overflow_p)
5835 *strict_overflow_p = true;
5836 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5837 fold_convert (ctype, t2));
5841 case LSHIFT_EXPR: case RSHIFT_EXPR:
5842 /* If the second operand is constant, this is a multiplication
5843 or floor division, by a power of two, so we can treat it that
5844 way unless the multiplier or divisor overflows. Signed
5845 left-shift overflow is implementation-defined rather than
5846 undefined in C90, so do not convert signed left shift into
5848 if (TREE_CODE (op1) == INTEGER_CST
5849 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5850 /* const_binop may not detect overflow correctly,
5851 so check for it explicitly here. */
5852 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5853 && TREE_INT_CST_HIGH (op1) == 0
5854 && 0 != (t1 = fold_convert (ctype,
5855 const_binop (LSHIFT_EXPR,
5858 && !TREE_OVERFLOW (t1))
5859 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5860 ? MULT_EXPR : FLOOR_DIV_EXPR,
5861 ctype, fold_convert (ctype, op0), t1),
5862 c, code, wide_type, strict_overflow_p);
5865 case PLUS_EXPR: case MINUS_EXPR:
5866 /* See if we can eliminate the operation on both sides. If we can, we
5867 can return a new PLUS or MINUS. If we can't, the only remaining
5868 cases where we can do anything are if the second operand is a
5870 sub_strict_overflow_p = false;
5871 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5872 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5873 if (t1 != 0 && t2 != 0
5874 && (code == MULT_EXPR
5875 /* If not multiplication, we can only do this if both operands
5876 are divisible by c. */
5877 || (multiple_of_p (ctype, op0, c)
5878 && multiple_of_p (ctype, op1, c))))
5880 if (sub_strict_overflow_p)
5881 *strict_overflow_p = true;
5882 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5883 fold_convert (ctype, t2));
5886 /* If this was a subtraction, negate OP1 and set it to be an addition.
5887 This simplifies the logic below. */
5888 if (tcode == MINUS_EXPR)
5889 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5891 if (TREE_CODE (op1) != INTEGER_CST)
5894 /* If either OP1 or C are negative, this optimization is not safe for
5895 some of the division and remainder types while for others we need
5896 to change the code. */
5897 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5899 if (code == CEIL_DIV_EXPR)
5900 code = FLOOR_DIV_EXPR;
5901 else if (code == FLOOR_DIV_EXPR)
5902 code = CEIL_DIV_EXPR;
5903 else if (code != MULT_EXPR
5904 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5908 /* If it's a multiply or a division/modulus operation of a multiple
5909 of our constant, do the operation and verify it doesn't overflow. */
5910 if (code == MULT_EXPR
5911 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5913 op1 = const_binop (code, fold_convert (ctype, op1),
5914 fold_convert (ctype, c), 0);
5915 /* We allow the constant to overflow with wrapping semantics. */
5917 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5923 /* If we have an unsigned type is not a sizetype, we cannot widen
5924 the operation since it will change the result if the original
5925 computation overflowed. */
5926 if (TYPE_UNSIGNED (ctype)
5927 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5931 /* If we were able to eliminate our operation from the first side,
5932 apply our operation to the second side and reform the PLUS. */
5933 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5934 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5936 /* The last case is if we are a multiply. In that case, we can
5937 apply the distributive law to commute the multiply and addition
5938 if the multiplication of the constants doesn't overflow. */
5939 if (code == MULT_EXPR)
5940 return fold_build2 (tcode, ctype,
5941 fold_build2 (code, ctype,
5942 fold_convert (ctype, op0),
5943 fold_convert (ctype, c)),
5949 /* We have a special case here if we are doing something like
5950 (C * 8) % 4 since we know that's zero. */
5951 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5952 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5953 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5954 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5955 return omit_one_operand (type, integer_zero_node, op0);
5957 /* ... fall through ... */
5959 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5960 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5961 /* If we can extract our operation from the LHS, do so and return a
5962 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5963 do something only if the second operand is a constant. */
5965 && (t1 = extract_muldiv (op0, c, code, wide_type,
5966 strict_overflow_p)) != 0)
5967 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5968 fold_convert (ctype, op1));
5969 else if (tcode == MULT_EXPR && code == MULT_EXPR
5970 && (t1 = extract_muldiv (op1, c, code, wide_type,
5971 strict_overflow_p)) != 0)
5972 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5973 fold_convert (ctype, t1));
5974 else if (TREE_CODE (op1) != INTEGER_CST)
5977 /* If these are the same operation types, we can associate them
5978 assuming no overflow. */
5980 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5981 fold_convert (ctype, c), 0))
5982 && !TREE_OVERFLOW (t1))
5983 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5985 /* If these operations "cancel" each other, we have the main
5986 optimizations of this pass, which occur when either constant is a
5987 multiple of the other, in which case we replace this with either an
5988 operation or CODE or TCODE.
5990 If we have an unsigned type that is not a sizetype, we cannot do
5991 this since it will change the result if the original computation
5993 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5994 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5995 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5996 || (tcode == MULT_EXPR
5997 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5998 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
6000 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6002 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6003 *strict_overflow_p = true;
6004 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6005 fold_convert (ctype,
6006 const_binop (TRUNC_DIV_EXPR,
6009 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6011 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6012 *strict_overflow_p = true;
6013 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6014 fold_convert (ctype,
6015 const_binop (TRUNC_DIV_EXPR,
6028 /* Return a node which has the indicated constant VALUE (either 0 or
6029 1), and is of the indicated TYPE. */
6032 constant_boolean_node (int value, tree type)
/* Prefer the shared singleton nodes for the common types so that
   pointer-identity checks elsewhere in the compiler keep working.  */
6034 if (type == integer_type_node)
6035 return value ? integer_one_node : integer_zero_node;
6036 else if (type == boolean_type_node)
6037 return value ? boolean_true_node : boolean_false_node;
/* For any other type, materialize a fresh integer constant of TYPE.
   NOTE(review): this listing elides some lines (numbering gaps).  */
6039 return build_int_cst (type, value);
6043 /* Return true if expr looks like an ARRAY_REF and set base and
6044 offset to the appropriate trees. If there is no offset,
6045 offset is set to NULL_TREE. Base will be canonicalized to
6046 something you can get the element type from using
6047 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
6048 in bytes to the base in sizetype. */
6051 extract_array_ref (tree expr, tree *base, tree *offset)
6053 /* One canonical form is a PLUS_EXPR with the first
6054 argument being an ADDR_EXPR with a possible NOP_EXPR
6056 if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
6058 tree op0 = TREE_OPERAND (expr, 0);
6059 tree inner_base, dummy1;
6060 /* Strip NOP_EXPRs here because the C frontends and/or
6061 folders present us (int *)&x.a p+ 4 possibly. */
/* Recurse on the pointer operand; any offset found inside is
   added to the byte offset computed at this level.  */
6063 if (extract_array_ref (op0, &inner_base, &dummy1))
6066 *offset = fold_convert (sizetype, TREE_OPERAND (expr, 1));
6067 if (dummy1 != NULL_TREE)
6068 *offset = fold_build2 (PLUS_EXPR, sizetype,
6073 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
6074 which we transform into an ADDR_EXPR with appropriate
6075 offset. For other arguments to the ADDR_EXPR we assume
6076 zero offset and as such do not care about the ADDR_EXPR
6077 type and strip possible nops from it. */
6078 else if (TREE_CODE (expr) == ADDR_EXPR)
6080 tree op0 = TREE_OPERAND (expr, 0);
6081 if (TREE_CODE (op0) == ARRAY_REF)
6083 tree idx = TREE_OPERAND (op0, 1);
6084 *base = TREE_OPERAND (op0, 0);
/* Byte offset = index * element size, widened to sizetype.  */
6085 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
6086 array_ref_element_size (op0));
6087 *offset = fold_convert (sizetype, *offset);
6091 /* Handle array-to-pointer decay as &a. */
6092 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
6093 *base = TREE_OPERAND (expr, 0);
6096 *offset = NULL_TREE;
6100 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
6101 else if (SSA_VAR_P (expr)
6102 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
6105 *offset = NULL_TREE;
6113 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6114 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6115 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6116 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6117 COND is the first argument to CODE; otherwise (as in the example
6118 given here), it is the second argument. TYPE is the type of the
6119 original expression. Return NULL_TREE if no simplification is
6123 fold_binary_op_with_conditional_arg (enum tree_code code,
6124 tree type, tree op0, tree op1,
6125 tree cond, tree arg, int cond_first_p)
6127 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6128 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6129 tree test, true_value, false_value;
6130 tree lhs = NULL_TREE;
6131 tree rhs = NULL_TREE;
6133 /* This transformation is only worthwhile if we don't have to wrap
6134 arg in a SAVE_EXPR, and the operation can be simplified on at least
6135 one of the branches once its pushed inside the COND_EXPR. */
6136 if (!TREE_CONSTANT (arg))
6139 if (TREE_CODE (cond) == COND_EXPR)
6141 test = TREE_OPERAND (cond, 0);
6142 true_value = TREE_OPERAND (cond, 1);
6143 false_value = TREE_OPERAND (cond, 2);
6144 /* If this operand throws an exception, then it does not make
6145 sense to try to perform a logical or arithmetic operation
6147 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6149 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* COND is a bare comparison: treat it as COND ? true : false
   in the comparison's own type.  */
6154 tree testtype = TREE_TYPE (cond);
6156 true_value = constant_boolean_node (true, testtype);
6157 false_value = constant_boolean_node (false, testtype);
6160 arg = fold_convert (arg_type, arg);
/* Push the operation into each arm, preserving the original
   operand order indicated by COND_FIRST_P.  */
6163 true_value = fold_convert (cond_type, true_value);
6165 lhs = fold_build2 (code, type, true_value, arg);
6167 lhs = fold_build2 (code, type, arg, true_value);
6171 false_value = fold_convert (cond_type, false_value);
6173 rhs = fold_build2 (code, type, false_value, arg);
6175 rhs = fold_build2 (code, type, arg, false_value);
6178 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6179 return fold_convert (type, test);
6183 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6185 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6186 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6187 ADDEND is the same as X.
6189 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6190 and finite. The problematic cases are when X is zero, and its mode
6191 has signed zeros. In the case of rounding towards -infinity,
6192 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6193 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6196 fold_real_zero_addition_p (tree type, tree addend, int negate)
6198 if (!real_zerop (addend))
6201 /* Don't allow the fold with -fsignaling-nans. */
6202 if (HONOR_SNANS (TYPE_MODE (type)))
6205 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6206 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6209 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6210 if (TREE_CODE (addend) == REAL_CST
6211 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
/* NOTE(review): the statement that flips NEGATE for a -0.0 addend is
   elided from this listing (numbering gap) — confirm against the
   unelided source.  */
6214 /* The mode has signed zeros, and we have to honor their sign.
6215 In this situation, there is only one case we can return true for.
6216 X - 0 is the same as X unless rounding towards -infinity is
6218 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6221 /* Subroutine of fold() that checks comparisons of built-in math
6222 functions against real constants.
6224 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6225 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6226 is the type of the result and ARG0 and ARG1 are the operands of the
6227 comparison. ARG1 must be a TREE_REAL_CST.
6229 The function returns the constant folded tree if a simplification
6230 can be made, and NULL_TREE otherwise. */
6233 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6234 tree type, tree arg0, tree arg1)
/* Only sqrt-family builtins are handled here; everything else falls
   through and returns NULL_TREE.  */
6238 if (BUILTIN_SQRT_P (fcode))
6240 tree arg = CALL_EXPR_ARG (arg0, 0);
6241 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6243 c = TREE_REAL_CST (arg1);
6244 if (REAL_VALUE_NEGATIVE (c))
6246 /* sqrt(x) < y is always false, if y is negative. */
6247 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6248 return omit_one_operand (type, integer_zero_node, arg);
6250 /* sqrt(x) > y is always true, if y is negative and we
6251 don't care about NaNs, i.e. negative values of x. */
6252 if (code == NE_EXPR || !HONOR_NANS (mode))
6253 return omit_one_operand (type, integer_one_node, arg);
6255 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6256 return fold_build2 (GE_EXPR, type, arg,
6257 build_real (TREE_TYPE (arg), dconst0));
6259 else if (code == GT_EXPR || code == GE_EXPR)
/* Square the bound in the target mode; overflow to +Inf tells us
   the bound exceeds the representable range.  */
6263 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6264 real_convert (&c2, mode, &c2);
6266 if (REAL_VALUE_ISINF (c2))
6268 /* sqrt(x) > y is x == +Inf, when y is very large. */
6269 if (HONOR_INFINITIES (mode))
6270 return fold_build2 (EQ_EXPR, type, arg,
6271 build_real (TREE_TYPE (arg), c2));
6273 /* sqrt(x) > y is always false, when y is very large
6274 and we don't care about infinities. */
6275 return omit_one_operand (type, integer_zero_node, arg);
6278 /* sqrt(x) > c is the same as x > c*c. */
6279 return fold_build2 (code, type, arg,
6280 build_real (TREE_TYPE (arg), c2));
6282 else if (code == LT_EXPR || code == LE_EXPR)
6286 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6287 real_convert (&c2, mode, &c2);
6289 if (REAL_VALUE_ISINF (c2))
6291 /* sqrt(x) < y is always true, when y is a very large
6292 value and we don't care about NaNs or Infinities. */
6293 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6294 return omit_one_operand (type, integer_one_node, arg);
6296 /* sqrt(x) < y is x != +Inf when y is very large and we
6297 don't care about NaNs. */
6298 if (! HONOR_NANS (mode))
6299 return fold_build2 (NE_EXPR, type, arg,
6300 build_real (TREE_TYPE (arg), c2));
6302 /* sqrt(x) < y is x >= 0 when y is very large and we
6303 don't care about Infinities. */
6304 if (! HONOR_INFINITIES (mode))
6305 return fold_build2 (GE_EXPR, type, arg,
6306 build_real (TREE_TYPE (arg), dconst0));
6308 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6309 if (lang_hooks.decls.global_bindings_p () != 0
6310 || CONTAINS_PLACEHOLDER_P (arg))
/* ARG is evaluated twice below, so wrap it in a SAVE_EXPR.  */
6313 arg = save_expr (arg);
6314 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6315 fold_build2 (GE_EXPR, type, arg,
6316 build_real (TREE_TYPE (arg),
6318 fold_build2 (NE_EXPR, type, arg,
6319 build_real (TREE_TYPE (arg),
6323 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6324 if (! HONOR_NANS (mode))
6325 return fold_build2 (code, type, arg,
6326 build_real (TREE_TYPE (arg), c2));
6328 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6329 if (lang_hooks.decls.global_bindings_p () == 0
6330 && ! CONTAINS_PLACEHOLDER_P (arg))
6332 arg = save_expr (arg);
6333 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6334 fold_build2 (GE_EXPR, type, arg,
6335 build_real (TREE_TYPE (arg),
6337 fold_build2 (code, type, arg,
6338 build_real (TREE_TYPE (arg),
6347 /* Subroutine of fold() that optimizes comparisons against Infinities,
6348 either +Inf or -Inf.
6350 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6351 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6352 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6354 The function returns the constant folded tree if a simplification
6355 can be made, and NULL_TREE otherwise. */
6358 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6360 enum machine_mode mode;
6361 REAL_VALUE_TYPE max;
6365 mode = TYPE_MODE (TREE_TYPE (arg0));
6367 /* For negative infinity swap the sense of the comparison. */
6368 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6370 code = swap_tree_comparison (code);
/* NOTE(review): the switch over CODE is elided here (numbering gaps);
   the cases below correspond to GT, LE, EQ/GE, LT and NE.  */
6375 /* x > +Inf is always false, if we ignore sNaNs. */
6376 if (HONOR_SNANS (mode))
6378 return omit_one_operand (type, integer_zero_node, arg0);
6381 /* x <= +Inf is always true, if we don't care about NaNs. */
6382 if (! HONOR_NANS (mode))
6383 return omit_one_operand (type, integer_one_node, arg0);
6385 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6386 if (lang_hooks.decls.global_bindings_p () == 0
6387 && ! CONTAINS_PLACEHOLDER_P (arg0))
6389 arg0 = save_expr (arg0);
6390 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6396 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6397 real_maxval (&max, neg, mode);
6398 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6399 arg0, build_real (TREE_TYPE (arg0), max));
6402 /* x < +Inf is always equal to x <= DBL_MAX. */
6403 real_maxval (&max, neg, mode);
6404 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6405 arg0, build_real (TREE_TYPE (arg0), max));
6408 /* x != +Inf is always equal to !(x > DBL_MAX). */
6409 real_maxval (&max, neg, mode);
6410 if (! HONOR_NANS (mode))
6411 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6412 arg0, build_real (TREE_TYPE (arg0), max));
6414 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6415 arg0, build_real (TREE_TYPE (arg0), max));
6416 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6425 /* Subroutine of fold() that optimizes comparisons of a division by
6426 a nonzero integer constant against an integer constant, i.e.
6429 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6430 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6431 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6433 The function returns the constant folded tree if a simplification
6434 can be made, and NULL_TREE otherwise. */
6437 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6439 tree prod, tmp, hi, lo;
6440 tree arg00 = TREE_OPERAND (arg0, 0);
6441 tree arg01 = TREE_OPERAND (arg0, 1);
6442 unsigned HOST_WIDE_INT lpart;
6443 HOST_WIDE_INT hpart;
6444 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6448 /* We have to do this the hard way to detect unsigned overflow.
6449 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6450 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6451 TREE_INT_CST_HIGH (arg01),
6452 TREE_INT_CST_LOW (arg1),
6453 TREE_INT_CST_HIGH (arg1),
6454 &lpart, &hpart, unsigned_p);
6455 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6457 neg_overflow = false;
/* Compute [lo, hi], the range of dividends for which arg00/arg01
   compares equal to arg1; overflow in the bounds is tracked in the
   TREE_OVERFLOW flags and handled per-comparison below.  */
6461 tmp = int_const_binop (MINUS_EXPR, arg01,
6462 build_int_cst (TREE_TYPE (arg01), 1), 0);
6465 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6466 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6467 TREE_INT_CST_HIGH (prod),
6468 TREE_INT_CST_LOW (tmp),
6469 TREE_INT_CST_HIGH (tmp),
6470 &lpart, &hpart, unsigned_p);
6471 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6472 -1, overflow | TREE_OVERFLOW (prod));
6474 else if (tree_int_cst_sgn (arg01) >= 0)
6476 tmp = int_const_binop (MINUS_EXPR, arg01,
6477 build_int_cst (TREE_TYPE (arg01), 1), 0);
6478 switch (tree_int_cst_sgn (arg1))
6481 neg_overflow = true;
6482 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6487 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6492 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6502 /* A negative divisor reverses the relational operators. */
6503 code = swap_tree_comparison (code);
6505 tmp = int_const_binop (PLUS_EXPR, arg01,
6506 build_int_cst (TREE_TYPE (arg01), 1), 0);
6507 switch (tree_int_cst_sgn (arg1))
6510 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6515 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6520 neg_overflow = true;
6521 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* NOTE(review): the switch labels over CODE are elided below
   (numbering gaps); the cases correspond to EQ, NE, LT, LE, GT, GE.  */
6533 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6534 return omit_one_operand (type, integer_zero_node, arg00);
6535 if (TREE_OVERFLOW (hi))
6536 return fold_build2 (GE_EXPR, type, arg00, lo);
6537 if (TREE_OVERFLOW (lo))
6538 return fold_build2 (LE_EXPR, type, arg00, hi);
6539 return build_range_check (type, arg00, 1, lo, hi);
6542 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6543 return omit_one_operand (type, integer_one_node, arg00);
6544 if (TREE_OVERFLOW (hi))
6545 return fold_build2 (LT_EXPR, type, arg00, lo);
6546 if (TREE_OVERFLOW (lo))
6547 return fold_build2 (GT_EXPR, type, arg00, hi);
6548 return build_range_check (type, arg00, 0, lo, hi);
6551 if (TREE_OVERFLOW (lo))
6553 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6554 return omit_one_operand (type, tmp, arg00);
6556 return fold_build2 (LT_EXPR, type, arg00, lo);
6559 if (TREE_OVERFLOW (hi))
6561 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6562 return omit_one_operand (type, tmp, arg00);
6564 return fold_build2 (LE_EXPR, type, arg00, hi);
6567 if (TREE_OVERFLOW (hi))
6569 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6570 return omit_one_operand (type, tmp, arg00);
6572 return fold_build2 (GT_EXPR, type, arg00, hi);
6575 if (TREE_OVERFLOW (lo))
6577 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6578 return omit_one_operand (type, tmp, arg00);
6580 return fold_build2 (GE_EXPR, type, arg00, lo);
6590 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6591 equality/inequality test, then return a simplified form of the test
6592 using a sign testing. Otherwise return NULL. TYPE is the desired
6596 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6599 /* If this is testing a single bit, we can optimize the test. */
6600 if ((code == NE_EXPR || code == EQ_EXPR)
6601 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6602 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6604 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6605 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6606 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6608 if (arg00 != NULL_TREE
6609 /* This is only a win if casting to a signed type is cheap,
6610 i.e. when arg00's type is not a partial mode. */
6611 && TYPE_PRECISION (TREE_TYPE (arg00))
6612 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
/* Cast to the signed variant and compare against zero, which
   tests exactly the sign bit.  */
6614 tree stype = signed_type_for (TREE_TYPE (arg00));
6615 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6616 result_type, fold_convert (stype, arg00),
6617 build_int_cst (stype, 0));
6624 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6625 equality/inequality test, then return a simplified form of
6626 the test using shifts and logical operations. Otherwise return
6627 NULL. TYPE is the desired result type. */
6630 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6633 /* If this is testing a single bit, we can optimize the test. */
6634 if ((code == NE_EXPR || code == EQ_EXPR)
6635 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6636 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6638 tree inner = TREE_OPERAND (arg0, 0);
6639 tree type = TREE_TYPE (arg0);
6640 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6641 enum machine_mode operand_mode = TYPE_MODE (type);
6643 tree signed_type, unsigned_type, intermediate_type;
6646 /* First, see if we can fold the single bit test into a sign-bit
6648 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6653 /* Otherwise we have (A & C) != 0 where C is a single bit,
6654 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6655 Similarly for (A & C) == 0. */
6657 /* If INNER is a right shift of a constant and it plus BITNUM does
6658 not overflow, adjust BITNUM and INNER. */
6659 if (TREE_CODE (inner) == RSHIFT_EXPR
6660 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6661 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6662 && bitnum < TYPE_PRECISION (type)
6663 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6664 bitnum - TYPE_PRECISION (type)))
6666 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6667 inner = TREE_OPERAND (inner, 0);
6670 /* If we are going to be able to omit the AND below, we must do our
6671 operations as unsigned. If we must use the AND, we have a choice.
6672 Normally unsigned is faster, but for some machines signed is. */
6673 #ifdef LOAD_EXTEND_OP
6674 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6675 && !flag_syntax_only) ? 0 : 1;
6680 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6681 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6682 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6683 inner = fold_convert (intermediate_type, inner);
/* Shift the tested bit down to bit 0.  */
6686 inner = build2 (RSHIFT_EXPR, intermediate_type,
6687 inner, size_int (bitnum));
6689 one = build_int_cst (intermediate_type, 1);
/* For EQ the sense of the test is inverted: XOR with 1 first.  */
6691 if (code == EQ_EXPR)
6692 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6694 /* Put the AND last so it can combine with more things. */
6695 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6697 /* Make sure to return the proper type. */
6698 inner = fold_convert (result_type, inner);
6705 /* Check whether we are allowed to reorder operands arg0 and arg1,
6706 such that the evaluation of arg1 occurs before arg0. */
6709 reorder_operands_p (tree arg0, tree arg1)
/* NOTE(review): the early-return bodies after the two ifs are elided
   from this listing (numbering gaps); presumably reordering is allowed
   when -fevaluation-order is off or either operand is constant —
   confirm against the unelided source.  */
6711 if (! flag_evaluation_order)
6713 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
/* Otherwise it is only safe if neither operand has side effects.  */
6715 return ! TREE_SIDE_EFFECTS (arg0)
6716 && ! TREE_SIDE_EFFECTS (arg1);
6719 /* Test whether it is preferable to swap two operands, ARG0 and
6720 ARG1, for example because ARG0 is an integer constant and ARG1
6721 isn't. If REORDER is true, only recommend swapping if we can
6722 evaluate the operands in reverse order. */
6725 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6727 STRIP_SIGN_NOPS (arg0);
6728 STRIP_SIGN_NOPS (arg1);
/* Constants canonically go second; checks are ordered by constant
   kind (INTEGER_CST, REAL_CST, COMPLEX_CST, other TREE_CONSTANT).
   NOTE(review): the true/false return statements inside each pair
   of ifs are elided from this listing.  */
6730 if (TREE_CODE (arg1) == INTEGER_CST)
6732 if (TREE_CODE (arg0) == INTEGER_CST)
6735 if (TREE_CODE (arg1) == REAL_CST)
6737 if (TREE_CODE (arg0) == REAL_CST)
6740 if (TREE_CODE (arg1) == COMPLEX_CST)
6742 if (TREE_CODE (arg0) == COMPLEX_CST)
6745 if (TREE_CONSTANT (arg1))
6747 if (TREE_CONSTANT (arg0))
6753 if (reorder && flag_evaluation_order
6754 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6757 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6758 for commutative and comparison operators. Ensuring a canonical
6759 form allows the optimizers to find additional redundancies without
6760 having to explicitly check for both orderings. */
6761 if (TREE_CODE (arg0) == SSA_NAME
6762 && TREE_CODE (arg1) == SSA_NAME
6763 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6766 /* Put SSA_NAMEs last. */
6767 if (TREE_CODE (arg1) == SSA_NAME)
6769 if (TREE_CODE (arg0) == SSA_NAME)
6772 /* Put variables last. */
6781 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6782 ARG0 is extended to a wider type. */
6785 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6787 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6789 tree shorter_type, outer_type;
6793 if (arg0_unw == arg0)
6795 shorter_type = TREE_TYPE (arg0_unw);
6797 #ifdef HAVE_canonicalize_funcptr_for_compare
6798 /* Disable this optimization if we're casting a function pointer
6799 type on targets that require function pointer canonicalization. */
6800 if (HAVE_canonicalize_funcptr_for_compare
6801 && TREE_CODE (shorter_type) == POINTER_TYPE
6802 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6806 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6809 arg1_unw = get_unwidened (arg1, shorter_type);
6811 /* If possible, express the comparison in the shorter mode. */
6812 if ((code == EQ_EXPR || code == NE_EXPR
6813 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6814 && (TREE_TYPE (arg1_unw) == shorter_type
6815 || (TREE_CODE (arg1_unw) == INTEGER_CST
6816 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6817 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6818 && int_fits_type_p (arg1_unw, shorter_type))))
6819 return fold_build2 (code, type, arg0_unw,
6820 fold_convert (shorter_type, arg1_unw));
6822 if (TREE_CODE (arg1_unw) != INTEGER_CST
6823 || TREE_CODE (shorter_type) != INTEGER_TYPE
6824 || !int_fits_type_p (arg1_unw, shorter_type))
6827 /* If we are comparing with the integer that does not fit into the range
6828 of the shorter type, the result is known. */
6829 outer_type = TREE_TYPE (arg1_unw);
6830 min = lower_bound_in_type (outer_type, shorter_type);
6831 max = upper_bound_in_type (outer_type, shorter_type);
6833 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6835 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
/* NOTE(review): the switch over CODE is elided here (numbering gaps);
   the returns below fold the comparison to a constant 0/1 depending on
   whether ARG1 lies above or below the shorter type's range.  */
6842 return omit_one_operand (type, integer_zero_node, arg0);
6847 return omit_one_operand (type, integer_one_node, arg0);
6853 return omit_one_operand (type, integer_one_node, arg0);
6855 return omit_one_operand (type, integer_zero_node, arg0);
6860 return omit_one_operand (type, integer_zero_node, arg0);
6862 return omit_one_operand (type, integer_one_node, arg0);
6871 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6872 ARG0 just the signedness is changed. */
6875 fold_sign_changed_comparison (enum tree_code code, tree type,
6876 tree arg0, tree arg1)
6879 tree inner_type, outer_type;
6881 if (TREE_CODE (arg0) != NOP_EXPR
6882 && TREE_CODE (arg0) != CONVERT_EXPR)
6885 outer_type = TREE_TYPE (arg0);
6886 arg0_inner = TREE_OPERAND (arg0, 0);
6887 inner_type = TREE_TYPE (arg0_inner);
6889 #ifdef HAVE_canonicalize_funcptr_for_compare
6890 /* Disable this optimization if we're casting a function pointer
6891 type on targets that require function pointer canonicalization. */
6892 if (HAVE_canonicalize_funcptr_for_compare
6893 && TREE_CODE (inner_type) == POINTER_TYPE
6894 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
/* The conversion must not change the value's precision.  */
6898 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6901 if (TREE_CODE (arg1) != INTEGER_CST
6902 && !((TREE_CODE (arg1) == NOP_EXPR
6903 || TREE_CODE (arg1) == CONVERT_EXPR)
6904 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6907 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
/* Re-express the constant operand in the inner type, preserving
   any recorded overflow.  */
6912 if (TREE_CODE (arg1) == INTEGER_CST)
6913 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6914 TREE_INT_CST_HIGH (arg1), 0,
6915 TREE_OVERFLOW (arg1));
6917 arg1 = fold_convert (inner_type, arg1);
6919 return fold_build2 (code, type, arg0_inner, arg1);
6922 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6923 step of the array. Reconstructs s and delta in the case of s * delta
6924 being an integer constant (and thus already folded).
6925 ADDR is the address. MULT is the multiplicative expression.
6926 If the function succeeds, the new address expression is returned. Otherwise
6927 NULL_TREE is returned. */
6930 try_move_mult_to_index (tree addr, tree op1)
6932 tree s, delta, step;
6933 tree ref = TREE_OPERAND (addr, 0), pref;
6938 /* Strip the nops that might be added when converting op1 to sizetype. */
6941 /* Canonicalize op1 into a possibly non-constant delta
6942 and an INTEGER_CST s. */
6943 if (TREE_CODE (op1) == MULT_EXPR)
6945 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6950 if (TREE_CODE (arg0) == INTEGER_CST)
6955 else if (TREE_CODE (arg1) == INTEGER_CST)
6963 else if (TREE_CODE (op1) == INTEGER_CST)
6970 /* Simulate we are delta * 1. */
6972 s = integer_one_node;
/* Walk down the handled components of the address looking for an
   ARRAY_REF whose element size matches (or divides) S.  */
6975 for (;; ref = TREE_OPERAND (ref, 0))
6977 if (TREE_CODE (ref) == ARRAY_REF)
6979 /* Remember if this was a multi-dimensional array. */
6980 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6983 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6987 step = array_ref_element_size (ref);
6988 if (TREE_CODE (step) != INTEGER_CST)
6993 if (! tree_int_cst_equal (step, s))
6998 /* Try if delta is a multiple of step. */
6999 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
7005 /* Only fold here if we can verify we do not overflow one
7006 dimension of a multi-dimensional array. */
7011 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7012 || !INTEGRAL_TYPE_P (itype)
7013 || !TYPE_MAX_VALUE (itype)
7014 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7017 tmp = fold_binary (PLUS_EXPR, itype,
7018 fold_convert (itype,
7019 TREE_OPERAND (ref, 1)),
7020 fold_convert (itype, delta));
7022 || TREE_CODE (tmp) != INTEGER_CST
7023 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7032 if (!handled_component_p (ref))
7036 /* We found the suitable array reference. So copy everything up to it,
7037 and replace the index. */
7039 pref = TREE_OPERAND (addr, 0);
7040 ret = copy_node (pref);
/* Copy the component chain node by node down to the ARRAY_REF.  */
7045 pref = TREE_OPERAND (pref, 0);
7046 TREE_OPERAND (pos, 0) = copy_node (pref);
7047 pos = TREE_OPERAND (pos, 0);
/* Replace the index with idx + delta in the index type.  */
7050 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7051 fold_convert (itype,
7052 TREE_OPERAND (pos, 1)),
7053 fold_convert (itype, delta));
7055 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7059 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7060 means A >= Y && A != MAX, but in this case we know that
7061 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7064 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7066 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from whichever side of BOUND it appears on.  */
7068 if (TREE_CODE (bound) == LT_EXPR)
7069 a = TREE_OPERAND (bound, 0);
7070 else if (TREE_CODE (bound) == GT_EXPR)
7071 a = TREE_OPERAND (bound, 1);
7075 typea = TREE_TYPE (a);
7076 if (!INTEGRAL_TYPE_P (typea)
7077 && !POINTER_TYPE_P (typea))
/* Extract A+1 and Y from INEQ, which may be written either way.  */
7080 if (TREE_CODE (ineq) == LT_EXPR)
7082 a1 = TREE_OPERAND (ineq, 1);
7083 y = TREE_OPERAND (ineq, 0);
7085 else if (TREE_CODE (ineq) == GT_EXPR)
7087 a1 = TREE_OPERAND (ineq, 0);
7088 y = TREE_OPERAND (ineq, 1);
7093 if (TREE_TYPE (a1) != typea)
7096 if (POINTER_TYPE_P (typea))
7098 /* Convert the pointer types into integer before taking the difference. */
7099 tree ta = fold_convert (ssizetype, a);
7100 tree ta1 = fold_convert (ssizetype, a1);
7101 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7104 diff = fold_binary (MINUS_EXPR, typea, a1, a);
/* The rewrite is valid only when A1 is exactly A + 1.  */
7106 if (!diff || !integer_onep (diff))
7109 return fold_build2 (GE_EXPR, type, a, y);
7112 /* Fold a sum or difference of at least one multiplication.
7113 Returns the folded tree or NULL if no simplification could be made. */
7116 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7118 tree arg00, arg01, arg10, arg11;
7119 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7121 /* (A * C) +- (B * C) -> (A+-B) * C.
7122 (A * C) +- A -> A * (C+-1).
7123 We are most concerned about the case where C is a constant,
7124 but other combinations show up during loop reduction. Since
7125 it is not difficult, try all four possibilities. */
/* Decompose each operand into a pair of factors; a non-MULT operand
   is treated as itself times one.  */
7127 if (TREE_CODE (arg0) == MULT_EXPR)
7129 arg00 = TREE_OPERAND (arg0, 0);
7130 arg01 = TREE_OPERAND (arg0, 1);
7132 else if (TREE_CODE (arg0) == INTEGER_CST)
7134 arg00 = build_one_cst (type);
7140 arg01 = build_one_cst (type);
7142 if (TREE_CODE (arg1) == MULT_EXPR)
7144 arg10 = TREE_OPERAND (arg1, 0);
7145 arg11 = TREE_OPERAND (arg1, 1);
7147 else if (TREE_CODE (arg1) == INTEGER_CST)
7149 arg10 = build_one_cst (type);
7155 arg11 = build_one_cst (type);
/* Look for a factor shared by both products.  */
7159 if (operand_equal_p (arg01, arg11, 0))
7160 same = arg01, alt0 = arg00, alt1 = arg10;
7161 else if (operand_equal_p (arg00, arg10, 0))
7162 same = arg00, alt0 = arg01, alt1 = arg11;
7163 else if (operand_equal_p (arg00, arg11, 0))
7164 same = arg00, alt0 = arg01, alt1 = arg10;
7165 else if (operand_equal_p (arg01, arg10, 0))
7166 same = arg01, alt0 = arg00, alt1 = arg11;
7168 /* No identical multiplicands; see if we can find a common
7169 power-of-two factor in non-power-of-two multiplies. This
7170 can help in multi-dimensional array access. */
7171 else if (host_integerp (arg01, 0)
7172 && host_integerp (arg11, 0))
7174 HOST_WIDE_INT int01, int11, tmp;
7177 int01 = TREE_INT_CST_LOW (arg01);
7178 int11 = TREE_INT_CST_LOW (arg11);
7180 /* Move min of absolute values to int11. */
7181 if ((int01 >= 0 ? int01 : -int01)
7182 < (int11 >= 0 ? int11 : -int11))
7184 tmp = int01, int01 = int11, int11 = tmp;
7185 alt0 = arg00, arg00 = arg10, arg10 = alt0;
/* If the smaller constant is a power of two and divides the larger,
   factor it out: fold the quotient into the other multiplicand.  */
7192 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7194 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7195 build_int_cst (TREE_TYPE (arg00),
7200 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
/* Rebuild as (alt0 +- alt1) * same.  */
7205 return fold_build2 (MULT_EXPR, type,
7206 fold_build2 (code, type,
7207 fold_convert (type, alt0),
7208 fold_convert (type, alt1)),
7209 fold_convert (type, same));
7214 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7215 specified by EXPR into the buffer PTR of length LEN bytes.
7216 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): elided lines include "upon failure", the return type,
   braces and the final `return total_bytes;`.  Code tokens unmodified.  */
7220 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7222 tree type = TREE_TYPE (expr);
7223 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7224 int byte, offset, word, words;
7225 unsigned char value;
/* Fail (return 0 on an elided line) if the buffer is too small.  */
7227 if (total_bytes > len)
7229 words = total_bytes / UNITS_PER_WORD;
7231 for (byte = 0; byte < total_bytes; byte++)
7233 int bitpos = byte * BITS_PER_UNIT;
/* Low bytes come from the LOW half of the double-int constant,
   high bytes from the HIGH half.  */
7234 if (bitpos < HOST_BITS_PER_WIDE_INT)
7235 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7237 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7238 >> (bitpos - HOST_BITS_PER_WIDE_INT));
/* Map the logical byte index to the target's word/byte endianness.  */
7240 if (total_bytes > UNITS_PER_WORD)
7242 word = byte / UNITS_PER_WORD;
7243 if (WORDS_BIG_ENDIAN)
7244 word = (words - 1) - word;
7245 offset = word * UNITS_PER_WORD;
7246 if (BYTES_BIG_ENDIAN)
7247 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7249 offset += byte % UNITS_PER_WORD;
7252 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7253 ptr[offset] = value;
7259 /* Subroutine of native_encode_expr. Encode the REAL_CST
7260 specified by EXPR into the buffer PTR of length LEN bytes.
7261 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): elided lines include the declaration of the `long tmp[6]`
   scratch array, braces, and the final `return total_bytes;`.  */
7265 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7267 tree type = TREE_TYPE (expr);
7268 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7269 int byte, offset, word, words, bitpos;
7270 unsigned char value;
7272 /* There are always 32 bits in each long, no matter the size of
7273 the hosts long. We handle floating point representations with
/* Fail if the buffer cannot hold the value.  */
7277 if (total_bytes > len)
7279 words = 32 / UNITS_PER_WORD;
/* real_to_target fills the scratch array with 32-bit groups in
   target format.  */
7281 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7283 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7284 bitpos += BITS_PER_UNIT)
7286 byte = (bitpos / BITS_PER_UNIT) & 3;
7287 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
/* Place the byte within its 32-bit group per target endianness.  */
7289 if (UNITS_PER_WORD < 4)
7291 word = byte / UNITS_PER_WORD;
7292 if (WORDS_BIG_ENDIAN)
7293 word = (words - 1) - word;
7294 offset = word * UNITS_PER_WORD;
7295 if (BYTES_BIG_ENDIAN)
7296 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7298 offset += byte % UNITS_PER_WORD;
7301 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7302 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7307 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7308 specified by EXPR into the buffer PTR of length LEN bytes.
7309 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): elided lines include local declarations and the checks
   that each part-encoding succeeded (returning 0 otherwise).  */
7313 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
/* Encode the real part at the start of the buffer ...  */
7318 part = TREE_REALPART (expr);
7319 rsize = native_encode_expr (part, ptr, len);
/* ... then the imaginary part immediately after it.  */
7322 part = TREE_IMAGPART (expr);
7323 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7326 return rsize + isize;
7330 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7331 specified by EXPR into the buffer PTR of length LEN bytes.
7332 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): elided lines include the `offset = 0;` initialization,
   loop braces, offset updates, and the final return.  */
7336 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7338 int i, size, offset, count;
7339 tree itype, elem, elements;
7342 elements = TREE_VECTOR_CST_ELTS (expr);
7343 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7344 itype = TREE_TYPE (TREE_TYPE (expr));
7345 size = GET_MODE_SIZE (TYPE_MODE (itype));
/* Encode each element in order; trailing elements missing from the
   chain are zero-filled below.  */
7346 for (i = 0; i < count; i++)
7350 elem = TREE_VALUE (elements);
7351 elements = TREE_CHAIN (elements);
/* Each element must encode to exactly SIZE bytes.  */
7358 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7363 if (offset + size > len)
7365 memset (ptr+offset, 0, size);
7373 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7374 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7375 buffer PTR of length LEN bytes. Return the number of bytes
7376 placed in the buffer, or zero upon failure. */
/* NOTE(review): the `case` labels and the `default: return 0;` arm are
   elided in this excerpt; only the dispatch calls are visible.  */
7379 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7381 switch (TREE_CODE (expr))
7384 return native_encode_int (expr, ptr, len);
7387 return native_encode_real (expr, ptr, len);
7390 return native_encode_complex (expr, ptr, len);
7393 return native_encode_vector (expr, ptr, len);
7401 /* Subroutine of native_interpret_expr. Interpret the contents of
7402 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7403 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): braces and the `return NULL_TREE;` failure paths are
   elided; code tokens are unmodified.  This is the inverse of
   native_encode_int: it rebuilds the LOW/HIGH double-int halves.  */
7406 native_interpret_int (tree type, const unsigned char *ptr, int len)
7408 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7409 int byte, offset, word, words;
7410 unsigned char value;
7411 unsigned int HOST_WIDE_INT lo = 0;
7412 HOST_WIDE_INT hi = 0;
/* Fail if the buffer is short, or the value is wider than the
   host's double-int representation can hold.  */
7414 if (total_bytes > len)
7416 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7418 words = total_bytes / UNITS_PER_WORD;
7420 for (byte = 0; byte < total_bytes; byte++)
7422 int bitpos = byte * BITS_PER_UNIT;
/* Same endianness mapping as native_encode_int, in reverse.  */
7423 if (total_bytes > UNITS_PER_WORD)
7425 word = byte / UNITS_PER_WORD;
7426 if (WORDS_BIG_ENDIAN)
7427 word = (words - 1) - word;
7428 offset = word * UNITS_PER_WORD;
7429 if (BYTES_BIG_ENDIAN)
7430 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7432 offset += byte % UNITS_PER_WORD;
7435 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7436 value = ptr[offset];
7438 if (bitpos < HOST_BITS_PER_WIDE_INT)
7439 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7441 hi |= (unsigned HOST_WIDE_INT) value
7442 << (bitpos - HOST_BITS_PER_WIDE_INT);
7445 return build_int_cst_wide_type (type, lo, hi);
7449 /* Subroutine of native_interpret_expr. Interpret the contents of
7450 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7451 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): the declarations of the `long tmp[6]` scratch array and
   `REAL_VALUE_TYPE r`, plus braces, are elided in this excerpt.  */
7454 native_interpret_real (tree type, const unsigned char *ptr, int len)
7456 enum machine_mode mode = TYPE_MODE (type);
7457 int total_bytes = GET_MODE_SIZE (mode);
7458 int byte, offset, word, words, bitpos;
7459 unsigned char value;
7460 /* There are always 32 bits in each long, no matter the size of
7461 the hosts long. We handle floating point representations with
7466 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
/* 24 bytes = 6 longs * 32 bits: the scratch array's capacity.  */
7467 if (total_bytes > len || total_bytes > 24)
7469 words = 32 / UNITS_PER_WORD;
7471 memset (tmp, 0, sizeof (tmp));
/* Gather bytes back into 32-bit groups, inverting the endianness
   mapping used by native_encode_real.  */
7472 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7473 bitpos += BITS_PER_UNIT)
7475 byte = (bitpos / BITS_PER_UNIT) & 3;
7476 if (UNITS_PER_WORD < 4)
7478 word = byte / UNITS_PER_WORD;
7479 if (WORDS_BIG_ENDIAN)
7480 word = (words - 1) - word;
7481 offset = word * UNITS_PER_WORD;
7482 if (BYTES_BIG_ENDIAN)
7483 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7485 offset += byte % UNITS_PER_WORD;
7488 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7489 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7491 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7494 real_from_target (&r, tmp, mode);
7495 return build_real (type, r);
7499 /* Subroutine of native_interpret_expr. Interpret the contents of
7500 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7501 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): elided lines include the buffer-length check and the
   NULL checks on rpart/ipart (returning NULL_TREE).  */
7504 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7506 tree etype, rpart, ipart;
7509 etype = TREE_TYPE (type);
7510 size = GET_MODE_SIZE (TYPE_MODE (etype));
/* Real part first, imaginary part immediately after — mirroring the
   layout written by native_encode_complex.  */
7513 rpart = native_interpret_expr (etype, ptr, size);
7516 ipart = native_interpret_expr (etype, ptr+size, size);
7519 return build_complex (type, rpart, ipart);
7523 /* Subroutine of native_interpret_expr. Interpret the contents of
7524 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7525 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): `int i, size, count;` and the per-element NULL check
   are elided in this excerpt.  */
7528 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7530 tree etype, elem, elements;
7533 etype = TREE_TYPE (type);
7534 size = GET_MODE_SIZE (TYPE_MODE (etype));
7535 count = TYPE_VECTOR_SUBPARTS (type);
7536 if (size * count > len)
7539 elements = NULL_TREE;
/* Build the element chain back-to-front so tree_cons produces the
   elements in ascending order.  */
7540 for (i = count - 1; i >= 0; i--)
7542 elem = native_interpret_expr (etype, ptr+(i*size), size);
7545 elements = tree_cons (NULL_TREE, elem, elements);
7547 return build_vector (type, elements);
7551 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7552 the buffer PTR of length LEN as a constant of type TYPE. For
7553 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7554 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7555 return NULL_TREE. */
/* NOTE(review): the `case` labels and `default: return NULL_TREE;` arm
   are elided; only the dispatch calls are visible.  */
7558 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7560 switch (TREE_CODE (type))
7565 return native_interpret_int (type, ptr, len);
7568 return native_interpret_real (type, ptr, len);
7571 return native_interpret_complex (type, ptr, len);
7574 return native_interpret_vector (type, ptr, len);
7582 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7583 TYPE at compile-time. If we're unable to perform the conversion
7584 return NULL_TREE. */
/* NOTE(review): the `int len;` declaration and the zero-length check on
   the encode result are elided in this excerpt.  */
7587 fold_view_convert_expr (tree type, tree expr)
7589 /* We support up to 512-bit values (for V8DFmode). */
7590 unsigned char buffer[64];
7593 /* Check that the host and target are sane. */
7594 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
/* Round-trip: serialize EXPR to target bytes, then reinterpret those
   bytes as a constant of TYPE.  */
7597 len = native_encode_expr (expr, buffer, sizeof (buffer));
7601 return native_interpret_expr (type, buffer, len);
7604 /* Build an expression for the address of T. Folds away INDIRECT_REF
7605 to avoid confusing the gimplify process. When IN_FOLD is true
7606 avoid modifications of T. */
/* NOTE(review): braces, the `else if (!in_fold)` branch structure and
   the `tree base;` declaration are elided; code tokens unmodified.  */
7609 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7611 /* The size of the object is not relevant when talking about its address. */
7612 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7613 t = TREE_OPERAND (t, 0);
7615 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7616 if (TREE_CODE (t) == INDIRECT_REF
7617 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
/* &*p folds to p (with a NOP cast if the pointer type differs).  */
7619 t = TREE_OPERAND (t, 0);
7621 if (TREE_TYPE (t) != ptrtype)
7622 t = build1 (NOP_EXPR, ptrtype, t);
/* In the !in_fold case, walk down to the base object and mark it
   addressable before taking its address.  */
7628 while (handled_component_p (base))
7629 base = TREE_OPERAND (base, 0);
7632 TREE_ADDRESSABLE (base) = 1;
7634 t = build1 (ADDR_EXPR, ptrtype, t);
7637 t = build1 (ADDR_EXPR, ptrtype, t);
7642 /* Build an expression for the address of T with type PTRTYPE. This
7643 function modifies the input parameter 'T' by sometimes setting the
7644 TREE_ADDRESSABLE flag. */
/* Thin wrapper: in_fold == false, so T may be modified.  */
7647 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7649 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7652 /* Build an expression for the address of T. This function modifies
7653 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7654 flag. When called from fold functions, use fold_addr_expr instead. */
/* NOTE(review): the trailing `false);` argument line of the call is
   elided in this excerpt.  */
7657 build_fold_addr_expr (tree t)
7659 return build_fold_addr_expr_with_type_1 (t,
7660 build_pointer_type (TREE_TYPE (t)),
7664 /* Same as build_fold_addr_expr, builds an expression for the address
7665 of T, but avoids touching the input node 't'. Fold functions
7666 should use this version. */
/* in_fold == true: never sets TREE_ADDRESSABLE on T's base.  */
7669 fold_addr_expr (tree t)
7671 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7673 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7676 /* Fold a unary expression of code CODE and type TYPE with operand
7677 OP0. Return the folded expression if folding is successful.
7678 Otherwise, return NULL_TREE. */
/* NOTE(review): this excerpt elides many interior lines of fold_unary
   (the `tree tem, arg0;` declarations, braces, several `case` labels,
   `break`s and `return` statements).  All code tokens below are kept
   byte-identical; comments only are added.  */
7681 fold_unary (enum tree_code code, tree type, tree op0)
7685 enum tree_code_class kind = TREE_CODE_CLASS (code);
7687 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7688 && TREE_CODE_LENGTH (code) == 1);
/* Strip conversions off the operand, but for sign-sensitive codes use
   the sign-preserving variant.  */
7693 if (code == NOP_EXPR || code == CONVERT_EXPR
7694 || code == FLOAT_EXPR || code == ABS_EXPR)
7696 /* Don't use STRIP_NOPS, because signedness of argument type
7698 STRIP_SIGN_NOPS (arg0);
7702 /* Strip any conversions that don't change the mode. This
7703 is safe for every expression, except for a comparison
7704 expression because its signedness is derived from its
7707 Note that this is done as an internal manipulation within
7708 the constant folder, in order to find the simplest
7709 representation of the arguments so that their form can be
7710 studied. In any cases, the appropriate type conversions
7711 should be put back in the tree that will get out of the
/* Distribute a unary operation into COMPOUND_EXPR / COND_EXPR /
   comparison operands before dispatching on CODE.  */
7717 if (TREE_CODE_CLASS (code) == tcc_unary)
7719 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7720 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7721 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7722 else if (TREE_CODE (arg0) == COND_EXPR)
7724 tree arg01 = TREE_OPERAND (arg0, 1);
7725 tree arg02 = TREE_OPERAND (arg0, 2);
7726 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7727 arg01 = fold_build1 (code, type, arg01);
7728 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7729 arg02 = fold_build1 (code, type, arg02);
7730 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7733 /* If this was a conversion, and all we did was to move into
7734 inside the COND_EXPR, bring it back out. But leave it if
7735 it is a conversion from integer to integer and the
7736 result precision is no wider than a word since such a
7737 conversion is cheap and may be optimized away by combine,
7738 while it couldn't if it were outside the COND_EXPR. Then return
7739 so we don't get into an infinite recursion loop taking the
7740 conversion out and then back in. */
7742 if ((code == NOP_EXPR || code == CONVERT_EXPR
7743 || code == NON_LVALUE_EXPR)
7744 && TREE_CODE (tem) == COND_EXPR
7745 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7746 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7747 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7748 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7749 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7750 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7751 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7753 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7754 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7755 || flag_syntax_only))
7756 tem = build1 (code, type,
7758 TREE_TYPE (TREE_OPERAND
7759 (TREE_OPERAND (tem, 1), 0)),
7760 TREE_OPERAND (tem, 0),
7761 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7762 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7765 else if (COMPARISON_CLASS_P (arg0))
7767 if (TREE_CODE (type) == BOOLEAN_TYPE)
7769 arg0 = copy_node (arg0);
7770 TREE_TYPE (arg0) = type;
7773 else if (TREE_CODE (type) != INTEGER_TYPE)
7774 return fold_build3 (COND_EXPR, type, arg0,
7775 fold_build1 (code, type,
7777 fold_build1 (code, type,
7778 integer_zero_node));
/* Conversion cases (NOP_EXPR, CONVERT_EXPR, FIX_TRUNC_EXPR, ...).  */
7787 case FIX_TRUNC_EXPR:
7788 if (TREE_TYPE (op0) == type)
7791 /* If we have (type) (a CMP b) and type is an integral type, return
7792 new expression involving the new type. */
7793 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7794 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7795 TREE_OPERAND (op0, 1));
7797 /* Handle cases of two conversions in a row. */
7798 if (TREE_CODE (op0) == NOP_EXPR
7799 || TREE_CODE (op0) == CONVERT_EXPR)
7801 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7802 tree inter_type = TREE_TYPE (op0);
7803 int inside_int = INTEGRAL_TYPE_P (inside_type);
7804 int inside_ptr = POINTER_TYPE_P (inside_type);
7805 int inside_float = FLOAT_TYPE_P (inside_type);
7806 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7807 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7808 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7809 int inter_int = INTEGRAL_TYPE_P (inter_type);
7810 int inter_ptr = POINTER_TYPE_P (inter_type);
7811 int inter_float = FLOAT_TYPE_P (inter_type);
7812 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7813 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7814 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7815 int final_int = INTEGRAL_TYPE_P (type);
7816 int final_ptr = POINTER_TYPE_P (type);
7817 int final_float = FLOAT_TYPE_P (type);
7818 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7819 unsigned int final_prec = TYPE_PRECISION (type);
7820 int final_unsignedp = TYPE_UNSIGNED (type);
7822 /* In addition to the cases of two conversions in a row
7823 handled below, if we are converting something to its own
7824 type via an object of identical or wider precision, neither
7825 conversion is needed. */
7826 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7827 && (((inter_int || inter_ptr) && final_int)
7828 || (inter_float && final_float))
7829 && inter_prec >= final_prec)
7830 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7832 /* Likewise, if the intermediate and final types are either both
7833 float or both integer, we don't need the middle conversion if
7834 it is wider than the final type and doesn't change the signedness
7835 (for integers). Avoid this if the final type is a pointer
7836 since then we sometimes need the inner conversion. Likewise if
7837 the outer has a precision not equal to the size of its mode. */
7838 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7839 || (inter_float && inside_float)
7840 || (inter_vec && inside_vec))
7841 && inter_prec >= inside_prec
7842 && (inter_float || inter_vec
7843 || inter_unsignedp == inside_unsignedp)
7844 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7845 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7847 && (! final_vec || inter_prec == inside_prec))
7848 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7850 /* If we have a sign-extension of a zero-extended value, we can
7851 replace that by a single zero-extension. */
7852 if (inside_int && inter_int && final_int
7853 && inside_prec < inter_prec && inter_prec < final_prec
7854 && inside_unsignedp && !inter_unsignedp)
7855 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7857 /* Two conversions in a row are not needed unless:
7858 - some conversion is floating-point (overstrict for now), or
7859 - some conversion is a vector (overstrict for now), or
7860 - the intermediate type is narrower than both initial and
7862 - the intermediate type and innermost type differ in signedness,
7863 and the outermost type is wider than the intermediate, or
7864 - the initial type is a pointer type and the precisions of the
7865 intermediate and final types differ, or
7866 - the final type is a pointer type and the precisions of the
7867 initial and intermediate types differ.
7868 - the final type is a pointer type and the initial type not
7869 - the initial type is a pointer to an array and the final type
7871 if (! inside_float && ! inter_float && ! final_float
7872 && ! inside_vec && ! inter_vec && ! final_vec
7873 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7874 && ! (inside_int && inter_int
7875 && inter_unsignedp != inside_unsignedp
7876 && inter_prec < final_prec)
7877 && ((inter_unsignedp && inter_prec > inside_prec)
7878 == (final_unsignedp && final_prec > inter_prec))
7879 && ! (inside_ptr && inter_prec != final_prec)
7880 && ! (final_ptr && inside_prec != inter_prec)
7881 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7882 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7883 && final_ptr == inside_ptr
7885 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7886 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7887 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7890 /* Handle (T *)&A.B.C for A being of type T and B and C
7891 living at offset zero. This occurs frequently in
7892 C++ upcasting and then accessing the base. */
7893 if (TREE_CODE (op0) == ADDR_EXPR
7894 && POINTER_TYPE_P (type)
7895 && handled_component_p (TREE_OPERAND (op0, 0)))
7897 HOST_WIDE_INT bitsize, bitpos;
7899 enum machine_mode mode;
7900 int unsignedp, volatilep;
7901 tree base = TREE_OPERAND (op0, 0);
7902 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7903 &mode, &unsignedp, &volatilep, false);
7904 /* If the reference was to a (constant) zero offset, we can use
7905 the address of the base if it has the same base type
7906 as the result type. */
7907 if (! offset && bitpos == 0
7908 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7909 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7910 return fold_convert (type, fold_addr_expr (base));
/* Push a conversion into the RHS of a constant assignment so the
   assignment isn't left inside the conversion.  */
7913 if ((TREE_CODE (op0) == MODIFY_EXPR
7914 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7915 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7916 /* Detect assigning a bitfield. */
7917 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7919 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7921 /* Don't leave an assignment inside a conversion
7922 unless assigning a bitfield. */
7923 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7924 /* First do the assignment, then return converted constant. */
7925 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7926 TREE_NO_WARNING (tem) = 1;
7927 TREE_USED (tem) = 1;
7931 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7932 constants (if x has signed type, the sign bit cannot be set
7933 in c). This folds extension into the BIT_AND_EXPR. */
7934 if (INTEGRAL_TYPE_P (type)
7935 && TREE_CODE (type) != BOOLEAN_TYPE
7936 && TREE_CODE (op0) == BIT_AND_EXPR
7937 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7940 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7943 if (TYPE_UNSIGNED (TREE_TYPE (and))
7944 || (TYPE_PRECISION (type)
7945 <= TYPE_PRECISION (TREE_TYPE (and))))
7947 else if (TYPE_PRECISION (TREE_TYPE (and1))
7948 <= HOST_BITS_PER_WIDE_INT
7949 && host_integerp (and1, 1))
7951 unsigned HOST_WIDE_INT cst;
7953 cst = tree_low_cst (and1, 1);
7954 cst &= (HOST_WIDE_INT) -1
7955 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7956 change = (cst == 0);
7957 #ifdef LOAD_EXTEND_OP
7959 && !flag_syntax_only
7960 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7963 tree uns = unsigned_type_for (TREE_TYPE (and0));
7964 and0 = fold_convert (uns, and0);
7965 and1 = fold_convert (uns, and1);
7971 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7972 TREE_INT_CST_HIGH (and1), 0,
7973 TREE_OVERFLOW (and1));
7974 return fold_build2 (BIT_AND_EXPR, type,
7975 fold_convert (type, and0), tem);
7979 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7980 when one of the new casts will fold away. Conservatively we assume
7981 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7982 if (POINTER_TYPE_P (type)
7983 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7984 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7985 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7986 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7988 tree arg00 = TREE_OPERAND (arg0, 0);
7989 tree arg01 = TREE_OPERAND (arg0, 1);
7991 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7992 fold_convert (sizetype, arg01));
7995 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7996 of the same precision, and X is an integer type not narrower than
7997 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7998 if (INTEGRAL_TYPE_P (type)
7999 && TREE_CODE (op0) == BIT_NOT_EXPR
8000 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8001 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
8002 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
8003 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8005 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8006 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8007 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8008 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
/* Fall back to constant folding of the conversion.  */
8011 tem = fold_convert_const (code, type, op0);
8012 return tem ? tem : NULL_TREE;
8014 case VIEW_CONVERT_EXPR:
8015 if (TREE_TYPE (op0) == type)
8017 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8018 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8019 return fold_view_convert_expr (type, op0);
/* NEGATE_EXPR (label elided): delegate to fold_negate_expr.  */
8022 tem = fold_negate_expr (arg0);
8024 return fold_convert (type, tem);
/* ABS_EXPR (label elided).  */
8028 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8029 return fold_abs_const (arg0, type);
8030 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8031 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8032 /* Convert fabs((double)float) into (double)fabsf(float). */
8033 else if (TREE_CODE (arg0) == NOP_EXPR
8034 && TREE_CODE (type) == REAL_TYPE)
8036 tree targ0 = strip_float_extensions (arg0);
8038 return fold_convert (type, fold_build1 (ABS_EXPR,
8042 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8043 else if (TREE_CODE (arg0) == ABS_EXPR)
8045 else if (tree_expr_nonnegative_p (arg0))
8048 /* Strip sign ops from argument. */
8049 if (TREE_CODE (type) == REAL_TYPE)
8051 tem = fold_strip_sign_ops (arg0);
8053 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
/* CONJ_EXPR (label elided): negate the imaginary part.  */
8058 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8059 return fold_convert (type, arg0);
8060 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8062 tree itype = TREE_TYPE (type);
8063 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8064 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8065 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8067 if (TREE_CODE (arg0) == COMPLEX_CST)
8069 tree itype = TREE_TYPE (type);
8070 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8071 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8072 return build_complex (type, rpart, negate_expr (ipart));
8074 if (TREE_CODE (arg0) == CONJ_EXPR)
8075 return fold_convert (type, TREE_OPERAND (arg0, 0));
/* BIT_NOT_EXPR (label elided).  */
8079 if (TREE_CODE (arg0) == INTEGER_CST)
8080 return fold_not_const (arg0, type);
8081 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8082 return TREE_OPERAND (arg0, 0);
8083 /* Convert ~ (-A) to A - 1. */
8084 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8085 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
8086 build_int_cst (type, 1));
8087 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8088 else if (INTEGRAL_TYPE_P (type)
8089 && ((TREE_CODE (arg0) == MINUS_EXPR
8090 && integer_onep (TREE_OPERAND (arg0, 1)))
8091 || (TREE_CODE (arg0) == PLUS_EXPR
8092 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8093 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8094 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8095 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8096 && (tem = fold_unary (BIT_NOT_EXPR, type,
8098 TREE_OPERAND (arg0, 0)))))
8099 return fold_build2 (BIT_XOR_EXPR, type, tem,
8100 fold_convert (type, TREE_OPERAND (arg0, 1)));
8101 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8102 && (tem = fold_unary (BIT_NOT_EXPR, type,
8104 TREE_OPERAND (arg0, 1)))))
8105 return fold_build2 (BIT_XOR_EXPR, type,
8106 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8110 case TRUTH_NOT_EXPR:
8111 /* The argument to invert_truthvalue must have Boolean type. */
8112 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8113 arg0 = fold_convert (boolean_type_node, arg0);
8115 /* Note that the operand of this must be an int
8116 and its values must be 0 or 1.
8117 ("true" is a fixed value perhaps depending on the language,
8118 but we don't handle values other than 1 correctly yet.) */
8119 tem = fold_truth_not_expr (arg0);
8122 return fold_convert (type, tem);
/* REALPART_EXPR (label elided).  */
8125 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8126 return fold_convert (type, arg0);
8127 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8128 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8129 TREE_OPERAND (arg0, 1));
8130 if (TREE_CODE (arg0) == COMPLEX_CST)
8131 return fold_convert (type, TREE_REALPART (arg0));
8132 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8134 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8135 tem = fold_build2 (TREE_CODE (arg0), itype,
8136 fold_build1 (REALPART_EXPR, itype,
8137 TREE_OPERAND (arg0, 0)),
8138 fold_build1 (REALPART_EXPR, itype,
8139 TREE_OPERAND (arg0, 1)));
8140 return fold_convert (type, tem);
8142 if (TREE_CODE (arg0) == CONJ_EXPR)
8144 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8145 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8146 return fold_convert (type, tem);
8148 if (TREE_CODE (arg0) == CALL_EXPR)
8150 tree fn = get_callee_fndecl (arg0);
8151 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8152 switch (DECL_FUNCTION_CODE (fn))
8154 CASE_FLT_FN (BUILT_IN_CEXPI):
/* real(cexpi(x)) == cos(x).  */
8155 fn = mathfn_built_in (type, BUILT_IN_COS);
8157 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
/* IMAGPART_EXPR (label elided).  */
8167 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8168 return fold_convert (type, integer_zero_node);
8169 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8170 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8171 TREE_OPERAND (arg0, 0));
8172 if (TREE_CODE (arg0) == COMPLEX_CST)
8173 return fold_convert (type, TREE_IMAGPART (arg0));
8174 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8176 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8177 tem = fold_build2 (TREE_CODE (arg0), itype,
8178 fold_build1 (IMAGPART_EXPR, itype,
8179 TREE_OPERAND (arg0, 0)),
8180 fold_build1 (IMAGPART_EXPR, itype,
8181 TREE_OPERAND (arg0, 1)));
8182 return fold_convert (type, tem);
8184 if (TREE_CODE (arg0) == CONJ_EXPR)
8186 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8187 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8188 return fold_convert (type, negate_expr (tem));
8190 if (TREE_CODE (arg0) == CALL_EXPR)
8192 tree fn = get_callee_fndecl (arg0);
8193 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8194 switch (DECL_FUNCTION_CODE (fn))
8196 CASE_FLT_FN (BUILT_IN_CEXPI):
/* imag(cexpi(x)) == sin(x).  */
8197 fn = mathfn_built_in (type, BUILT_IN_SIN);
8199 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8210 } /* switch (code) */
8213 /* Fold a binary expression of code CODE and type TYPE with operands
8214 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8215 Return the folded expression if folding is successful. Otherwise,
8216 return NULL_TREE. */
/* NOTE(review): braces and the `return NULL_TREE;` fall-through lines
   are elided in this excerpt; code tokens unmodified.  */
8219 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8221 enum tree_code compl_code;
/* COMPL_CODE is the complementary extremum: MAX for MIN and vice
   versa; any other CODE bails out (on an elided line).  */
8223 if (code == MIN_EXPR)
8224 compl_code = MAX_EXPR;
8225 else if (code == MAX_EXPR)
8226 compl_code = MIN_EXPR;
8230 /* MIN (MAX (a, b), b) == b. */
8231 if (TREE_CODE (op0) == compl_code
8232 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8233 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0))
8235 /* MIN (MAX (b, a), b) == b. */
8236 if (TREE_CODE (op0) == compl_code
8237 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8238 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8239 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8241 /* MIN (a, MAX (a, b)) == a. */
8242 if (TREE_CODE (op1) == compl_code
8243 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8244 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8245 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8247 /* MIN (a, MAX (b, a)) == a. */
8248 if (TREE_CODE (op1) == compl_code
8249 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8250 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8251 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8256 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8257 by changing CODE to reduce the magnitude of constants involved in
8258 ARG0 of the comparison.
8259 Returns a canonicalized comparison tree if a simplification was
8260 possible, otherwise returns NULL_TREE.
8261 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8262 valid if signed overflow is undefined. */
8265 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8266 tree arg0, tree arg1,
8267 bool *strict_overflow_p)
8269 enum tree_code code0 = TREE_CODE (arg0);
8270 tree t, cst0 = NULL_TREE;
8274 /* Match A +- CST code arg1 and CST code arg1. */
8275 if (!(((code0 == MINUS_EXPR
8276 || code0 == PLUS_EXPR)
8277 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8278 || code0 == INTEGER_CST))
/* NOTE(review): the early-return for the non-matching case is elided
   from this view -- presumably `return NULL_TREE;'.  */
8281 /* Identify the constant in arg0 and its sign. */
/* When ARG0 is itself the constant, the branch that sets CST0 from
   ARG0 is elided here; the visible lines handle the A +- CST form.  */
8282 if (code0 == INTEGER_CST)
8285 cst0 = TREE_OPERAND (arg0, 1);
8286 sgn0 = tree_int_cst_sgn (cst0);
8288 /* Overflowed constants and zero will cause problems. */
8289 if (integer_zerop (cst0)
8290 || TREE_OVERFLOW (cst0))
8293 /* See if we can reduce the magnitude of the constant in
8294 arg0 by changing the comparison code. */
8295 if (code0 == INTEGER_CST)
8297 /* CST <= arg1 -> CST-1 < arg1. */
8298 if (code == LE_EXPR && sgn0 == 1)
8300 /* -CST < arg1 -> -CST-1 <= arg1. */
8301 else if (code == LT_EXPR && sgn0 == -1)
8303 /* CST > arg1 -> CST-1 >= arg1. */
8304 else if (code == GT_EXPR && sgn0 == 1)
8306 /* -CST >= arg1 -> -CST-1 > arg1. */
8307 else if (code == GE_EXPR && sgn0 == -1)
8311 /* arg1 code' CST' might be more canonical. */
/* The statements executed by each arm above (updating CODE and a
   swap flag) are elided from this view.  */
8316 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8318 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8320 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8321 else if (code == GT_EXPR
8322 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8324 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8325 else if (code == LE_EXPR
8326 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8328 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8329 else if (code == GE_EXPR
8330 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
/* Any A +- CST rewrite is only valid when signed overflow is
   undefined; record that so the caller can warn.  */
8334 *strict_overflow_p = true;
8337 /* Now build the constant reduced in magnitude. */
8338 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8339 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8340 if (code0 != INTEGER_CST)
8341 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8343 /* If swapping might yield to a more canonical form, do so. */
8345 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8347 return fold_build2 (code, type, t, arg1);
8350 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8351 overflow further. Try to decrease the magnitude of constants involved
8352 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8353 and put sole constants at the second argument position.
8354 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8357 maybe_canonicalize_comparison (enum tree_code code, tree type,
8358 tree arg0, tree arg1)
8361 bool strict_overflow_p;
8362 const char * const warnmsg = G_("assuming signed overflow does not occur "
8363 "when reducing constant in comparison");
8365 /* In principle pointers also have undefined overflow behavior,
8366 but that causes problems elsewhere. */
8367 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8368 || POINTER_TYPE_P (TREE_TYPE (arg0)))
/* NOTE(review): the bail-out statement for the guard above is elided
   from this view -- presumably `return NULL_TREE;'.  */
8371 /* Try canonicalization by simplifying arg0. */
8372 strict_overflow_p = false;
8373 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8374 &strict_overflow_p);
/* Only warn about relying on undefined signed overflow when the
   helper actually produced a simplification.  */
8377 if (strict_overflow_p)
8378 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8382 /* Try canonicalization by simplifying arg1 using the swapped
8384 code = swap_tree_comparison (code);
8385 strict_overflow_p = false;
8386 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8387 &strict_overflow_p);
8388 if (t && strict_overflow_p)
8389 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8393 /* Subroutine of fold_binary. This routine performs all of the
8394 transformations that are common to the equality/inequality
8395 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8396 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8397 fold_binary should call fold_binary. Fold a comparison with
8398 tree code CODE and type TYPE with operands OP0 and OP1. Return
8399 the folded comparison or NULL_TREE. */
8402 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8404 tree arg0, arg1, tem;
/* NOTE(review): the assignments initializing ARG0/ARG1 (presumably
   from OP0/OP1 before stripping sign-preserving conversions) are
   elided from this view.  */
8409 STRIP_SIGN_NOPS (arg0);
8410 STRIP_SIGN_NOPS (arg1);
/* First try folding the comparison of two constants outright.  */
8412 tem = fold_relational_const (code, type, arg0, arg1);
8413 if (tem != NULL_TREE)
8416 /* If one arg is a real or integer constant, put it last. */
8417 if (tree_swap_operands_p (arg0, arg1, true))
8418 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8420 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8421 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8422 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8423 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8424 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8425 && (TREE_CODE (arg1) == INTEGER_CST
8426 && !TREE_OVERFLOW (arg1)))
8428 tree const1 = TREE_OPERAND (arg0, 1);
8430 tree variable = TREE_OPERAND (arg0, 0);
/* For X - C1, moving C1 across the comparison means adding it to
   the other side; LHS_ADD records that.  */
8433 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8435 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8436 TREE_TYPE (arg1), const2, const1);
8438 /* If the constant operation overflowed this can be
8439 simplified as a comparison against INT_MAX/INT_MIN. */
8440 if (TREE_CODE (lhs) == INTEGER_CST
8441 && TREE_OVERFLOW (lhs))
8443 int const1_sgn = tree_int_cst_sgn (const1);
8444 enum tree_code code2 = code;
8446 /* Get the sign of the constant on the lhs if the
8447 operation were VARIABLE + CONST1. */
8448 if (TREE_CODE (arg0) == MINUS_EXPR)
8449 const1_sgn = -const1_sgn;
8451 /* The sign of the constant determines if we overflowed
8452 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8453 Canonicalize to the INT_MIN overflow by swapping the comparison
8455 if (const1_sgn == -1)
8456 code2 = swap_tree_comparison (code);
8458 /* We now can look at the canonicalized case
8459 VARIABLE + 1 CODE2 INT_MIN
8460 and decide on the result. */
8461 if (code2 == LT_EXPR
8463 || code2 == EQ_EXPR)
8464 return omit_one_operand (type, boolean_false_node, variable);
8465 else if (code2 == NE_EXPR
8467 || code2 == GT_EXPR)
8468 return omit_one_operand (type, boolean_true_node, variable);
/* Otherwise only use the transformed comparison when the new
   constant did not overflow.  */
8471 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8472 && (TREE_CODE (lhs) != INTEGER_CST
8473 || !TREE_OVERFLOW (lhs)))
8475 fold_overflow_warning (("assuming signed overflow does not occur "
8476 "when changing X +- C1 cmp C2 to "
8478 WARN_STRICT_OVERFLOW_COMPARISON);
8479 return fold_build2 (code, type, variable, lhs);
8483 /* For comparisons of pointers we can decompose it to a compile time
8484 comparison of the base objects and the offsets into the object.
8485 This requires at least one operand being an ADDR_EXPR to do more
8486 than the operand_equal_p test below. */
8487 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8488 && (TREE_CODE (arg0) == ADDR_EXPR
8489 || TREE_CODE (arg1) == ADDR_EXPR))
8491 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8492 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8493 enum machine_mode mode;
8494 int volatilep, unsignedp;
8495 bool indirect_base0 = false;
8497 /* Get base and offset for the access. Strip ADDR_EXPR for
8498 get_inner_reference, but put it back by stripping INDIRECT_REF
8499 off the base object if possible. */
8501 if (TREE_CODE (arg0) == ADDR_EXPR)
8503 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8504 &bitsize, &bitpos0, &offset0, &mode,
8505 &unsignedp, &volatilep, false);
8506 if (TREE_CODE (base0) == INDIRECT_REF)
8507 base0 = TREE_OPERAND (base0, 0);
8509 indirect_base0 = true;
8513 if (TREE_CODE (arg1) == ADDR_EXPR)
8515 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8516 &bitsize, &bitpos1, &offset1, &mode,
8517 &unsignedp, &volatilep, false);
8518 /* We have to make sure to have an indirect/non-indirect base1
8519 just the same as we did for base0. */
8520 if (TREE_CODE (base1) == INDIRECT_REF
8522 base1 = TREE_OPERAND (base1, 0);
8523 else if (!indirect_base0)
8526 else if (indirect_base0)
/* NOTE(review): the statements invalidating BASE1 on an
   indirect/non-indirect mismatch are elided from this view.  */
8529 /* If we have equivalent bases we might be able to simplify. */
8531 && operand_equal_p (base0, base1, 0))
8533 /* We can fold this expression to a constant if the non-constant
8534 offset parts are equal. */
8535 if (offset0 == offset1
8536 || (offset0 && offset1
8537 && operand_equal_p (offset0, offset1, 0)))
/* With equal bases and offsets the result depends only on the
   constant bit positions; the switch-over-CODE skeleton is
   partially elided here.  */
8542 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8544 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8546 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8548 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8550 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8552 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8556 /* We can simplify the comparison to a comparison of the variable
8557 offset parts if the constant offset parts are equal.
8558 Be careful to use signed size type here because otherwise we
8559 mess with array offsets in the wrong way. This is possible
8560 because pointer arithmetic is restricted to retain within an
8561 object and overflow on pointer differences is undefined as of
8562 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8563 else if (bitpos0 == bitpos1)
8565 tree signed_size_type_node;
8566 signed_size_type_node = signed_type_for (size_type_node);
8568 /* By converting to signed size type we cover middle-end pointer
8569 arithmetic which operates on unsigned pointer types of size
8570 type size and ARRAY_REF offsets which are properly sign or
8571 zero extended from their type in case it is narrower than
8573 if (offset0 == NULL_TREE)
8574 offset0 = build_int_cst (signed_size_type_node, 0);
8576 offset0 = fold_convert (signed_size_type_node, offset0);
8577 if (offset1 == NULL_TREE)
8578 offset1 = build_int_cst (signed_size_type_node, 0);
8580 offset1 = fold_convert (signed_size_type_node, offset1);
8582 return fold_build2 (code, type, offset0, offset1);
8587 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8588 same object, then we can fold this to a comparison of the two offsets in
8589 signed size type. This is possible because pointer arithmetic is
8590 restricted to retain within an object and overflow on pointer differences
8591 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.
8593 We check flag_wrapv directly because pointers types are unsigned,
8594 and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is
8595 normally what we want to avoid certain odd overflow cases, but
8597 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8599 && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0)))
8601 tree base0, offset0, base1, offset1;
8603 if (extract_array_ref (arg0, &base0, &offset0)
8604 && extract_array_ref (arg1, &base1, &offset1)
8605 && operand_equal_p (base0, base1, 0))
8607 tree signed_size_type_node;
8608 signed_size_type_node = signed_type_for (size_type_node);
8610 /* By converting to signed size type we cover middle-end pointer
8611 arithmetic which operates on unsigned pointer types of size
8612 type size and ARRAY_REF offsets which are properly sign or
8613 zero extended from their type in case it is narrower than
8615 if (offset0 == NULL_TREE)
8616 offset0 = build_int_cst (signed_size_type_node, 0);
8618 offset0 = fold_convert (signed_size_type_node, offset0);
8619 if (offset1 == NULL_TREE)
8620 offset1 = build_int_cst (signed_size_type_node, 0);
8622 offset1 = fold_convert (signed_size_type_node, offset1);
8624 return fold_build2 (code, type, offset0, offset1);
8628 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8629 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8630 the resulting offset is smaller in absolute value than the
8632 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8633 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8634 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8635 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8636 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8637 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8638 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8640 tree const1 = TREE_OPERAND (arg0, 1);
8641 tree const2 = TREE_OPERAND (arg1, 1);
8642 tree variable1 = TREE_OPERAND (arg0, 0);
8643 tree variable2 = TREE_OPERAND (arg1, 0);
8645 const char * const warnmsg = G_("assuming signed overflow does not "
8646 "occur when combining constants around "
8649 /* Put the constant on the side where it doesn't overflow and is
8650 of lower absolute value than before. */
8651 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8652 ? MINUS_EXPR : PLUS_EXPR,
8654 if (!TREE_OVERFLOW (cst)
8655 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8657 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8658 return fold_build2 (code, type,
8660 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
/* If moving the constant to the right-hand side would overflow,
   try the symmetric rewrite that keeps it on the left instead.  */
8664 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8665 ? MINUS_EXPR : PLUS_EXPR,
8667 if (!TREE_OVERFLOW (cst)
8668 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8670 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8671 return fold_build2 (code, type,
8672 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8678 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8679 signed arithmetic case. That form is created by the compiler
8680 often enough for folding it to be of value. One example is in
8681 computing loop trip counts after Operator Strength Reduction. */
8682 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8683 && TREE_CODE (arg0) == MULT_EXPR
8684 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8685 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8686 && integer_zerop (arg1))
8688 tree const1 = TREE_OPERAND (arg0, 1);
8689 tree const2 = arg1; /* zero */
8690 tree variable1 = TREE_OPERAND (arg0, 0);
8691 enum tree_code cmp_code = code;
8693 gcc_assert (!integer_zerop (const1));
8695 fold_overflow_warning (("assuming signed overflow does not occur when "
8696 "eliminating multiplication in comparison "
8698 WARN_STRICT_OVERFLOW_COMPARISON);
8700 /* If const1 is negative we swap the sense of the comparison. */
8701 if (tree_int_cst_sgn (const1) < 0)
8702 cmp_code = swap_tree_comparison (cmp_code);
8704 return fold_build2 (cmp_code, type, variable1, const2);
8707 tem = maybe_canonicalize_comparison (code, type, op0, op1);
/* Floating-point specific simplifications.  */
8711 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8713 tree targ0 = strip_float_extensions (arg0);
8714 tree targ1 = strip_float_extensions (arg1);
8715 tree newtype = TREE_TYPE (targ0);
8717 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8718 newtype = TREE_TYPE (targ1);
8720 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8721 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8722 return fold_build2 (code, type, fold_convert (newtype, targ0),
8723 fold_convert (newtype, targ1));
8725 /* (-a) CMP (-b) -> b CMP a */
8726 if (TREE_CODE (arg0) == NEGATE_EXPR
8727 && TREE_CODE (arg1) == NEGATE_EXPR)
8728 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8729 TREE_OPERAND (arg0, 0));
8731 if (TREE_CODE (arg1) == REAL_CST)
8733 REAL_VALUE_TYPE cst;
8734 cst = TREE_REAL_CST (arg1);
8736 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8737 if (TREE_CODE (arg0) == NEGATE_EXPR)
8738 return fold_build2 (swap_tree_comparison (code), type,
8739 TREE_OPERAND (arg0, 0),
8740 build_real (TREE_TYPE (arg1),
8741 REAL_VALUE_NEGATE (cst)));
8743 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8744 /* a CMP (-0) -> a CMP 0 */
8745 if (REAL_VALUE_MINUS_ZERO (cst))
8746 return fold_build2 (code, type, arg0,
8747 build_real (TREE_TYPE (arg1), dconst0));
8749 /* x != NaN is always true, other ops are always false. */
8750 if (REAL_VALUE_ISNAN (cst)
8751 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8753 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8754 return omit_one_operand (type, tem, arg0);
8757 /* Fold comparisons against infinity. */
8758 if (REAL_VALUE_ISINF (cst))
8760 tem = fold_inf_compare (code, type, arg0, arg1);
8761 if (tem != NULL_TREE)
8766 /* If this is a comparison of a real constant with a PLUS_EXPR
8767 or a MINUS_EXPR of a real constant, we can convert it into a
8768 comparison with a revised real constant as long as no overflow
8769 occurs when unsafe_math_optimizations are enabled. */
8770 if (flag_unsafe_math_optimizations
8771 && TREE_CODE (arg1) == REAL_CST
8772 && (TREE_CODE (arg0) == PLUS_EXPR
8773 || TREE_CODE (arg0) == MINUS_EXPR)
8774 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8775 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8776 ? MINUS_EXPR : PLUS_EXPR,
8777 arg1, TREE_OPERAND (arg0, 1), 0))
8778 && !TREE_OVERFLOW (tem))
8779 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8781 /* Likewise, we can simplify a comparison of a real constant with
8782 a MINUS_EXPR whose first operand is also a real constant, i.e.
8783 (c1 - x) < c2 becomes x > c1-c2. */
8784 if (flag_unsafe_math_optimizations
8785 && TREE_CODE (arg1) == REAL_CST
8786 && TREE_CODE (arg0) == MINUS_EXPR
8787 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8788 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8790 && !TREE_OVERFLOW (tem))
8791 return fold_build2 (swap_tree_comparison (code), type,
8792 TREE_OPERAND (arg0, 1), tem);
8794 /* Fold comparisons against built-in math functions. */
8795 if (TREE_CODE (arg1) == REAL_CST
8796 && flag_unsafe_math_optimizations
8797 && ! flag_errno_math)
8799 enum built_in_function fcode = builtin_mathfn_code (arg0);
8801 if (fcode != END_BUILTINS)
8803 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8804 if (tem != NULL_TREE)
/* Integer-conversion narrowing / signedness-change comparisons.  */
8810 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8811 && (TREE_CODE (arg0) == NOP_EXPR
8812 || TREE_CODE (arg0) == CONVERT_EXPR))
8814 /* If we are widening one operand of an integer comparison,
8815 see if the other operand is similarly being widened. Perhaps we
8816 can do the comparison in the narrower type. */
8817 tem = fold_widened_comparison (code, type, arg0, arg1);
8821 /* Or if we are changing signedness. */
8822 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8827 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8828 constant, we can simplify it. */
8829 if (TREE_CODE (arg1) == INTEGER_CST
8830 && (TREE_CODE (arg0) == MIN_EXPR
8831 || TREE_CODE (arg0) == MAX_EXPR)
8832 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8834 tem = optimize_minmax_comparison (code, type, op0, op1);
8839 /* Simplify comparison of something with itself. (For IEEE
8840 floating-point, we can only do some of these simplifications.) */
8841 if (operand_equal_p (arg0, arg1, 0))
/* NOTE(review): the switch-over-CODE skeleton here is partially
   elided; the visible cases handle EQ/ordered, GE/LE, and NE.  */
8846 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8847 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8848 return constant_boolean_node (1, type);
8853 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8854 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8855 return constant_boolean_node (1, type);
8856 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8859 /* For NE, we can only do this simplification if integer
8860 or we don't honor IEEE floating point NaNs. */
8861 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8862 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8864 /* ... fall through ... */
8867 return constant_boolean_node (0, type);
8873 /* If we are comparing an expression that just has comparisons
8874 of two integer values, arithmetic expressions of those comparisons,
8875 and constants, we can simplify it. There are only three cases
8876 to check: the two values can either be equal, the first can be
8877 greater, or the second can be greater. Fold the expression for
8878 those three values. Since each value must be 0 or 1, we have
8879 eight possibilities, each of which corresponds to the constant 0
8880 or 1 or one of the six possible comparisons.
8882 This handles common cases like (a > b) == 0 but also handles
8883 expressions like ((x > y) - (y > x)) > 0, which supposedly
8884 occur in macroized code. */
8886 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8888 tree cval1 = 0, cval2 = 0;
8891 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8892 /* Don't handle degenerate cases here; they should already
8893 have been handled anyway. */
8894 && cval1 != 0 && cval2 != 0
8895 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8896 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8897 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8898 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8899 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8900 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8901 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8903 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8904 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8906 /* We can't just pass T to eval_subst in case cval1 or cval2
8907 was the same as ARG1. */
8910 = fold_build2 (code, type,
8911 eval_subst (arg0, cval1, maxval,
8915 = fold_build2 (code, type,
8916 eval_subst (arg0, cval1, maxval,
8920 = fold_build2 (code, type,
8921 eval_subst (arg0, cval1, minval,
8925 /* All three of these results should be 0 or 1. Confirm they are.
8926 Then use those values to select the proper code to use. */
8928 if (TREE_CODE (high_result) == INTEGER_CST
8929 && TREE_CODE (equal_result) == INTEGER_CST
8930 && TREE_CODE (low_result) == INTEGER_CST)
8932 /* Make a 3-bit mask with the high-order bit being the
8933 value for `>', the next for '=', and the low for '<'. */
8934 switch ((integer_onep (high_result) * 4)
8935 + (integer_onep (equal_result) * 2)
8936 + integer_onep (low_result))
/* NOTE(review): the individual mask cases (selecting a
   comparison code for values 1..6) are elided from this view;
   only the all-false and all-true outcomes remain visible.  */
8940 return omit_one_operand (type, integer_zero_node, arg0);
8961 return omit_one_operand (type, integer_one_node, arg0);
8965 return save_expr (build2 (code, type, cval1, cval2));
8966 return fold_build2 (code, type, cval1, cval2);
8971 /* Fold a comparison of the address of COMPONENT_REFs with the same
8972 type and component to a comparison of the address of the base
8973 object. In short, &x->a OP &y->a to x OP y and
8974 &x->a OP &y.a to x OP &y */
8975 if (TREE_CODE (arg0) == ADDR_EXPR
8976 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8977 && TREE_CODE (arg1) == ADDR_EXPR
8978 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8980 tree cref0 = TREE_OPERAND (arg0, 0);
8981 tree cref1 = TREE_OPERAND (arg1, 0);
8982 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8984 tree op0 = TREE_OPERAND (cref0, 0);
8985 tree op1 = TREE_OPERAND (cref1, 0);
8986 return fold_build2 (code, type,
8987 fold_addr_expr (op0),
8988 fold_addr_expr (op1));
8992 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8993 into a single range test. */
8994 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8995 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8996 && TREE_CODE (arg1) == INTEGER_CST
8997 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8998 && !integer_zerop (TREE_OPERAND (arg0, 1))
8999 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9000 && !TREE_OVERFLOW (arg1))
9002 tem = fold_div_compare (code, type, arg0, arg1);
9003 if (tem != NULL_TREE)
9007 /* Fold ~X op ~Y as Y op X. */
9008 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9009 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9011 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9012 return fold_build2 (code, type,
9013 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9014 TREE_OPERAND (arg0, 0));
9017 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9018 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9019 && TREE_CODE (arg1) == INTEGER_CST)
9021 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9022 return fold_build2 (swap_tree_comparison (code), type,
9023 TREE_OPERAND (arg0, 0),
9024 fold_build1 (BIT_NOT_EXPR, cmp_type,
9025 fold_convert (cmp_type, arg1)));
9032 /* Subroutine of fold_binary. Optimize complex multiplications of the
9033 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9034 argument EXPR represents the expression "z" of type TYPE. */
9037 fold_mult_zconjz (tree type, tree expr)
9039 tree itype = TREE_TYPE (type);
9040 tree rpart, ipart, tem;
/* Pull the real and imaginary parts out of EXPR without building
   REALPART/IMAGPART trees when they are directly available.  */
9042 if (TREE_CODE (expr) == COMPLEX_EXPR)
9044 rpart = TREE_OPERAND (expr, 0);
9045 ipart = TREE_OPERAND (expr, 1);
9047 else if (TREE_CODE (expr) == COMPLEX_CST)
9049 rpart = TREE_REALPART (expr);
9050 ipart = TREE_IMAGPART (expr);
/* Otherwise EXPR must be evaluated once and decomposed; save_expr
   prevents double evaluation of any side effects.  */
9054 expr = save_expr (expr);
9055 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9056 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
/* Each part is squared, i.e. used twice -- wrap in save_expr so it
   is computed only once.  Result is rpart^2 + ipart^2 with a zero
   imaginary component.  */
9059 rpart = save_expr (rpart);
9060 ipart = save_expr (ipart);
9061 tem = fold_build2 (PLUS_EXPR, itype,
9062 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9063 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9064 return fold_build2 (COMPLEX_EXPR, type, tem,
9065 fold_convert (itype, integer_zero_node));
9069 /* Fold a binary expression of code CODE and type TYPE with operands
9070 OP0 and OP1. Return the folded expression if folding is
9071 successful. Otherwise, return NULL_TREE. */
9074 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9076 enum tree_code_class kind = TREE_CODE_CLASS (code);
9077 tree arg0, arg1, tem;
9078 tree t1 = NULL_TREE;
9079 bool strict_overflow_p;
9081 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9082 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9083 && TREE_CODE_LENGTH (code) == 2
9085 && op1 != NULL_TREE);
9090 /* Strip any conversions that don't change the mode. This is
9091 safe for every expression, except for a comparison expression
9092 because its signedness is derived from its operands. So, in
9093 the latter case, only strip conversions that don't change the
9096 Note that this is done as an internal manipulation within the
9097 constant folder, in order to find the simplest representation
9098 of the arguments so that their form can be studied. In any
9099 cases, the appropriate type conversions should be put back in
9100 the tree that will get out of the constant folder. */
9102 if (kind == tcc_comparison)
9104 STRIP_SIGN_NOPS (arg0);
9105 STRIP_SIGN_NOPS (arg1);
9113 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9114 constant but we can't do arithmetic on them. */
9115 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9116 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9117 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9118 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9120 if (kind == tcc_binary)
9121 tem = const_binop (code, arg0, arg1, 0);
9122 else if (kind == tcc_comparison)
9123 tem = fold_relational_const (code, type, arg0, arg1);
9127 if (tem != NULL_TREE)
9129 if (TREE_TYPE (tem) != type)
9130 tem = fold_convert (type, tem);
9135 /* If this is a commutative operation, and ARG0 is a constant, move it
9136 to ARG1 to reduce the number of tests below. */
9137 if (commutative_tree_code (code)
9138 && tree_swap_operands_p (arg0, arg1, true))
9139 return fold_build2 (code, type, op1, op0);
9141 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9143 First check for cases where an arithmetic operation is applied to a
9144 compound, conditional, or comparison operation. Push the arithmetic
9145 operation inside the compound or conditional to see if any folding
9146 can then be done. Convert comparison to conditional for this purpose.
9147 The also optimizes non-constant cases that used to be done in
9150 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9151 one of the operands is a comparison and the other is a comparison, a
9152 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9153 code below would make the expression more complex. Change it to a
9154 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9155 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9157 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9158 || code == EQ_EXPR || code == NE_EXPR)
9159 && ((truth_value_p (TREE_CODE (arg0))
9160 && (truth_value_p (TREE_CODE (arg1))
9161 || (TREE_CODE (arg1) == BIT_AND_EXPR
9162 && integer_onep (TREE_OPERAND (arg1, 1)))))
9163 || (truth_value_p (TREE_CODE (arg1))
9164 && (truth_value_p (TREE_CODE (arg0))
9165 || (TREE_CODE (arg0) == BIT_AND_EXPR
9166 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9168 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9169 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9172 fold_convert (boolean_type_node, arg0),
9173 fold_convert (boolean_type_node, arg1));
9175 if (code == EQ_EXPR)
9176 tem = invert_truthvalue (tem);
9178 return fold_convert (type, tem);
9181 if (TREE_CODE_CLASS (code) == tcc_binary
9182 || TREE_CODE_CLASS (code) == tcc_comparison)
9184 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9185 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9186 fold_build2 (code, type,
9187 TREE_OPERAND (arg0, 1), op1));
9188 if (TREE_CODE (arg1) == COMPOUND_EXPR
9189 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9190 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9191 fold_build2 (code, type,
9192 op0, TREE_OPERAND (arg1, 1)));
9194 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9196 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9198 /*cond_first_p=*/1);
9199 if (tem != NULL_TREE)
9203 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9205 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9207 /*cond_first_p=*/0);
9208 if (tem != NULL_TREE)
9215 case POINTER_PLUS_EXPR:
9216 /* 0 +p index -> (type)index */
9217 if (integer_zerop (arg0))
9218 return non_lvalue (fold_convert (type, arg1));
9220 /* PTR +p 0 -> PTR */
9221 if (integer_zerop (arg1))
9222 return non_lvalue (fold_convert (type, arg0));
9224 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9225 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9226 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9227 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9228 fold_convert (sizetype, arg1),
9229 fold_convert (sizetype, arg0)));
9231 /* index +p PTR -> PTR +p index */
9232 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9233 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9234 return fold_build2 (POINTER_PLUS_EXPR, type,
9235 fold_convert (type, arg1), fold_convert (sizetype, arg0));
9237 /* (PTR +p B) +p A -> PTR +p (B + A) */
9238 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9241 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9242 tree arg00 = TREE_OPERAND (arg0, 0);
9243 inner = fold_build2 (PLUS_EXPR, sizetype, arg01, fold_convert (sizetype, arg1));
9244 return fold_build2 (POINTER_PLUS_EXPR, type, arg00, inner);
9247 /* PTR_CST +p CST -> CST1 */
9248 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9249 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9251 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9252 of the array. Loop optimizer sometimes produce this type of
9254 if (TREE_CODE (arg0) == ADDR_EXPR)
9256 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9258 return fold_convert (type, tem);
9263 /* PTR + INT -> (INT)(PTR p+ INT) */
9264 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9265 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9266 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9269 fold_convert (sizetype, arg1)));
9270 /* INT + PTR -> (INT)(PTR p+ INT) */
9271 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9272 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9273 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9276 fold_convert (sizetype, arg0)));
9277 /* A + (-B) -> A - B */
9278 if (TREE_CODE (arg1) == NEGATE_EXPR)
9279 return fold_build2 (MINUS_EXPR, type,
9280 fold_convert (type, arg0),
9281 fold_convert (type, TREE_OPERAND (arg1, 0)));
9282 /* (-A) + B -> B - A */
9283 if (TREE_CODE (arg0) == NEGATE_EXPR
9284 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9285 return fold_build2 (MINUS_EXPR, type,
9286 fold_convert (type, arg1),
9287 fold_convert (type, TREE_OPERAND (arg0, 0)));
9289 if (INTEGRAL_TYPE_P (type))
9291 /* Convert ~A + 1 to -A. */
9292 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9293 && integer_onep (arg1))
9294 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9297 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9298 && !TYPE_OVERFLOW_TRAPS (type))
9300 tree tem = TREE_OPERAND (arg0, 0);
9303 if (operand_equal_p (tem, arg1, 0))
9305 t1 = build_int_cst_type (type, -1);
9306 return omit_one_operand (type, t1, arg1);
9311 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9312 && !TYPE_OVERFLOW_TRAPS (type))
9314 tree tem = TREE_OPERAND (arg1, 0);
9317 if (operand_equal_p (arg0, tem, 0))
9319 t1 = build_int_cst_type (type, -1);
9320 return omit_one_operand (type, t1, arg0);
9325 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9327 if ((TREE_CODE (arg0) == MULT_EXPR
9328 || TREE_CODE (arg1) == MULT_EXPR)
9329 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9331 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9336 if (! FLOAT_TYPE_P (type))
9338 if (integer_zerop (arg1))
9339 return non_lvalue (fold_convert (type, arg0));
9341 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9342 with a constant, and the two constants have no bits in common,
9343 we should treat this as a BIT_IOR_EXPR since this may produce more
9345 if (TREE_CODE (arg0) == BIT_AND_EXPR
9346 && TREE_CODE (arg1) == BIT_AND_EXPR
9347 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9348 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9349 && integer_zerop (const_binop (BIT_AND_EXPR,
9350 TREE_OPERAND (arg0, 1),
9351 TREE_OPERAND (arg1, 1), 0)))
9353 code = BIT_IOR_EXPR;
9357 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9358 (plus (plus (mult) (mult)) (foo)) so that we can
9359 take advantage of the factoring cases below. */
9360 if (((TREE_CODE (arg0) == PLUS_EXPR
9361 || TREE_CODE (arg0) == MINUS_EXPR)
9362 && TREE_CODE (arg1) == MULT_EXPR)
9363 || ((TREE_CODE (arg1) == PLUS_EXPR
9364 || TREE_CODE (arg1) == MINUS_EXPR)
9365 && TREE_CODE (arg0) == MULT_EXPR))
9367 tree parg0, parg1, parg, marg;
9368 enum tree_code pcode;
9370 if (TREE_CODE (arg1) == MULT_EXPR)
9371 parg = arg0, marg = arg1;
9373 parg = arg1, marg = arg0;
9374 pcode = TREE_CODE (parg);
9375 parg0 = TREE_OPERAND (parg, 0);
9376 parg1 = TREE_OPERAND (parg, 1);
9380 if (TREE_CODE (parg0) == MULT_EXPR
9381 && TREE_CODE (parg1) != MULT_EXPR)
9382 return fold_build2 (pcode, type,
9383 fold_build2 (PLUS_EXPR, type,
9384 fold_convert (type, parg0),
9385 fold_convert (type, marg)),
9386 fold_convert (type, parg1));
9387 if (TREE_CODE (parg0) != MULT_EXPR
9388 && TREE_CODE (parg1) == MULT_EXPR)
9389 return fold_build2 (PLUS_EXPR, type,
9390 fold_convert (type, parg0),
9391 fold_build2 (pcode, type,
9392 fold_convert (type, marg),
9399 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9400 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9401 return non_lvalue (fold_convert (type, arg0));
9403 /* Likewise if the operands are reversed. */
9404 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9405 return non_lvalue (fold_convert (type, arg1));
9407 /* Convert X + -C into X - C. */
9408 if (TREE_CODE (arg1) == REAL_CST
9409 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9411 tem = fold_negate_const (arg1, type);
9412 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9413 return fold_build2 (MINUS_EXPR, type,
9414 fold_convert (type, arg0),
9415 fold_convert (type, tem));
9418 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9419 to __complex__ ( x, y ). This is not the same for SNaNs or
9420 if signed zeros are involved. */
9421 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9422 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9423 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9425 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9426 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9427 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9428 bool arg0rz = false, arg0iz = false;
9429 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9430 || (arg0i && (arg0iz = real_zerop (arg0i))))
9432 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9433 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9434 if (arg0rz && arg1i && real_zerop (arg1i))
9436 tree rp = arg1r ? arg1r
9437 : build1 (REALPART_EXPR, rtype, arg1);
9438 tree ip = arg0i ? arg0i
9439 : build1 (IMAGPART_EXPR, rtype, arg0);
9440 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9442 else if (arg0iz && arg1r && real_zerop (arg1r))
9444 tree rp = arg0r ? arg0r
9445 : build1 (REALPART_EXPR, rtype, arg0);
9446 tree ip = arg1i ? arg1i
9447 : build1 (IMAGPART_EXPR, rtype, arg1);
9448 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9453 if (flag_unsafe_math_optimizations
9454 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9455 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9456 && (tem = distribute_real_division (code, type, arg0, arg1)))
9459 /* Convert x+x into x*2.0. */
9460 if (operand_equal_p (arg0, arg1, 0)
9461 && SCALAR_FLOAT_TYPE_P (type))
9462 return fold_build2 (MULT_EXPR, type, arg0,
9463 build_real (type, dconst2));
9465 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
9466 if (flag_unsafe_math_optimizations
9467 && TREE_CODE (arg1) == PLUS_EXPR
9468 && TREE_CODE (arg0) != MULT_EXPR)
9470 tree tree10 = TREE_OPERAND (arg1, 0);
9471 tree tree11 = TREE_OPERAND (arg1, 1);
9472 if (TREE_CODE (tree11) == MULT_EXPR
9473 && TREE_CODE (tree10) == MULT_EXPR)
9476 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9477 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9480 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
9481 if (flag_unsafe_math_optimizations
9482 && TREE_CODE (arg0) == PLUS_EXPR
9483 && TREE_CODE (arg1) != MULT_EXPR)
9485 tree tree00 = TREE_OPERAND (arg0, 0);
9486 tree tree01 = TREE_OPERAND (arg0, 1);
9487 if (TREE_CODE (tree01) == MULT_EXPR
9488 && TREE_CODE (tree00) == MULT_EXPR)
9491 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9492 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9498 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9499 is a rotate of A by C1 bits. */
9500 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9501 is a rotate of A by B bits. */
9503 enum tree_code code0, code1;
9504 code0 = TREE_CODE (arg0);
9505 code1 = TREE_CODE (arg1);
9506 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9507 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9508 && operand_equal_p (TREE_OPERAND (arg0, 0),
9509 TREE_OPERAND (arg1, 0), 0)
9510 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9512 tree tree01, tree11;
9513 enum tree_code code01, code11;
9515 tree01 = TREE_OPERAND (arg0, 1);
9516 tree11 = TREE_OPERAND (arg1, 1);
9517 STRIP_NOPS (tree01);
9518 STRIP_NOPS (tree11);
9519 code01 = TREE_CODE (tree01);
9520 code11 = TREE_CODE (tree11);
9521 if (code01 == INTEGER_CST
9522 && code11 == INTEGER_CST
9523 && TREE_INT_CST_HIGH (tree01) == 0
9524 && TREE_INT_CST_HIGH (tree11) == 0
9525 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9526 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9527 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9528 code0 == LSHIFT_EXPR ? tree01 : tree11);
9529 else if (code11 == MINUS_EXPR)
9531 tree tree110, tree111;
9532 tree110 = TREE_OPERAND (tree11, 0);
9533 tree111 = TREE_OPERAND (tree11, 1);
9534 STRIP_NOPS (tree110);
9535 STRIP_NOPS (tree111);
9536 if (TREE_CODE (tree110) == INTEGER_CST
9537 && 0 == compare_tree_int (tree110,
9539 (TREE_TYPE (TREE_OPERAND
9541 && operand_equal_p (tree01, tree111, 0))
9542 return build2 ((code0 == LSHIFT_EXPR
9545 type, TREE_OPERAND (arg0, 0), tree01);
9547 else if (code01 == MINUS_EXPR)
9549 tree tree010, tree011;
9550 tree010 = TREE_OPERAND (tree01, 0);
9551 tree011 = TREE_OPERAND (tree01, 1);
9552 STRIP_NOPS (tree010);
9553 STRIP_NOPS (tree011);
9554 if (TREE_CODE (tree010) == INTEGER_CST
9555 && 0 == compare_tree_int (tree010,
9557 (TREE_TYPE (TREE_OPERAND
9559 && operand_equal_p (tree11, tree011, 0))
9560 return build2 ((code0 != LSHIFT_EXPR
9563 type, TREE_OPERAND (arg0, 0), tree11);
9569 /* In most languages, can't associate operations on floats through
9570 parentheses. Rather than remember where the parentheses were, we
9571 don't associate floats at all, unless the user has specified
9572 -funsafe-math-optimizations. */
9574 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9576 tree var0, con0, lit0, minus_lit0;
9577 tree var1, con1, lit1, minus_lit1;
9580 /* Split both trees into variables, constants, and literals. Then
9581 associate each group together, the constants with literals,
9582 then the result with variables. This increases the chances of
9583 literals being recombined later and of generating relocatable
9584 expressions for the sum of a constant and literal. */
9585 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9586 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9587 code == MINUS_EXPR);
9589 /* With undefined overflow we can only associate constants
9590 with one variable. */
9591 if ((POINTER_TYPE_P (type)
9592 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9598 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9599 tmp0 = TREE_OPERAND (tmp0, 0);
9600 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9601 tmp1 = TREE_OPERAND (tmp1, 0);
9602 /* The only case we can still associate with two variables
9603 is if they are the same, modulo negation. */
9604 if (!operand_equal_p (tmp0, tmp1, 0))
9608 /* Only do something if we found more than two objects. Otherwise,
9609 nothing has changed and we risk infinite recursion. */
9611 && (2 < ((var0 != 0) + (var1 != 0)
9612 + (con0 != 0) + (con1 != 0)
9613 + (lit0 != 0) + (lit1 != 0)
9614 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9616 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9617 if (code == MINUS_EXPR)
9620 var0 = associate_trees (var0, var1, code, type);
9621 con0 = associate_trees (con0, con1, code, type);
9622 lit0 = associate_trees (lit0, lit1, code, type);
9623 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9625 /* Preserve the MINUS_EXPR if the negative part of the literal is
9626 greater than the positive part. Otherwise, the multiplicative
9627 folding code (i.e extract_muldiv) may be fooled in case
9628 unsigned constants are subtracted, like in the following
9629 example: ((X*2 + 4) - 8U)/2. */
9630 if (minus_lit0 && lit0)
9632 if (TREE_CODE (lit0) == INTEGER_CST
9633 && TREE_CODE (minus_lit0) == INTEGER_CST
9634 && tree_int_cst_lt (lit0, minus_lit0))
9636 minus_lit0 = associate_trees (minus_lit0, lit0,
9642 lit0 = associate_trees (lit0, minus_lit0,
9650 return fold_convert (type,
9651 associate_trees (var0, minus_lit0,
9655 con0 = associate_trees (con0, minus_lit0,
9657 return fold_convert (type,
9658 associate_trees (var0, con0,
9663 con0 = associate_trees (con0, lit0, code, type);
9664 return fold_convert (type, associate_trees (var0, con0,
9672 /* Pointer simplifications for subtraction, simple reassociations. */
9673 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9675 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9676 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9677 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9679 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9680 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9681 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9682 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9683 return fold_build2 (PLUS_EXPR, type,
9684 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9685 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9687 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9688 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9690 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9691 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9692 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9694 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9697 /* A - (-B) -> A + B */
9698 if (TREE_CODE (arg1) == NEGATE_EXPR)
9699 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9700 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9701 if (TREE_CODE (arg0) == NEGATE_EXPR
9702 && (FLOAT_TYPE_P (type)
9703 || INTEGRAL_TYPE_P (type))
9704 && negate_expr_p (arg1)
9705 && reorder_operands_p (arg0, arg1))
9706 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9707 TREE_OPERAND (arg0, 0));
9708 /* Convert -A - 1 to ~A. */
9709 if (INTEGRAL_TYPE_P (type)
9710 && TREE_CODE (arg0) == NEGATE_EXPR
9711 && integer_onep (arg1)
9712 && !TYPE_OVERFLOW_TRAPS (type))
9713 return fold_build1 (BIT_NOT_EXPR, type,
9714 fold_convert (type, TREE_OPERAND (arg0, 0)));
9716 /* Convert -1 - A to ~A. */
9717 if (INTEGRAL_TYPE_P (type)
9718 && integer_all_onesp (arg0))
9719 return fold_build1 (BIT_NOT_EXPR, type, op1);
9721 if (! FLOAT_TYPE_P (type))
9723 if (integer_zerop (arg0))
9724 return negate_expr (fold_convert (type, arg1));
9725 if (integer_zerop (arg1))
9726 return non_lvalue (fold_convert (type, arg0));
9728 /* Fold A - (A & B) into ~B & A. */
9729 if (!TREE_SIDE_EFFECTS (arg0)
9730 && TREE_CODE (arg1) == BIT_AND_EXPR)
9732 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9734 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9735 return fold_build2 (BIT_AND_EXPR, type,
9736 fold_build1 (BIT_NOT_EXPR, type, arg10),
9737 fold_convert (type, arg0));
9739 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9741 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9742 return fold_build2 (BIT_AND_EXPR, type,
9743 fold_build1 (BIT_NOT_EXPR, type, arg11),
9744 fold_convert (type, arg0));
9748 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9749 any power of 2 minus 1. */
9750 if (TREE_CODE (arg0) == BIT_AND_EXPR
9751 && TREE_CODE (arg1) == BIT_AND_EXPR
9752 && operand_equal_p (TREE_OPERAND (arg0, 0),
9753 TREE_OPERAND (arg1, 0), 0))
9755 tree mask0 = TREE_OPERAND (arg0, 1);
9756 tree mask1 = TREE_OPERAND (arg1, 1);
9757 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9759 if (operand_equal_p (tem, mask1, 0))
9761 tem = fold_build2 (BIT_XOR_EXPR, type,
9762 TREE_OPERAND (arg0, 0), mask1);
9763 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9768 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9769 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9770 return non_lvalue (fold_convert (type, arg0));
9772 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9773 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9774 (-ARG1 + ARG0) reduces to -ARG1. */
9775 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9776 return negate_expr (fold_convert (type, arg1));
9778 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9779 __complex__ ( x, -y ). This is not the same for SNaNs or if
9780 signed zeros are involved. */
9781 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9782 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9783 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9785 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9786 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9787 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9788 bool arg0rz = false, arg0iz = false;
9789 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9790 || (arg0i && (arg0iz = real_zerop (arg0i))))
9792 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9793 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9794 if (arg0rz && arg1i && real_zerop (arg1i))
9796 tree rp = fold_build1 (NEGATE_EXPR, rtype,
9798 : build1 (REALPART_EXPR, rtype, arg1));
9799 tree ip = arg0i ? arg0i
9800 : build1 (IMAGPART_EXPR, rtype, arg0);
9801 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9803 else if (arg0iz && arg1r && real_zerop (arg1r))
9805 tree rp = arg0r ? arg0r
9806 : build1 (REALPART_EXPR, rtype, arg0);
9807 tree ip = fold_build1 (NEGATE_EXPR, rtype,
9809 : build1 (IMAGPART_EXPR, rtype, arg1));
9810 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9815 /* Fold &x - &x. This can happen from &x.foo - &x.
9816 This is unsafe for certain floats even in non-IEEE formats.
9817 In IEEE, it is unsafe because it does wrong for NaNs.
9818 Also note that operand_equal_p is always false if an operand
9821 if ((! FLOAT_TYPE_P (type)
9822 || (flag_unsafe_math_optimizations
9823 && !HONOR_NANS (TYPE_MODE (type))
9824 && !HONOR_INFINITIES (TYPE_MODE (type))))
9825 && operand_equal_p (arg0, arg1, 0))
9826 return fold_convert (type, integer_zero_node);
9828 /* A - B -> A + (-B) if B is easily negatable. */
9829 if (negate_expr_p (arg1)
9830 && ((FLOAT_TYPE_P (type)
9831 /* Avoid this transformation if B is a positive REAL_CST. */
9832 && (TREE_CODE (arg1) != REAL_CST
9833 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9834 || INTEGRAL_TYPE_P (type)))
9835 return fold_build2 (PLUS_EXPR, type,
9836 fold_convert (type, arg0),
9837 fold_convert (type, negate_expr (arg1)));
9839 /* Try folding difference of addresses. */
9843 if ((TREE_CODE (arg0) == ADDR_EXPR
9844 || TREE_CODE (arg1) == ADDR_EXPR)
9845 && ptr_difference_const (arg0, arg1, &diff))
9846 return build_int_cst_type (type, diff);
9849 /* Fold &a[i] - &a[j] to i-j. */
9850 if (TREE_CODE (arg0) == ADDR_EXPR
9851 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9852 && TREE_CODE (arg1) == ADDR_EXPR
9853 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9855 tree aref0 = TREE_OPERAND (arg0, 0);
9856 tree aref1 = TREE_OPERAND (arg1, 0);
9857 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9858 TREE_OPERAND (aref1, 0), 0))
9860 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9861 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9862 tree esz = array_ref_element_size (aref0);
9863 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9864 return fold_build2 (MULT_EXPR, type, diff,
9865 fold_convert (type, esz));
9870 if (flag_unsafe_math_optimizations
9871 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9872 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9873 && (tem = distribute_real_division (code, type, arg0, arg1)))
9876 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9878 if ((TREE_CODE (arg0) == MULT_EXPR
9879 || TREE_CODE (arg1) == MULT_EXPR)
9880 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9882 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9890 /* (-A) * (-B) -> A * B */
9891 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9892 return fold_build2 (MULT_EXPR, type,
9893 fold_convert (type, TREE_OPERAND (arg0, 0)),
9894 fold_convert (type, negate_expr (arg1)));
9895 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9896 return fold_build2 (MULT_EXPR, type,
9897 fold_convert (type, negate_expr (arg0)),
9898 fold_convert (type, TREE_OPERAND (arg1, 0)));
9900 if (! FLOAT_TYPE_P (type))
9902 if (integer_zerop (arg1))
9903 return omit_one_operand (type, arg1, arg0);
9904 if (integer_onep (arg1))
9905 return non_lvalue (fold_convert (type, arg0));
9906 /* Transform x * -1 into -x. */
9907 if (integer_all_onesp (arg1))
9908 return fold_convert (type, negate_expr (arg0));
9909 /* Transform x * -C into -x * C if x is easily negatable. */
9910 if (TREE_CODE (arg1) == INTEGER_CST
9911 && tree_int_cst_sgn (arg1) == -1
9912 && negate_expr_p (arg0)
9913 && (tem = negate_expr (arg1)) != arg1
9914 && !TREE_OVERFLOW (tem))
9915 return fold_build2 (MULT_EXPR, type,
9916 negate_expr (arg0), tem);
9918 /* (a * (1 << b)) is (a << b) */
9919 if (TREE_CODE (arg1) == LSHIFT_EXPR
9920 && integer_onep (TREE_OPERAND (arg1, 0)))
9921 return fold_build2 (LSHIFT_EXPR, type, arg0,
9922 TREE_OPERAND (arg1, 1));
9923 if (TREE_CODE (arg0) == LSHIFT_EXPR
9924 && integer_onep (TREE_OPERAND (arg0, 0)))
9925 return fold_build2 (LSHIFT_EXPR, type, arg1,
9926 TREE_OPERAND (arg0, 1));
9928 strict_overflow_p = false;
9929 if (TREE_CODE (arg1) == INTEGER_CST
9930 && 0 != (tem = extract_muldiv (op0,
9931 fold_convert (type, arg1),
9933 &strict_overflow_p)))
9935 if (strict_overflow_p)
9936 fold_overflow_warning (("assuming signed overflow does not "
9937 "occur when simplifying "
9939 WARN_STRICT_OVERFLOW_MISC);
9940 return fold_convert (type, tem);
9943 /* Optimize z * conj(z) for integer complex numbers. */
9944 if (TREE_CODE (arg0) == CONJ_EXPR
9945 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9946 return fold_mult_zconjz (type, arg1);
9947 if (TREE_CODE (arg1) == CONJ_EXPR
9948 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9949 return fold_mult_zconjz (type, arg0);
9953 /* Maybe fold x * 0 to 0. The expressions aren't the same
9954 when x is NaN, since x * 0 is also NaN. Nor are they the
9955 same in modes with signed zeros, since multiplying a
9956 negative value by 0 gives -0, not +0. */
9957 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9958 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9959 && real_zerop (arg1))
9960 return omit_one_operand (type, arg1, arg0);
9961 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9962 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9963 && real_onep (arg1))
9964 return non_lvalue (fold_convert (type, arg0));
9966 /* Transform x * -1.0 into -x. */
9967 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9968 && real_minus_onep (arg1))
9969 return fold_convert (type, negate_expr (arg0));
9971 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9972 if (flag_unsafe_math_optimizations
9973 && TREE_CODE (arg0) == RDIV_EXPR
9974 && TREE_CODE (arg1) == REAL_CST
9975 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9977 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9980 return fold_build2 (RDIV_EXPR, type, tem,
9981 TREE_OPERAND (arg0, 1));
9984 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9985 if (operand_equal_p (arg0, arg1, 0))
9987 tree tem = fold_strip_sign_ops (arg0);
9988 if (tem != NULL_TREE)
9990 tem = fold_convert (type, tem);
9991 return fold_build2 (MULT_EXPR, type, tem, tem);
9995 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9996 This is not the same for NaNs or if signed zeros are
9998 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9999 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10000 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10001 && TREE_CODE (arg1) == COMPLEX_CST
10002 && real_zerop (TREE_REALPART (arg1)))
10004 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10005 if (real_onep (TREE_IMAGPART (arg1)))
10006 return fold_build2 (COMPLEX_EXPR, type,
10007 negate_expr (fold_build1 (IMAGPART_EXPR,
10009 fold_build1 (REALPART_EXPR, rtype, arg0));
10010 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10011 return fold_build2 (COMPLEX_EXPR, type,
10012 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10013 negate_expr (fold_build1 (REALPART_EXPR,
10017 /* Optimize z * conj(z) for floating point complex numbers.
10018 Guarded by flag_unsafe_math_optimizations as non-finite
10019 imaginary components don't produce scalar results. */
10020 if (flag_unsafe_math_optimizations
10021 && TREE_CODE (arg0) == CONJ_EXPR
10022 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10023 return fold_mult_zconjz (type, arg1);
10024 if (flag_unsafe_math_optimizations
10025 && TREE_CODE (arg1) == CONJ_EXPR
10026 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10027 return fold_mult_zconjz (type, arg0);
10029 if (flag_unsafe_math_optimizations)
10031 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10032 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10034 /* Optimizations of root(...)*root(...). */
10035 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10038 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10039 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10041 /* Optimize sqrt(x)*sqrt(x) as x. */
10042 if (BUILTIN_SQRT_P (fcode0)
10043 && operand_equal_p (arg00, arg10, 0)
10044 && ! HONOR_SNANS (TYPE_MODE (type)))
10047 /* Optimize root(x)*root(y) as root(x*y). */
10048 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10049 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10050 return build_call_expr (rootfn, 1, arg);
10053 /* Optimize expN(x)*expN(y) as expN(x+y). */
10054 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10056 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10057 tree arg = fold_build2 (PLUS_EXPR, type,
10058 CALL_EXPR_ARG (arg0, 0),
10059 CALL_EXPR_ARG (arg1, 0));
10060 return build_call_expr (expfn, 1, arg);
10063 /* Optimizations of pow(...)*pow(...). */
10064 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10065 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10066 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10068 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10069 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10070 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10071 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10073 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10074 if (operand_equal_p (arg01, arg11, 0))
10076 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10077 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10078 return build_call_expr (powfn, 2, arg, arg01);
10081 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10082 if (operand_equal_p (arg00, arg10, 0))
10084 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10085 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10086 return build_call_expr (powfn, 2, arg00, arg);
10090 /* Optimize tan(x)*cos(x) as sin(x). */
10091 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10092 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10093 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10094 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10095 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10096 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10097 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10098 CALL_EXPR_ARG (arg1, 0), 0))
10100 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10102 if (sinfn != NULL_TREE)
10103 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10106 /* Optimize x*pow(x,c) as pow(x,c+1). */
10107 if (fcode1 == BUILT_IN_POW
10108 || fcode1 == BUILT_IN_POWF
10109 || fcode1 == BUILT_IN_POWL)
10111 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10112 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10113 if (TREE_CODE (arg11) == REAL_CST
10114 && !TREE_OVERFLOW (arg11)
10115 && operand_equal_p (arg0, arg10, 0))
10117 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10121 c = TREE_REAL_CST (arg11);
10122 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10123 arg = build_real (type, c);
10124 return build_call_expr (powfn, 2, arg0, arg);
10128 /* Optimize pow(x,c)*x as pow(x,c+1). */
10129 if (fcode0 == BUILT_IN_POW
10130 || fcode0 == BUILT_IN_POWF
10131 || fcode0 == BUILT_IN_POWL)
10133 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10134 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10135 if (TREE_CODE (arg01) == REAL_CST
10136 && !TREE_OVERFLOW (arg01)
10137 && operand_equal_p (arg1, arg00, 0))
10139 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10143 c = TREE_REAL_CST (arg01);
10144 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10145 arg = build_real (type, c);
10146 return build_call_expr (powfn, 2, arg1, arg);
10150 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10151 if (! optimize_size
10152 && operand_equal_p (arg0, arg1, 0))
10154 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10158 tree arg = build_real (type, dconst2);
10159 return build_call_expr (powfn, 2, arg0, arg);
10168 if (integer_all_onesp (arg1))
10169 return omit_one_operand (type, arg1, arg0);
10170 if (integer_zerop (arg1))
10171 return non_lvalue (fold_convert (type, arg0));
10172 if (operand_equal_p (arg0, arg1, 0))
10173 return non_lvalue (fold_convert (type, arg0));
10175 /* ~X | X is -1. */
10176 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10177 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10179 t1 = build_int_cst_type (type, -1);
10180 return omit_one_operand (type, t1, arg1);
10183 /* X | ~X is -1. */
10184 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10185 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10187 t1 = build_int_cst_type (type, -1);
10188 return omit_one_operand (type, t1, arg0);
10191 /* Canonicalize (X & C1) | C2. */
10192 if (TREE_CODE (arg0) == BIT_AND_EXPR
10193 && TREE_CODE (arg1) == INTEGER_CST
10194 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10196 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
10197 int width = TYPE_PRECISION (type);
10198 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10199 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10200 hi2 = TREE_INT_CST_HIGH (arg1);
10201 lo2 = TREE_INT_CST_LOW (arg1);
10203 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10204 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10205 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10207 if (width > HOST_BITS_PER_WIDE_INT)
10209 mhi = (unsigned HOST_WIDE_INT) -1
10210 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10216 mlo = (unsigned HOST_WIDE_INT) -1
10217 >> (HOST_BITS_PER_WIDE_INT - width);
10220 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10221 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10222 return fold_build2 (BIT_IOR_EXPR, type,
10223 TREE_OPERAND (arg0, 0), arg1);
10225 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
10228 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
10229 return fold_build2 (BIT_IOR_EXPR, type,
10230 fold_build2 (BIT_AND_EXPR, type,
10231 TREE_OPERAND (arg0, 0),
10232 build_int_cst_wide (type,
10238 /* (X & Y) | Y is (X, Y). */
10239 if (TREE_CODE (arg0) == BIT_AND_EXPR
10240 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10241 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10242 /* (X & Y) | X is (Y, X). */
10243 if (TREE_CODE (arg0) == BIT_AND_EXPR
10244 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10245 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10246 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10247 /* X | (X & Y) is (Y, X). */
10248 if (TREE_CODE (arg1) == BIT_AND_EXPR
10249 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10250 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10251 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10252 /* X | (Y & X) is (Y, X). */
10253 if (TREE_CODE (arg1) == BIT_AND_EXPR
10254 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10255 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10256 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10258 t1 = distribute_bit_expr (code, type, arg0, arg1);
10259 if (t1 != NULL_TREE)
10262 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10264 This results in more efficient code for machines without a NAND
10265 instruction. Combine will canonicalize to the first form
10266 which will allow use of NAND instructions provided by the
10267 backend if they exist. */
10268 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10269 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10271 return fold_build1 (BIT_NOT_EXPR, type,
10272 build2 (BIT_AND_EXPR, type,
10273 TREE_OPERAND (arg0, 0),
10274 TREE_OPERAND (arg1, 0)));
10277 /* See if this can be simplified into a rotate first. If that
10278 is unsuccessful continue in the association code. */
10282 if (integer_zerop (arg1))
10283 return non_lvalue (fold_convert (type, arg0));
10284 if (integer_all_onesp (arg1))
10285 return fold_build1 (BIT_NOT_EXPR, type, op0);
10286 if (operand_equal_p (arg0, arg1, 0))
10287 return omit_one_operand (type, integer_zero_node, arg0);
10289 /* ~X ^ X is -1. */
10290 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10291 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10293 t1 = build_int_cst_type (type, -1);
10294 return omit_one_operand (type, t1, arg1);
10297 /* X ^ ~X is -1. */
10298 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10299 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10301 t1 = build_int_cst_type (type, -1);
10302 return omit_one_operand (type, t1, arg0);
10305 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10306 with a constant, and the two constants have no bits in common,
10307 we should treat this as a BIT_IOR_EXPR since this may produce more
10308 simplifications. */
10309 if (TREE_CODE (arg0) == BIT_AND_EXPR
10310 && TREE_CODE (arg1) == BIT_AND_EXPR
10311 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10312 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10313 && integer_zerop (const_binop (BIT_AND_EXPR,
10314 TREE_OPERAND (arg0, 1),
10315 TREE_OPERAND (arg1, 1), 0)))
10317 code = BIT_IOR_EXPR;
10321 /* (X | Y) ^ X -> Y & ~ X*/
10322 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10323 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10325 tree t2 = TREE_OPERAND (arg0, 1);
10326 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10328 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10329 fold_convert (type, t1));
10333 /* (Y | X) ^ X -> Y & ~ X*/
10334 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10335 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10337 tree t2 = TREE_OPERAND (arg0, 0);
10338 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10340 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10341 fold_convert (type, t1));
10345 /* X ^ (X | Y) -> Y & ~ X*/
10346 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10347 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10349 tree t2 = TREE_OPERAND (arg1, 1);
10350 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10352 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10353 fold_convert (type, t1));
10357 /* X ^ (Y | X) -> Y & ~ X*/
10358 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10359 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10361 tree t2 = TREE_OPERAND (arg1, 0);
10362 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10364 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10365 fold_convert (type, t1));
10369 /* Convert ~X ^ ~Y to X ^ Y. */
10370 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10371 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10372 return fold_build2 (code, type,
10373 fold_convert (type, TREE_OPERAND (arg0, 0)),
10374 fold_convert (type, TREE_OPERAND (arg1, 0)));
10376 /* Convert ~X ^ C to X ^ ~C. */
10377 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10378 && TREE_CODE (arg1) == INTEGER_CST)
10379 return fold_build2 (code, type,
10380 fold_convert (type, TREE_OPERAND (arg0, 0)),
10381 fold_build1 (BIT_NOT_EXPR, type, arg1));
10383 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10384 if (TREE_CODE (arg0) == BIT_AND_EXPR
10385 && integer_onep (TREE_OPERAND (arg0, 1))
10386 && integer_onep (arg1))
10387 return fold_build2 (EQ_EXPR, type, arg0,
10388 build_int_cst (TREE_TYPE (arg0), 0));
10390 /* Fold (X & Y) ^ Y as ~X & Y. */
10391 if (TREE_CODE (arg0) == BIT_AND_EXPR
10392 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10394 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10395 return fold_build2 (BIT_AND_EXPR, type,
10396 fold_build1 (BIT_NOT_EXPR, type, tem),
10397 fold_convert (type, arg1));
10399 /* Fold (X & Y) ^ X as ~Y & X. */
10400 if (TREE_CODE (arg0) == BIT_AND_EXPR
10401 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10402 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10404 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10405 return fold_build2 (BIT_AND_EXPR, type,
10406 fold_build1 (BIT_NOT_EXPR, type, tem),
10407 fold_convert (type, arg1));
10409 /* Fold X ^ (X & Y) as X & ~Y. */
10410 if (TREE_CODE (arg1) == BIT_AND_EXPR
10411 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10413 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10414 return fold_build2 (BIT_AND_EXPR, type,
10415 fold_convert (type, arg0),
10416 fold_build1 (BIT_NOT_EXPR, type, tem));
10418 /* Fold X ^ (Y & X) as ~Y & X. */
10419 if (TREE_CODE (arg1) == BIT_AND_EXPR
10420 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10421 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10423 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10424 return fold_build2 (BIT_AND_EXPR, type,
10425 fold_build1 (BIT_NOT_EXPR, type, tem),
10426 fold_convert (type, arg0));
10429 /* See if this can be simplified into a rotate first. If that
10430 is unsuccessful continue in the association code. */
10434 if (integer_all_onesp (arg1))
10435 return non_lvalue (fold_convert (type, arg0));
10436 if (integer_zerop (arg1))
10437 return omit_one_operand (type, arg1, arg0);
10438 if (operand_equal_p (arg0, arg1, 0))
10439 return non_lvalue (fold_convert (type, arg0));
10441 /* ~X & X is always zero. */
10442 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10443 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10444 return omit_one_operand (type, integer_zero_node, arg1);
10446 /* X & ~X is always zero. */
10447 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10448 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10449 return omit_one_operand (type, integer_zero_node, arg0);
10451 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10452 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10453 && TREE_CODE (arg1) == INTEGER_CST
10454 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10455 return fold_build2 (BIT_IOR_EXPR, type,
10456 fold_build2 (BIT_AND_EXPR, type,
10457 TREE_OPERAND (arg0, 0), arg1),
10458 fold_build2 (BIT_AND_EXPR, type,
10459 TREE_OPERAND (arg0, 1), arg1));
10461 /* (X | Y) & Y is (X, Y). */
10462 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10463 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10464 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10465 /* (X | Y) & X is (Y, X). */
10466 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10467 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10468 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10469 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10470 /* X & (X | Y) is (Y, X). */
10471 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10472 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10473 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10474 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10475 /* X & (Y | X) is (Y, X). */
10476 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10477 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10478 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10479 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10481 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10482 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10483 && integer_onep (TREE_OPERAND (arg0, 1))
10484 && integer_onep (arg1))
10486 tem = TREE_OPERAND (arg0, 0);
10487 return fold_build2 (EQ_EXPR, type,
10488 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10489 build_int_cst (TREE_TYPE (tem), 1)),
10490 build_int_cst (TREE_TYPE (tem), 0));
10492 /* Fold ~X & 1 as (X & 1) == 0. */
10493 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10494 && integer_onep (arg1))
10496 tem = TREE_OPERAND (arg0, 0);
10497 return fold_build2 (EQ_EXPR, type,
10498 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10499 build_int_cst (TREE_TYPE (tem), 1)),
10500 build_int_cst (TREE_TYPE (tem), 0));
10503 /* Fold (X ^ Y) & Y as ~X & Y. */
10504 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10505 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10507 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10508 return fold_build2 (BIT_AND_EXPR, type,
10509 fold_build1 (BIT_NOT_EXPR, type, tem),
10510 fold_convert (type, arg1));
10512 /* Fold (X ^ Y) & X as ~Y & X. */
10513 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10514 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10515 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10517 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10518 return fold_build2 (BIT_AND_EXPR, type,
10519 fold_build1 (BIT_NOT_EXPR, type, tem),
10520 fold_convert (type, arg1));
10522 /* Fold X & (X ^ Y) as X & ~Y. */
10523 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10524 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10526 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10527 return fold_build2 (BIT_AND_EXPR, type,
10528 fold_convert (type, arg0),
10529 fold_build1 (BIT_NOT_EXPR, type, tem));
10531 /* Fold X & (Y ^ X) as ~Y & X. */
10532 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10533 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10534 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10536 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10537 return fold_build2 (BIT_AND_EXPR, type,
10538 fold_build1 (BIT_NOT_EXPR, type, tem),
10539 fold_convert (type, arg0));
10542 t1 = distribute_bit_expr (code, type, arg0, arg1);
10543 if (t1 != NULL_TREE)
10545 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10546 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10547 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10550 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10552 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10553 && (~TREE_INT_CST_LOW (arg1)
10554 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10555 return fold_convert (type, TREE_OPERAND (arg0, 0));
10558 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10560 This results in more efficient code for machines without a NOR
10561 instruction. Combine will canonicalize to the first form
10562 which will allow use of NOR instructions provided by the
10563 backend if they exist. */
10564 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10565 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10567 return fold_build1 (BIT_NOT_EXPR, type,
10568 build2 (BIT_IOR_EXPR, type,
10569 TREE_OPERAND (arg0, 0),
10570 TREE_OPERAND (arg1, 0)));
10576 /* Don't touch a floating-point divide by zero unless the mode
10577 of the constant can represent infinity. */
10578 if (TREE_CODE (arg1) == REAL_CST
10579 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10580 && real_zerop (arg1))
10583 /* Optimize A / A to 1.0 if we don't care about
10584 NaNs or Infinities. Skip the transformation
10585 for non-real operands. */
10586 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10587 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10588 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10589 && operand_equal_p (arg0, arg1, 0))
10591 tree r = build_real (TREE_TYPE (arg0), dconst1);
10593 return omit_two_operands (type, r, arg0, arg1);
10596 /* The complex version of the above A / A optimization. */
10597 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10598 && operand_equal_p (arg0, arg1, 0))
10600 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10601 if (! HONOR_NANS (TYPE_MODE (elem_type))
10602 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10604 tree r = build_real (elem_type, dconst1);
10605 /* omit_two_operands will call fold_convert for us. */
10606 return omit_two_operands (type, r, arg0, arg1);
10610 /* (-A) / (-B) -> A / B */
10611 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10612 return fold_build2 (RDIV_EXPR, type,
10613 TREE_OPERAND (arg0, 0),
10614 negate_expr (arg1));
10615 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10616 return fold_build2 (RDIV_EXPR, type,
10617 negate_expr (arg0),
10618 TREE_OPERAND (arg1, 0));
10620 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10621 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10622 && real_onep (arg1))
10623 return non_lvalue (fold_convert (type, arg0));
10625 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10626 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10627 && real_minus_onep (arg1))
10628 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10630 /* If ARG1 is a constant, we can convert this to a multiply by the
10631 reciprocal. This does not have the same rounding properties,
10632 so only do this if -funsafe-math-optimizations. We can actually
10633 always safely do it if ARG1 is a power of two, but it's hard to
10634 tell if it is or not in a portable manner. */
10635 if (TREE_CODE (arg1) == REAL_CST)
10637 if (flag_unsafe_math_optimizations
10638 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10640 return fold_build2 (MULT_EXPR, type, arg0, tem);
10641 /* Find the reciprocal if optimizing and the result is exact. */
10645 r = TREE_REAL_CST (arg1);
10646 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10648 tem = build_real (type, r);
10649 return fold_build2 (MULT_EXPR, type,
10650 fold_convert (type, arg0), tem);
10654 /* Convert A/B/C to A/(B*C). */
10655 if (flag_unsafe_math_optimizations
10656 && TREE_CODE (arg0) == RDIV_EXPR)
10657 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10658 fold_build2 (MULT_EXPR, type,
10659 TREE_OPERAND (arg0, 1), arg1));
10661 /* Convert A/(B/C) to (A/B)*C. */
10662 if (flag_unsafe_math_optimizations
10663 && TREE_CODE (arg1) == RDIV_EXPR)
10664 return fold_build2 (MULT_EXPR, type,
10665 fold_build2 (RDIV_EXPR, type, arg0,
10666 TREE_OPERAND (arg1, 0)),
10667 TREE_OPERAND (arg1, 1));
10669 /* Convert C1/(X*C2) into (C1/C2)/X. */
10670 if (flag_unsafe_math_optimizations
10671 && TREE_CODE (arg1) == MULT_EXPR
10672 && TREE_CODE (arg0) == REAL_CST
10673 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10675 tree tem = const_binop (RDIV_EXPR, arg0,
10676 TREE_OPERAND (arg1, 1), 0);
10678 return fold_build2 (RDIV_EXPR, type, tem,
10679 TREE_OPERAND (arg1, 0));
10682 if (flag_unsafe_math_optimizations)
10684 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10685 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10687 /* Optimize sin(x)/cos(x) as tan(x). */
10688 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10689 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10690 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10691 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10692 CALL_EXPR_ARG (arg1, 0), 0))
10694 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10696 if (tanfn != NULL_TREE)
10697 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10700 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10701 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10702 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10703 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10704 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10705 CALL_EXPR_ARG (arg1, 0), 0))
10707 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10709 if (tanfn != NULL_TREE)
10711 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
10712 return fold_build2 (RDIV_EXPR, type,
10713 build_real (type, dconst1), tmp);
10717 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10718 NaNs or Infinities. */
10719 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10720 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10721 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10723 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10724 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10726 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10727 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10728 && operand_equal_p (arg00, arg01, 0))
10730 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10732 if (cosfn != NULL_TREE)
10733 return build_call_expr (cosfn, 1, arg00);
10737 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10738 NaNs or Infinities. */
10739 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10740 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10741 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10743 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10744 tree arg01 = CALL_EXPR_ARG (arg1, 0);
10746 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10747 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10748 && operand_equal_p (arg00, arg01, 0))
10750 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10752 if (cosfn != NULL_TREE)
10754 tree tmp = build_call_expr (cosfn, 1, arg00);
10755 return fold_build2 (RDIV_EXPR, type,
10756 build_real (type, dconst1),
10762 /* Optimize pow(x,c)/x as pow(x,c-1). */
10763 if (fcode0 == BUILT_IN_POW
10764 || fcode0 == BUILT_IN_POWF
10765 || fcode0 == BUILT_IN_POWL)
10767 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10768 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10769 if (TREE_CODE (arg01) == REAL_CST
10770 && !TREE_OVERFLOW (arg01)
10771 && operand_equal_p (arg1, arg00, 0))
10773 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10777 c = TREE_REAL_CST (arg01);
10778 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10779 arg = build_real (type, c);
10780 return build_call_expr (powfn, 2, arg1, arg);
10784 /* Optimize a/root(b/c) into a*root(c/b). */
10785 if (BUILTIN_ROOT_P (fcode1))
10787 tree rootarg = CALL_EXPR_ARG (arg1, 0);
10789 if (TREE_CODE (rootarg) == RDIV_EXPR)
10791 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10792 tree b = TREE_OPERAND (rootarg, 0);
10793 tree c = TREE_OPERAND (rootarg, 1);
10795 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
10797 tmp = build_call_expr (rootfn, 1, tmp);
10798 return fold_build2 (MULT_EXPR, type, arg0, tmp);
10802 /* Optimize x/expN(y) into x*expN(-y). */
10803 if (BUILTIN_EXPONENT_P (fcode1))
10805 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10806 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
10807 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
10808 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10811 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10812 if (fcode1 == BUILT_IN_POW
10813 || fcode1 == BUILT_IN_POWF
10814 || fcode1 == BUILT_IN_POWL)
10816 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10817 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10818 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10819 tree neg11 = fold_convert (type, negate_expr (arg11));
10820 arg1 = build_call_expr (powfn, 2, arg10, neg11);
10821 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10826 case TRUNC_DIV_EXPR:
10827 case FLOOR_DIV_EXPR:
10828 /* Simplify A / (B << N) where A and B are positive and B is
10829 a power of 2, to A >> (N + log2(B)). */
10830 strict_overflow_p = false;
10831 if (TREE_CODE (arg1) == LSHIFT_EXPR
10832 && (TYPE_UNSIGNED (type)
10833 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10835 tree sval = TREE_OPERAND (arg1, 0);
10836 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10838 tree sh_cnt = TREE_OPERAND (arg1, 1);
10839 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10841 if (strict_overflow_p)
10842 fold_overflow_warning (("assuming signed overflow does not "
10843 "occur when simplifying A / (B << N)"),
10844 WARN_STRICT_OVERFLOW_MISC);
10846 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10847 sh_cnt, build_int_cst (NULL_TREE, pow2));
10848 return fold_build2 (RSHIFT_EXPR, type,
10849 fold_convert (type, arg0), sh_cnt);
10854 case ROUND_DIV_EXPR:
10855 case CEIL_DIV_EXPR:
10856 case EXACT_DIV_EXPR:
10857 if (integer_onep (arg1))
10858 return non_lvalue (fold_convert (type, arg0));
10859 if (integer_zerop (arg1))
10861 /* X / -1 is -X. */
10862 if (!TYPE_UNSIGNED (type)
10863 && TREE_CODE (arg1) == INTEGER_CST
10864 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10865 && TREE_INT_CST_HIGH (arg1) == -1)
10866 return fold_convert (type, negate_expr (arg0));
10868 /* Convert -A / -B to A / B when the type is signed and overflow is
10870 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10871 && TREE_CODE (arg0) == NEGATE_EXPR
10872 && negate_expr_p (arg1))
10874 if (INTEGRAL_TYPE_P (type))
10875 fold_overflow_warning (("assuming signed overflow does not occur "
10876 "when distributing negation across "
10878 WARN_STRICT_OVERFLOW_MISC);
10879 return fold_build2 (code, type,
10880 fold_convert (type, TREE_OPERAND (arg0, 0)),
10881 negate_expr (arg1));
10883 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10884 && TREE_CODE (arg1) == NEGATE_EXPR
10885 && negate_expr_p (arg0))
10887 if (INTEGRAL_TYPE_P (type))
10888 fold_overflow_warning (("assuming signed overflow does not occur "
10889 "when distributing negation across "
10891 WARN_STRICT_OVERFLOW_MISC);
10892 return fold_build2 (code, type, negate_expr (arg0),
10893 TREE_OPERAND (arg1, 0));
10896 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10897 operation, EXACT_DIV_EXPR.
10899 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10900 At one time others generated faster code, it's not clear if they do
10901 after the last round to changes to the DIV code in expmed.c. */
10902 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10903 && multiple_of_p (type, arg0, arg1))
10904 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10906 strict_overflow_p = false;
10907 if (TREE_CODE (arg1) == INTEGER_CST
10908 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10909 &strict_overflow_p)))
10911 if (strict_overflow_p)
10912 fold_overflow_warning (("assuming signed overflow does not occur "
10913 "when simplifying division"),
10914 WARN_STRICT_OVERFLOW_MISC);
10915 return fold_convert (type, tem);
10920 case CEIL_MOD_EXPR:
10921 case FLOOR_MOD_EXPR:
10922 case ROUND_MOD_EXPR:
10923 case TRUNC_MOD_EXPR:
10924 /* X % 1 is always zero, but be sure to preserve any side
10926 if (integer_onep (arg1))
10927 return omit_one_operand (type, integer_zero_node, arg0);
10929 /* X % 0, return X % 0 unchanged so that we can get the
10930 proper warnings and errors. */
10931 if (integer_zerop (arg1))
10934 /* 0 % X is always zero, but be sure to preserve any side
10935 effects in X. Place this after checking for X == 0. */
10936 if (integer_zerop (arg0))
10937 return omit_one_operand (type, integer_zero_node, arg1);
10939 /* X % -1 is zero. */
10940 if (!TYPE_UNSIGNED (type)
10941 && TREE_CODE (arg1) == INTEGER_CST
10942 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10943 && TREE_INT_CST_HIGH (arg1) == -1)
10944 return omit_one_operand (type, integer_zero_node, arg0);
10946 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10947 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10948 strict_overflow_p = false;
10949 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10950 && (TYPE_UNSIGNED (type)
10951 || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)))
10954 /* Also optimize A % (C << N) where C is a power of 2,
10955 to A & ((C << N) - 1). */
10956 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10957 c = TREE_OPERAND (arg1, 0);
10959 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10961 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10962 build_int_cst (TREE_TYPE (arg1), 1));
10963 if (strict_overflow_p)
10964 fold_overflow_warning (("assuming signed overflow does not "
10965 "occur when simplifying "
10966 "X % (power of two)"),
10967 WARN_STRICT_OVERFLOW_MISC);
10968 return fold_build2 (BIT_AND_EXPR, type,
10969 fold_convert (type, arg0),
10970 fold_convert (type, mask));
10974 /* X % -C is the same as X % C. */
10975 if (code == TRUNC_MOD_EXPR
10976 && !TYPE_UNSIGNED (type)
10977 && TREE_CODE (arg1) == INTEGER_CST
10978 && !TREE_OVERFLOW (arg1)
10979 && TREE_INT_CST_HIGH (arg1) < 0
10980 && !TYPE_OVERFLOW_TRAPS (type)
10981 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10982 && !sign_bit_p (arg1, arg1))
10983 return fold_build2 (code, type, fold_convert (type, arg0),
10984 fold_convert (type, negate_expr (arg1)));
10986 /* X % -Y is the same as X % Y. */
10987 if (code == TRUNC_MOD_EXPR
10988 && !TYPE_UNSIGNED (type)
10989 && TREE_CODE (arg1) == NEGATE_EXPR
10990 && !TYPE_OVERFLOW_TRAPS (type))
10991 return fold_build2 (code, type, fold_convert (type, arg0),
10992 fold_convert (type, TREE_OPERAND (arg1, 0)));
10994 if (TREE_CODE (arg1) == INTEGER_CST
10995 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10996 &strict_overflow_p)))
10998 if (strict_overflow_p)
10999 fold_overflow_warning (("assuming signed overflow does not occur "
11000 "when simplifying modulos"),
11001 WARN_STRICT_OVERFLOW_MISC);
11002 return fold_convert (type, tem);
11009 if (integer_all_onesp (arg0))
11010 return omit_one_operand (type, arg0, arg1);
11014 /* Optimize -1 >> x for arithmetic right shifts. */
11015 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11016 return omit_one_operand (type, arg0, arg1);
11017 /* ... fall through ... */
11021 if (integer_zerop (arg1))
11022 return non_lvalue (fold_convert (type, arg0));
11023 if (integer_zerop (arg0))
11024 return omit_one_operand (type, arg0, arg1);
11026 /* Since negative shift count is not well-defined,
11027 don't try to compute it in the compiler. */
11028 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11031 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11032 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11033 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11034 && host_integerp (TREE_OPERAND (arg0, 1), false)
11035 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11037 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11038 + TREE_INT_CST_LOW (arg1));
11040 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11041 being well defined. */
11042 if (low >= TYPE_PRECISION (type))
11044 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11045 low = low % TYPE_PRECISION (type);
11046 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11047 return build_int_cst (type, 0);
11049 low = TYPE_PRECISION (type) - 1;
11052 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11053 build_int_cst (type, low));
11056 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11057 into x & ((unsigned)-1 >> c) for unsigned types. */
11058 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11059 || (TYPE_UNSIGNED (type)
11060 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11061 && host_integerp (arg1, false)
11062 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11063 && host_integerp (TREE_OPERAND (arg0, 1), false)
11064 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11066 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11067 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11073 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11075 lshift = build_int_cst (type, -1);
11076 lshift = int_const_binop (code, lshift, arg1, 0);
11078 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11082 /* Rewrite an LROTATE_EXPR by a constant into an
11083 RROTATE_EXPR by a new constant. */
11084 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11086 tree tem = build_int_cst (TREE_TYPE (arg1),
11087 GET_MODE_BITSIZE (TYPE_MODE (type)));
11088 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11089 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
11092 /* If we have a rotate of a bit operation with the rotate count and
11093 the second operand of the bit operation both constant,
11094 permute the two operations. */
11095 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11096 && (TREE_CODE (arg0) == BIT_AND_EXPR
11097 || TREE_CODE (arg0) == BIT_IOR_EXPR
11098 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11099 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11100 return fold_build2 (TREE_CODE (arg0), type,
11101 fold_build2 (code, type,
11102 TREE_OPERAND (arg0, 0), arg1),
11103 fold_build2 (code, type,
11104 TREE_OPERAND (arg0, 1), arg1));
11106 /* Two consecutive rotates adding up to the width of the mode can
11108 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11109 && TREE_CODE (arg0) == RROTATE_EXPR
11110 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11111 && TREE_INT_CST_HIGH (arg1) == 0
11112 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11113 && ((TREE_INT_CST_LOW (arg1)
11114 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11115 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
11116 return TREE_OPERAND (arg0, 0);
11121 if (operand_equal_p (arg0, arg1, 0))
11122 return omit_one_operand (type, arg0, arg1);
11123 if (INTEGRAL_TYPE_P (type)
11124 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11125 return omit_one_operand (type, arg1, arg0);
11126 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11132 if (operand_equal_p (arg0, arg1, 0))
11133 return omit_one_operand (type, arg0, arg1);
11134 if (INTEGRAL_TYPE_P (type)
11135 && TYPE_MAX_VALUE (type)
11136 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11137 return omit_one_operand (type, arg1, arg0);
11138 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11143 case TRUTH_ANDIF_EXPR:
11144 /* Note that the operands of this must be ints
11145 and their values must be 0 or 1.
11146 ("true" is a fixed value perhaps depending on the language.) */
11147 /* If first arg is constant zero, return it. */
11148 if (integer_zerop (arg0))
11149 return fold_convert (type, arg0);
11150 case TRUTH_AND_EXPR:
11151 /* If either arg is constant true, drop it. */
11152 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11153 return non_lvalue (fold_convert (type, arg1));
11154 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11155 /* Preserve sequence points. */
11156 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11157 return non_lvalue (fold_convert (type, arg0));
11158 /* If second arg is constant zero, result is zero, but first arg
11159 must be evaluated. */
11160 if (integer_zerop (arg1))
11161 return omit_one_operand (type, arg1, arg0);
11162 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11163 case will be handled here. */
11164 if (integer_zerop (arg0))
11165 return omit_one_operand (type, arg0, arg1);
11167 /* !X && X is always false. */
11168 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11169 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11170 return omit_one_operand (type, integer_zero_node, arg1);
11171 /* X && !X is always false. */
11172 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11173 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11174 return omit_one_operand (type, integer_zero_node, arg0);
11176 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11177 means A >= Y && A != MAX, but in this case we know that
11180 if (!TREE_SIDE_EFFECTS (arg0)
11181 && !TREE_SIDE_EFFECTS (arg1))
11183 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11184 if (tem && !operand_equal_p (tem, arg0, 0))
11185 return fold_build2 (code, type, tem, arg1);
11187 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11188 if (tem && !operand_equal_p (tem, arg1, 0))
11189 return fold_build2 (code, type, arg0, tem);
11193 /* We only do these simplifications if we are optimizing. */
11197 /* Check for things like (A || B) && (A || C). We can convert this
11198 to A || (B && C). Note that either operator can be any of the four
11199 truth and/or operations and the transformation will still be
11200 valid. Also note that we only care about order for the
11201 ANDIF and ORIF operators. If B contains side effects, this
11202 might change the truth-value of A. */
11203 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11204 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11205 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11206 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11207 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11208 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11210 tree a00 = TREE_OPERAND (arg0, 0);
11211 tree a01 = TREE_OPERAND (arg0, 1);
11212 tree a10 = TREE_OPERAND (arg1, 0);
11213 tree a11 = TREE_OPERAND (arg1, 1);
11214 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11215 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11216 && (code == TRUTH_AND_EXPR
11217 || code == TRUTH_OR_EXPR));
11219 if (operand_equal_p (a00, a10, 0))
11220 return fold_build2 (TREE_CODE (arg0), type, a00,
11221 fold_build2 (code, type, a01, a11));
11222 else if (commutative && operand_equal_p (a00, a11, 0))
11223 return fold_build2 (TREE_CODE (arg0), type, a00,
11224 fold_build2 (code, type, a01, a10));
11225 else if (commutative && operand_equal_p (a01, a10, 0))
11226 return fold_build2 (TREE_CODE (arg0), type, a01,
11227 fold_build2 (code, type, a00, a11));
11229 /* This case if tricky because we must either have commutative
11230 operators or else A10 must not have side-effects. */
11232 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11233 && operand_equal_p (a01, a11, 0))
11234 return fold_build2 (TREE_CODE (arg0), type,
11235 fold_build2 (code, type, a00, a10),
11239 /* See if we can build a range comparison. */
11240 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11243 /* Check for the possibility of merging component references. If our
11244 lhs is another similar operation, try to merge its rhs with our
11245 rhs. Then try to merge our lhs and rhs. */
11246 if (TREE_CODE (arg0) == code
11247 && 0 != (tem = fold_truthop (code, type,
11248 TREE_OPERAND (arg0, 1), arg1)))
11249 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11251 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11256 case TRUTH_ORIF_EXPR:
11257 /* Note that the operands of this must be ints
11258 and their values must be 0 or true.
11259 ("true" is a fixed value perhaps depending on the language.) */
11260 /* If first arg is constant true, return it. */
11261 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11262 return fold_convert (type, arg0);
11263 case TRUTH_OR_EXPR:
11264 /* If either arg is constant zero, drop it. */
11265 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11266 return non_lvalue (fold_convert (type, arg1));
11267 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11268 /* Preserve sequence points. */
11269 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11270 return non_lvalue (fold_convert (type, arg0));
11271 /* If second arg is constant true, result is true, but we must
11272 evaluate first arg. */
11273 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11274 return omit_one_operand (type, arg1, arg0);
11275 /* Likewise for first arg, but note this only occurs here for
11277 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11278 return omit_one_operand (type, arg0, arg1);
11280 /* !X || X is always true. */
11281 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11282 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11283 return omit_one_operand (type, integer_one_node, arg1);
11284 /* X || !X is always true. */
11285 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11286 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11287 return omit_one_operand (type, integer_one_node, arg0);
11291 case TRUTH_XOR_EXPR:
11292 /* If the second arg is constant zero, drop it. */
11293 if (integer_zerop (arg1))
11294 return non_lvalue (fold_convert (type, arg0));
11295 /* If the second arg is constant true, this is a logical inversion. */
11296 if (integer_onep (arg1))
11298 /* Only call invert_truthvalue if operand is a truth value. */
11299 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11300 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11302 tem = invert_truthvalue (arg0);
11303 return non_lvalue (fold_convert (type, tem));
11305 /* Identical arguments cancel to zero. */
11306 if (operand_equal_p (arg0, arg1, 0))
11307 return omit_one_operand (type, integer_zero_node, arg0);
11309 /* !X ^ X is always true. */
11310 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11311 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11312 return omit_one_operand (type, integer_one_node, arg1);
11314 /* X ^ !X is always true. */
11315 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11316 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11317 return omit_one_operand (type, integer_one_node, arg0);
11323 tem = fold_comparison (code, type, op0, op1);
11324 if (tem != NULL_TREE)
11327 /* bool_var != 0 becomes bool_var. */
11328 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11329 && code == NE_EXPR)
11330 return non_lvalue (fold_convert (type, arg0));
11332 /* bool_var == 1 becomes bool_var. */
11333 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11334 && code == EQ_EXPR)
11335 return non_lvalue (fold_convert (type, arg0));
11337 /* bool_var != 1 becomes !bool_var. */
11338 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11339 && code == NE_EXPR)
11340 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11342 /* bool_var == 0 becomes !bool_var. */
11343 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11344 && code == EQ_EXPR)
11345 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
11347 /* If this is an equality comparison of the address of two non-weak,
11348 unaliased symbols neither of which are extern (since we do not
11349 have access to attributes for externs), then we know the result. */
11350 if (TREE_CODE (arg0) == ADDR_EXPR
11351 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11352 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11353 && ! lookup_attribute ("alias",
11354 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11355 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11356 && TREE_CODE (arg1) == ADDR_EXPR
11357 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11358 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11359 && ! lookup_attribute ("alias",
11360 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11361 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11363 /* We know that we're looking at the address of two
11364 non-weak, unaliased, static _DECL nodes.
11366 It is both wasteful and incorrect to call operand_equal_p
11367 to compare the two ADDR_EXPR nodes. It is wasteful in that
11368 all we need to do is test pointer equality for the arguments
11369 to the two ADDR_EXPR nodes. It is incorrect to use
11370 operand_equal_p as that function is NOT equivalent to a
11371 C equality test. It can in fact return false for two
11372 objects which would test as equal using the C equality
11374 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11375 return constant_boolean_node (equal
11376 ? code == EQ_EXPR : code != EQ_EXPR,
11380 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11381 a MINUS_EXPR of a constant, we can convert it into a comparison with
11382 a revised constant as long as no overflow occurs. */
11383 if (TREE_CODE (arg1) == INTEGER_CST
11384 && (TREE_CODE (arg0) == PLUS_EXPR
11385 || TREE_CODE (arg0) == MINUS_EXPR)
11386 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11387 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11388 ? MINUS_EXPR : PLUS_EXPR,
11389 fold_convert (TREE_TYPE (arg0), arg1),
11390 TREE_OPERAND (arg0, 1), 0))
11391 && !TREE_OVERFLOW (tem))
11392 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11394 /* Similarly for a NEGATE_EXPR. */
11395 if (TREE_CODE (arg0) == NEGATE_EXPR
11396 && TREE_CODE (arg1) == INTEGER_CST
11397 && 0 != (tem = negate_expr (arg1))
11398 && TREE_CODE (tem) == INTEGER_CST
11399 && !TREE_OVERFLOW (tem))
11400 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11402 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11403 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11404 && TREE_CODE (arg1) == INTEGER_CST
11405 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11406 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11407 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11408 fold_convert (TREE_TYPE (arg0), arg1),
11409 TREE_OPERAND (arg0, 1)));
11411 /* Transform comparisons of the form X +- C CMP X. */
11412 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11413 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11414 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11415 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11416 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11418 tree cst = TREE_OPERAND (arg0, 1);
11420 if (code == EQ_EXPR
11421 && !integer_zerop (cst))
11422 return omit_two_operands (type, boolean_false_node,
11423 TREE_OPERAND (arg0, 0), arg1);
11425 return omit_two_operands (type, boolean_true_node,
11426 TREE_OPERAND (arg0, 0), arg1);
11429 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11430 for !=. Don't do this for ordered comparisons due to overflow. */
11431 if (TREE_CODE (arg0) == MINUS_EXPR
11432 && integer_zerop (arg1))
11433 return fold_build2 (code, type,
11434 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11436 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11437 if (TREE_CODE (arg0) == ABS_EXPR
11438 && (integer_zerop (arg1) || real_zerop (arg1)))
11439 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11441 /* If this is an EQ or NE comparison with zero and ARG0 is
11442 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11443 two operations, but the latter can be done in one less insn
11444 on machines that have only two-operand insns or on which a
11445 constant cannot be the first operand. */
11446 if (TREE_CODE (arg0) == BIT_AND_EXPR
11447 && integer_zerop (arg1))
11449 tree arg00 = TREE_OPERAND (arg0, 0);
11450 tree arg01 = TREE_OPERAND (arg0, 1);
11451 if (TREE_CODE (arg00) == LSHIFT_EXPR
11452 && integer_onep (TREE_OPERAND (arg00, 0)))
11454 fold_build2 (code, type,
11455 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11456 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11457 arg01, TREE_OPERAND (arg00, 1)),
11458 fold_convert (TREE_TYPE (arg0),
11459 integer_one_node)),
11461 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
11462 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
11464 fold_build2 (code, type,
11465 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11466 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11467 arg00, TREE_OPERAND (arg01, 1)),
11468 fold_convert (TREE_TYPE (arg0),
11469 integer_one_node)),
11473 /* If this is an NE or EQ comparison of zero against the result of a
11474 signed MOD operation whose second operand is a power of 2, make
11475 the MOD operation unsigned since it is simpler and equivalent. */
11476 if (integer_zerop (arg1)
11477 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11478 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11479 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11480 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11481 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11482 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11484 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11485 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11486 fold_convert (newtype,
11487 TREE_OPERAND (arg0, 0)),
11488 fold_convert (newtype,
11489 TREE_OPERAND (arg0, 1)));
11491 return fold_build2 (code, type, newmod,
11492 fold_convert (newtype, arg1));
11495 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11496 C1 is a valid shift constant, and C2 is a power of two, i.e.
11498 if (TREE_CODE (arg0) == BIT_AND_EXPR
11499 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11500 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11502 && integer_pow2p (TREE_OPERAND (arg0, 1))
11503 && integer_zerop (arg1))
11505 tree itype = TREE_TYPE (arg0);
11506 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11507 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11509 /* Check for a valid shift count. */
11510 if (TREE_INT_CST_HIGH (arg001) == 0
11511 && TREE_INT_CST_LOW (arg001) < prec)
11513 tree arg01 = TREE_OPERAND (arg0, 1);
11514 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11515 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11516 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11517 can be rewritten as (X & (C2 << C1)) != 0. */
11518 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11520 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11521 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11522 return fold_build2 (code, type, tem, arg1);
11524 /* Otherwise, for signed (arithmetic) shifts,
11525 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11526 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11527 else if (!TYPE_UNSIGNED (itype))
11528 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11529 arg000, build_int_cst (itype, 0));
11530 /* Otherwise, of unsigned (logical) shifts,
11531 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11532 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11534 return omit_one_operand (type,
11535 code == EQ_EXPR ? integer_one_node
11536 : integer_zero_node,
11541 /* If this is an NE comparison of zero with an AND of one, remove the
11542 comparison since the AND will give the correct value. */
11543 if (code == NE_EXPR
11544 && integer_zerop (arg1)
11545 && TREE_CODE (arg0) == BIT_AND_EXPR
11546 && integer_onep (TREE_OPERAND (arg0, 1)))
11547 return fold_convert (type, arg0);
11549 /* If we have (A & C) == C where C is a power of 2, convert this into
11550 (A & C) != 0. Similarly for NE_EXPR. */
11551 if (TREE_CODE (arg0) == BIT_AND_EXPR
11552 && integer_pow2p (TREE_OPERAND (arg0, 1))
11553 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11554 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11555 arg0, fold_convert (TREE_TYPE (arg0),
11556 integer_zero_node));
11558 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11559 bit, then fold the expression into A < 0 or A >= 0. */
11560 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
11564 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
11565 Similarly for NE_EXPR. */
11566 if (TREE_CODE (arg0) == BIT_AND_EXPR
11567 && TREE_CODE (arg1) == INTEGER_CST
11568 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11570 tree notc = fold_build1 (BIT_NOT_EXPR,
11571 TREE_TYPE (TREE_OPERAND (arg0, 1)),
11572 TREE_OPERAND (arg0, 1));
11573 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11575 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11576 if (integer_nonzerop (dandnotc))
11577 return omit_one_operand (type, rslt, arg0);
11580 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
11581 Similarly for NE_EXPR. */
11582 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11583 && TREE_CODE (arg1) == INTEGER_CST
11584 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11586 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
11587 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11588 TREE_OPERAND (arg0, 1), notd);
11589 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
11590 if (integer_nonzerop (candnotd))
11591 return omit_one_operand (type, rslt, arg0);
11594 /* If this is a comparison of a field, we may be able to simplify it. */
11595 if ((TREE_CODE (arg0) == COMPONENT_REF
11596 || TREE_CODE (arg0) == BIT_FIELD_REF)
11597 /* Handle the constant case even without -O
11598 to make sure the warnings are given. */
11599 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11601 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
11606 /* Optimize comparisons of strlen vs zero to a compare of the
11607 first character of the string vs zero. To wit,
11608 strlen(ptr) == 0 => *ptr == 0
11609 strlen(ptr) != 0 => *ptr != 0
11610 Other cases should reduce to one of these two (or a constant)
11611 due to the return value of strlen being unsigned. */
11612 if (TREE_CODE (arg0) == CALL_EXPR
11613 && integer_zerop (arg1))
11615 tree fndecl = get_callee_fndecl (arg0);
11618 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
11619 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
11620 && call_expr_nargs (arg0) == 1
11621 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
11623 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
11624 return fold_build2 (code, type, iref,
11625 build_int_cst (TREE_TYPE (iref), 0));
11629 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11630 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11631 if (TREE_CODE (arg0) == RSHIFT_EXPR
11632 && integer_zerop (arg1)
11633 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11635 tree arg00 = TREE_OPERAND (arg0, 0);
11636 tree arg01 = TREE_OPERAND (arg0, 1);
11637 tree itype = TREE_TYPE (arg00);
11638 if (TREE_INT_CST_HIGH (arg01) == 0
11639 && TREE_INT_CST_LOW (arg01)
11640 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11642 if (TYPE_UNSIGNED (itype))
11644 itype = signed_type_for (itype);
11645 arg00 = fold_convert (itype, arg00);
11647 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11648 type, arg00, build_int_cst (itype, 0));
11652 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11653 if (integer_zerop (arg1)
11654 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11655 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11656 TREE_OPERAND (arg0, 1));
11658 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11659 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11660 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11661 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11662 build_int_cst (TREE_TYPE (arg1), 0));
11663 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11664 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11665 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11666 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11667 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11668 build_int_cst (TREE_TYPE (arg1), 0));
11670 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11671 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11672 && TREE_CODE (arg1) == INTEGER_CST
11673 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11674 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11675 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11676 TREE_OPERAND (arg0, 1), arg1));
11678 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11679 (X & C) == 0 when C is a single bit. */
11680 if (TREE_CODE (arg0) == BIT_AND_EXPR
11681 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11682 && integer_zerop (arg1)
11683 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11685 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11686 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11687 TREE_OPERAND (arg0, 1));
11688 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11692 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11693 constant C is a power of two, i.e. a single bit. */
11694 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11695 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11696 && integer_zerop (arg1)
11697 && integer_pow2p (TREE_OPERAND (arg0, 1))
11698 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11699 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11701 tree arg00 = TREE_OPERAND (arg0, 0);
11702 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11703 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11706 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11707 when is C is a power of two, i.e. a single bit. */
11708 if (TREE_CODE (arg0) == BIT_AND_EXPR
11709 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11710 && integer_zerop (arg1)
11711 && integer_pow2p (TREE_OPERAND (arg0, 1))
11712 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11713 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11715 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11716 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11717 arg000, TREE_OPERAND (arg0, 1));
11718 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11719 tem, build_int_cst (TREE_TYPE (tem), 0));
11722 if (integer_zerop (arg1)
11723 && tree_expr_nonzero_p (arg0))
11725 tree res = constant_boolean_node (code==NE_EXPR, type);
11726 return omit_one_operand (type, res, arg0);
11729 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11730 if (TREE_CODE (arg0) == NEGATE_EXPR
11731 && TREE_CODE (arg1) == NEGATE_EXPR)
11732 return fold_build2 (code, type,
11733 TREE_OPERAND (arg0, 0),
11734 TREE_OPERAND (arg1, 0));
11736 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11737 if (TREE_CODE (arg0) == BIT_AND_EXPR
11738 && TREE_CODE (arg1) == BIT_AND_EXPR)
11740 tree arg00 = TREE_OPERAND (arg0, 0);
11741 tree arg01 = TREE_OPERAND (arg0, 1);
11742 tree arg10 = TREE_OPERAND (arg1, 0);
11743 tree arg11 = TREE_OPERAND (arg1, 1);
11744 tree itype = TREE_TYPE (arg0);
11746 if (operand_equal_p (arg01, arg11, 0))
11747 return fold_build2 (code, type,
11748 fold_build2 (BIT_AND_EXPR, itype,
11749 fold_build2 (BIT_XOR_EXPR, itype,
11752 build_int_cst (itype, 0));
11754 if (operand_equal_p (arg01, arg10, 0))
11755 return fold_build2 (code, type,
11756 fold_build2 (BIT_AND_EXPR, itype,
11757 fold_build2 (BIT_XOR_EXPR, itype,
11760 build_int_cst (itype, 0));
11762 if (operand_equal_p (arg00, arg11, 0))
11763 return fold_build2 (code, type,
11764 fold_build2 (BIT_AND_EXPR, itype,
11765 fold_build2 (BIT_XOR_EXPR, itype,
11768 build_int_cst (itype, 0));
11770 if (operand_equal_p (arg00, arg10, 0))
11771 return fold_build2 (code, type,
11772 fold_build2 (BIT_AND_EXPR, itype,
11773 fold_build2 (BIT_XOR_EXPR, itype,
11776 build_int_cst (itype, 0));
11779 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11780 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11782 tree arg00 = TREE_OPERAND (arg0, 0);
11783 tree arg01 = TREE_OPERAND (arg0, 1);
11784 tree arg10 = TREE_OPERAND (arg1, 0);
11785 tree arg11 = TREE_OPERAND (arg1, 1);
11786 tree itype = TREE_TYPE (arg0);
11788 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11789 operand_equal_p guarantees no side-effects so we don't need
11790 to use omit_one_operand on Z. */
11791 if (operand_equal_p (arg01, arg11, 0))
11792 return fold_build2 (code, type, arg00, arg10);
11793 if (operand_equal_p (arg01, arg10, 0))
11794 return fold_build2 (code, type, arg00, arg11);
11795 if (operand_equal_p (arg00, arg11, 0))
11796 return fold_build2 (code, type, arg01, arg10);
11797 if (operand_equal_p (arg00, arg10, 0))
11798 return fold_build2 (code, type, arg01, arg11);
11800 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11801 if (TREE_CODE (arg01) == INTEGER_CST
11802 && TREE_CODE (arg11) == INTEGER_CST)
11803 return fold_build2 (code, type,
11804 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11805 fold_build2 (BIT_XOR_EXPR, itype,
11810 /* Attempt to simplify equality/inequality comparisons of complex
11811 values. Only lower the comparison if the result is known or
11812 can be simplified to a single scalar comparison. */
11813 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11814 || TREE_CODE (arg0) == COMPLEX_CST)
11815 && (TREE_CODE (arg1) == COMPLEX_EXPR
11816 || TREE_CODE (arg1) == COMPLEX_CST))
11818 tree real0, imag0, real1, imag1;
11821 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11823 real0 = TREE_OPERAND (arg0, 0);
11824 imag0 = TREE_OPERAND (arg0, 1);
11828 real0 = TREE_REALPART (arg0);
11829 imag0 = TREE_IMAGPART (arg0);
11832 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11834 real1 = TREE_OPERAND (arg1, 0);
11835 imag1 = TREE_OPERAND (arg1, 1);
11839 real1 = TREE_REALPART (arg1);
11840 imag1 = TREE_IMAGPART (arg1);
11843 rcond = fold_binary (code, type, real0, real1);
11844 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11846 if (integer_zerop (rcond))
11848 if (code == EQ_EXPR)
11849 return omit_two_operands (type, boolean_false_node,
11851 return fold_build2 (NE_EXPR, type, imag0, imag1);
11855 if (code == NE_EXPR)
11856 return omit_two_operands (type, boolean_true_node,
11858 return fold_build2 (EQ_EXPR, type, imag0, imag1);
11862 icond = fold_binary (code, type, imag0, imag1);
11863 if (icond && TREE_CODE (icond) == INTEGER_CST)
11865 if (integer_zerop (icond))
11867 if (code == EQ_EXPR)
11868 return omit_two_operands (type, boolean_false_node,
11870 return fold_build2 (NE_EXPR, type, real0, real1);
11874 if (code == NE_EXPR)
11875 return omit_two_operands (type, boolean_true_node,
11877 return fold_build2 (EQ_EXPR, type, real0, real1);
11888 tem = fold_comparison (code, type, op0, op1);
11889 if (tem != NULL_TREE)
11892 /* Transform comparisons of the form X +- C CMP X. */
11893 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11894 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11895 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11896 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11897 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11898 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11900 tree arg01 = TREE_OPERAND (arg0, 1);
11901 enum tree_code code0 = TREE_CODE (arg0);
11904 if (TREE_CODE (arg01) == REAL_CST)
11905 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11907 is_positive = tree_int_cst_sgn (arg01);
11909 /* (X - c) > X becomes false. */
11910 if (code == GT_EXPR
11911 && ((code0 == MINUS_EXPR && is_positive >= 0)
11912 || (code0 == PLUS_EXPR && is_positive <= 0)))
11914 if (TREE_CODE (arg01) == INTEGER_CST
11915 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11916 fold_overflow_warning (("assuming signed overflow does not "
11917 "occur when assuming that (X - c) > X "
11918 "is always false"),
11919 WARN_STRICT_OVERFLOW_ALL);
11920 return constant_boolean_node (0, type);
11923 /* Likewise (X + c) < X becomes false. */
11924 if (code == LT_EXPR
11925 && ((code0 == PLUS_EXPR && is_positive >= 0)
11926 || (code0 == MINUS_EXPR && is_positive <= 0)))
11928 if (TREE_CODE (arg01) == INTEGER_CST
11929 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11930 fold_overflow_warning (("assuming signed overflow does not "
11931 "occur when assuming that "
11932 "(X + c) < X is always false"),
11933 WARN_STRICT_OVERFLOW_ALL);
11934 return constant_boolean_node (0, type);
11937 /* Convert (X - c) <= X to true. */
11938 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11940 && ((code0 == MINUS_EXPR && is_positive >= 0)
11941 || (code0 == PLUS_EXPR && is_positive <= 0)))
11943 if (TREE_CODE (arg01) == INTEGER_CST
11944 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11945 fold_overflow_warning (("assuming signed overflow does not "
11946 "occur when assuming that "
11947 "(X - c) <= X is always true"),
11948 WARN_STRICT_OVERFLOW_ALL);
11949 return constant_boolean_node (1, type);
11952 /* Convert (X + c) >= X to true. */
11953 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11955 && ((code0 == PLUS_EXPR && is_positive >= 0)
11956 || (code0 == MINUS_EXPR && is_positive <= 0)))
11958 if (TREE_CODE (arg01) == INTEGER_CST
11959 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11960 fold_overflow_warning (("assuming signed overflow does not "
11961 "occur when assuming that "
11962 "(X + c) >= X is always true"),
11963 WARN_STRICT_OVERFLOW_ALL);
11964 return constant_boolean_node (1, type);
11967 if (TREE_CODE (arg01) == INTEGER_CST)
11969 /* Convert X + c > X and X - c < X to true for integers. */
11970 if (code == GT_EXPR
11971 && ((code0 == PLUS_EXPR && is_positive > 0)
11972 || (code0 == MINUS_EXPR && is_positive < 0)))
11974 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11975 fold_overflow_warning (("assuming signed overflow does "
11976 "not occur when assuming that "
11977 "(X + c) > X is always true"),
11978 WARN_STRICT_OVERFLOW_ALL);
11979 return constant_boolean_node (1, type);
11982 if (code == LT_EXPR
11983 && ((code0 == MINUS_EXPR && is_positive > 0)
11984 || (code0 == PLUS_EXPR && is_positive < 0)))
11986 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11987 fold_overflow_warning (("assuming signed overflow does "
11988 "not occur when assuming that "
11989 "(X - c) < X is always true"),
11990 WARN_STRICT_OVERFLOW_ALL);
11991 return constant_boolean_node (1, type);
11994 /* Convert X + c <= X and X - c >= X to false for integers. */
11995 if (code == LE_EXPR
11996 && ((code0 == PLUS_EXPR && is_positive > 0)
11997 || (code0 == MINUS_EXPR && is_positive < 0)))
11999 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12000 fold_overflow_warning (("assuming signed overflow does "
12001 "not occur when assuming that "
12002 "(X + c) <= X is always false"),
12003 WARN_STRICT_OVERFLOW_ALL);
12004 return constant_boolean_node (0, type);
12007 if (code == GE_EXPR
12008 && ((code0 == MINUS_EXPR && is_positive > 0)
12009 || (code0 == PLUS_EXPR && is_positive < 0)))
12011 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12012 fold_overflow_warning (("assuming signed overflow does "
12013 "not occur when assuming that "
12014 "(X - c) >= X is always false"),
12015 WARN_STRICT_OVERFLOW_ALL);
12016 return constant_boolean_node (0, type);
12021 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12022 This transformation affects the cases which are handled in later
12023 optimizations involving comparisons with non-negative constants. */
12024 if (TREE_CODE (arg1) == INTEGER_CST
12025 && TREE_CODE (arg0) != INTEGER_CST
12026 && tree_int_cst_sgn (arg1) > 0)
12028 if (code == GE_EXPR)
12030 arg1 = const_binop (MINUS_EXPR, arg1,
12031 build_int_cst (TREE_TYPE (arg1), 1), 0);
12032 return fold_build2 (GT_EXPR, type, arg0,
12033 fold_convert (TREE_TYPE (arg0), arg1));
12035 if (code == LT_EXPR)
12037 arg1 = const_binop (MINUS_EXPR, arg1,
12038 build_int_cst (TREE_TYPE (arg1), 1), 0);
12039 return fold_build2 (LE_EXPR, type, arg0,
12040 fold_convert (TREE_TYPE (arg0), arg1));
12044 /* Comparisons with the highest or lowest possible integer of
12045 the specified precision will have known values. */
12047 tree arg1_type = TREE_TYPE (arg1);
12048 unsigned int width = TYPE_PRECISION (arg1_type);
12050 if (TREE_CODE (arg1) == INTEGER_CST
12051 && !TREE_OVERFLOW (arg1)
12052 && width <= 2 * HOST_BITS_PER_WIDE_INT
12053 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12055 HOST_WIDE_INT signed_max_hi;
12056 unsigned HOST_WIDE_INT signed_max_lo;
12057 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12059 if (width <= HOST_BITS_PER_WIDE_INT)
12061 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12066 if (TYPE_UNSIGNED (arg1_type))
12068 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12074 max_lo = signed_max_lo;
12075 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12081 width -= HOST_BITS_PER_WIDE_INT;
12082 signed_max_lo = -1;
12083 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12088 if (TYPE_UNSIGNED (arg1_type))
12090 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12095 max_hi = signed_max_hi;
12096 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12100 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12101 && TREE_INT_CST_LOW (arg1) == max_lo)
12105 return omit_one_operand (type, integer_zero_node, arg0);
12108 return fold_build2 (EQ_EXPR, type, op0, op1);
12111 return omit_one_operand (type, integer_one_node, arg0);
12114 return fold_build2 (NE_EXPR, type, op0, op1);
12116 /* The GE_EXPR and LT_EXPR cases above are not normally
12117 reached because of previous transformations. */
12122 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12124 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12128 arg1 = const_binop (PLUS_EXPR, arg1,
12129 build_int_cst (TREE_TYPE (arg1), 1), 0);
12130 return fold_build2 (EQ_EXPR, type,
12131 fold_convert (TREE_TYPE (arg1), arg0),
12134 arg1 = const_binop (PLUS_EXPR, arg1,
12135 build_int_cst (TREE_TYPE (arg1), 1), 0);
12136 return fold_build2 (NE_EXPR, type,
12137 fold_convert (TREE_TYPE (arg1), arg0),
12142 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12144 && TREE_INT_CST_LOW (arg1) == min_lo)
12148 return omit_one_operand (type, integer_zero_node, arg0);
12151 return fold_build2 (EQ_EXPR, type, op0, op1);
12154 return omit_one_operand (type, integer_one_node, arg0);
12157 return fold_build2 (NE_EXPR, type, op0, op1);
12162 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12164 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12168 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12169 return fold_build2 (NE_EXPR, type,
12170 fold_convert (TREE_TYPE (arg1), arg0),
12173 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12174 return fold_build2 (EQ_EXPR, type,
12175 fold_convert (TREE_TYPE (arg1), arg0),
12181 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12182 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12183 && TYPE_UNSIGNED (arg1_type)
12184 /* We will flip the signedness of the comparison operator
12185 associated with the mode of arg1, so the sign bit is
12186 specified by this mode. Check that arg1 is the signed
12187 max associated with this sign bit. */
12188 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12189 /* signed_type does not work on pointer types. */
12190 && INTEGRAL_TYPE_P (arg1_type))
12192 /* The following case also applies to X < signed_max+1
12193 and X >= signed_max+1 because previous transformations. */
12194 if (code == LE_EXPR || code == GT_EXPR)
12197 st = signed_type_for (TREE_TYPE (arg1));
12198 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12199 type, fold_convert (st, arg0),
12200 build_int_cst (st, 0));
12206 /* If we are comparing an ABS_EXPR with a constant, we can
12207 convert all the cases into explicit comparisons, but they may
12208 well not be faster than doing the ABS and one comparison.
12209 But ABS (X) <= C is a range comparison, which becomes a subtraction
12210 and a comparison, and is probably faster. */
12211 if (code == LE_EXPR
12212 && TREE_CODE (arg1) == INTEGER_CST
12213 && TREE_CODE (arg0) == ABS_EXPR
12214 && ! TREE_SIDE_EFFECTS (arg0)
12215 && (0 != (tem = negate_expr (arg1)))
12216 && TREE_CODE (tem) == INTEGER_CST
12217 && !TREE_OVERFLOW (tem))
12218 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12219 build2 (GE_EXPR, type,
12220 TREE_OPERAND (arg0, 0), tem),
12221 build2 (LE_EXPR, type,
12222 TREE_OPERAND (arg0, 0), arg1));
12224 /* Convert ABS_EXPR<x> >= 0 to true. */
12225 strict_overflow_p = false;
12226 if (code == GE_EXPR
12227 && (integer_zerop (arg1)
12228 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12229 && real_zerop (arg1)))
12230 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12232 if (strict_overflow_p)
12233 fold_overflow_warning (("assuming signed overflow does not occur "
12234 "when simplifying comparison of "
12235 "absolute value and zero"),
12236 WARN_STRICT_OVERFLOW_CONDITIONAL);
12237 return omit_one_operand (type, integer_one_node, arg0);
12240 /* Convert ABS_EXPR<x> < 0 to false. */
12241 strict_overflow_p = false;
12242 if (code == LT_EXPR
12243 && (integer_zerop (arg1) || real_zerop (arg1))
12244 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12246 if (strict_overflow_p)
12247 fold_overflow_warning (("assuming signed overflow does not occur "
12248 "when simplifying comparison of "
12249 "absolute value and zero"),
12250 WARN_STRICT_OVERFLOW_CONDITIONAL);
12251 return omit_one_operand (type, integer_zero_node, arg0);
12254 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12255 and similarly for >= into !=. */
12256 if ((code == LT_EXPR || code == GE_EXPR)
12257 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12258 && TREE_CODE (arg1) == LSHIFT_EXPR
12259 && integer_onep (TREE_OPERAND (arg1, 0)))
12260 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12261 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12262 TREE_OPERAND (arg1, 1)),
12263 build_int_cst (TREE_TYPE (arg0), 0));
12265 if ((code == LT_EXPR || code == GE_EXPR)
12266 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12267 && (TREE_CODE (arg1) == NOP_EXPR
12268 || TREE_CODE (arg1) == CONVERT_EXPR)
12269 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12270 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12272 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12273 fold_convert (TREE_TYPE (arg0),
12274 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12275 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12277 build_int_cst (TREE_TYPE (arg0), 0));
12281 case UNORDERED_EXPR:
12289 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12291 t1 = fold_relational_const (code, type, arg0, arg1);
12292 if (t1 != NULL_TREE)
12296 /* If the first operand is NaN, the result is constant. */
12297 if (TREE_CODE (arg0) == REAL_CST
12298 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12299 && (code != LTGT_EXPR || ! flag_trapping_math))
12301 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12302 ? integer_zero_node
12303 : integer_one_node;
12304 return omit_one_operand (type, t1, arg1);
12307 /* If the second operand is NaN, the result is constant. */
12308 if (TREE_CODE (arg1) == REAL_CST
12309 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12310 && (code != LTGT_EXPR || ! flag_trapping_math))
12312 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12313 ? integer_zero_node
12314 : integer_one_node;
12315 return omit_one_operand (type, t1, arg0);
12318 /* Simplify unordered comparison of something with itself. */
12319 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12320 && operand_equal_p (arg0, arg1, 0))
12321 return constant_boolean_node (1, type);
12323 if (code == LTGT_EXPR
12324 && !flag_trapping_math
12325 && operand_equal_p (arg0, arg1, 0))
12326 return constant_boolean_node (0, type);
12328 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12330 tree targ0 = strip_float_extensions (arg0);
12331 tree targ1 = strip_float_extensions (arg1);
12332 tree newtype = TREE_TYPE (targ0);
12334 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12335 newtype = TREE_TYPE (targ1);
12337 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12338 return fold_build2 (code, type, fold_convert (newtype, targ0),
12339 fold_convert (newtype, targ1));
12344 case COMPOUND_EXPR:
12345 /* When pedantic, a compound expression can be neither an lvalue
12346 nor an integer constant expression. */
12347 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12349 /* Don't let (0, 0) be null pointer constant. */
12350 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12351 : fold_convert (type, arg1);
12352 return pedantic_non_lvalue (tem);
12355 if ((TREE_CODE (arg0) == REAL_CST
12356 && TREE_CODE (arg1) == REAL_CST)
12357 || (TREE_CODE (arg0) == INTEGER_CST
12358 && TREE_CODE (arg1) == INTEGER_CST))
12359 return build_complex (type, arg0, arg1);
12363 /* An ASSERT_EXPR should never be passed to fold_binary. */
12364 gcc_unreachable ();
12368 } /* switch (code) */
12371 /* Callback for walk_tree, looking for LABEL_EXPR.
12372 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
12373 Do not check the sub-tree of GOTO_EXPR. */
/* NOTE(review): this extract elides lines (return type, braces, and the
   switch case labels are missing); presumably the switch returns *TP for
   LABEL_EXPR and skips GOTO_EXPR sub-trees -- confirm against full source. */
12376 contains_label_1 (tree *tp,
12377 int *walk_subtrees,
12378 void *data ATTRIBUTE_UNUSED)
12380 switch (TREE_CODE (*tp))
/* Clearing *walk_subtrees tells walk_tree not to descend further here. */
12385 *walk_subtrees = 0;
12392 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12393 accessible from outside the sub-tree. Returns false if no
12394 addressable label is found. */
/* NOTE(review): the result is the boolean comparison against NULL_TREE
   below, not NULL_TREE itself; comment above adjusted accordingly. */
12397 contains_label_p (tree st)
12399 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12402 /* Fold a ternary expression of code CODE and type TYPE with operands
12403 OP0, OP1, and OP2. Return the folded expression if folding is
12404 successful. Otherwise, return NULL_TREE. */
/* NOTE(review): many interior lines of this function (case labels such as
   COND_EXPR, closing braces, and intermediate statements) are elided in
   this extract; the fragments below must not be restructured without the
   full source. */
12407 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12410 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12411 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Sanity check: only genuine three-operand expression codes belong here. */
12413 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12414 && TREE_CODE_LENGTH (code) == 3);
12416 /* Strip any conversions that don't change the mode. This is safe
12417 for every expression, except for a comparison expression because
12418 its signedness is derived from its operands. So, in the latter
12419 case, only strip conversions that don't change the signedness.
12421 Note that this is done as an internal manipulation within the
12422 constant folder, in order to find the simplest representation of
12423 the arguments so that their form can be studied. In any cases,
12424 the appropriate type conversions should be put back in the tree
12425 that will get out of the constant folder. */
/* Fold a COMPONENT_REF of a constant CONSTRUCTOR to the matching field's
   initializer value (the matching loop body is elided in this extract). */
12440 case COMPONENT_REF:
12441 if (TREE_CODE (arg0) == CONSTRUCTOR
12442 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12444 unsigned HOST_WIDE_INT idx;
12446 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12453 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12454 so all simple results must be passed through pedantic_non_lvalue. */
/* Constant condition: select the live arm, but keep the COND_EXPR when the
   dead arm has side effects containing a label, or when the arm's type
   would not match (e.g. "c ? x : throw" with a void arm). */
12455 if (TREE_CODE (arg0) == INTEGER_CST)
12457 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12458 tem = integer_zerop (arg0) ? op2 : op1;
12459 /* Only optimize constant conditions when the selected branch
12460 has the same type as the COND_EXPR. This avoids optimizing
12461 away "c ? x : throw", where the throw has a void type.
12462 Avoid throwing away that operand which contains label. */
12463 if ((!TREE_SIDE_EFFECTS (unused_op)
12464 || !contains_label_p (unused_op))
12465 && (! VOID_TYPE_P (TREE_TYPE (tem))
12466 || VOID_TYPE_P (type)))
12467 return pedantic_non_lvalue (tem);
/* A ? X : X can be collapsed to X (keeping A for its side effects). */
12470 if (operand_equal_p (arg1, op2, 0))
12471 return pedantic_omit_one_operand (type, arg1, arg0);
12473 /* If we have A op B ? A : C, we may be able to convert this to a
12474 simpler expression, depending on the operation and the values
12475 of B and C. Signed zeros prevent all of these transformations,
12476 for reasons given above each one.
12478 Also try swapping the arguments and inverting the conditional. */
12479 if (COMPARISON_CLASS_P (arg0)
12480 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12481 arg1, TREE_OPERAND (arg0, 1))
12482 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12484 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
/* Same idea with the arms swapped: invert the comparison first. */
12489 if (COMPARISON_CLASS_P (arg0)
12490 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12492 TREE_OPERAND (arg0, 1))
12493 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12495 tem = fold_truth_not_expr (arg0);
12496 if (tem && COMPARISON_CLASS_P (tem))
12498 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12504 /* If the second operand is simpler than the third, swap them
12505 since that produces better jump optimization results. */
12506 if (truth_value_p (TREE_CODE (arg0))
12507 && tree_swap_operands_p (op1, op2, false))
12509 /* See if this can be inverted. If it can't, possibly because
12510 it was a floating-point inequality comparison, don't do
12512 tem = fold_truth_not_expr (arg0);
12514 return fold_build3 (code, type, tem, op2, op1);
12517 /* Convert A ? 1 : 0 to simply A. */
12518 if (integer_onep (op1)
12519 && integer_zerop (op2)
12520 /* If we try to convert OP0 to our type, the
12521 call to fold will try to move the conversion inside
12522 a COND, which will recurse. In that case, the COND_EXPR
12523 is probably the best choice, so leave it alone. */
12524 && type == TREE_TYPE (arg0))
12525 return pedantic_non_lvalue (arg0);
12527 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12528 over COND_EXPR in cases such as floating point comparisons. */
12529 if (integer_zerop (op1)
12530 && integer_onep (op2)
12531 && truth_value_p (TREE_CODE (arg0)))
12532 return pedantic_non_lvalue (fold_convert (type,
12533 invert_truthvalue (arg0)));
12535 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12536 if (TREE_CODE (arg0) == LT_EXPR
12537 && integer_zerop (TREE_OPERAND (arg0, 1))
12538 && integer_zerop (op2)
12539 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12541 /* sign_bit_p only checks ARG1 bits within A's precision.
12542 If <sign bit of A> has wider type than A, bits outside
12543 of A's precision in <sign bit of A> need to be checked.
12544 If they are all 0, this optimization needs to be done
12545 in unsigned A's type, if they are all 1 in signed A's type,
12546 otherwise this can't be done. */
12547 if (TYPE_PRECISION (TREE_TYPE (tem))
12548 < TYPE_PRECISION (TREE_TYPE (arg1))
12549 && TYPE_PRECISION (TREE_TYPE (tem))
12550 < TYPE_PRECISION (type))
/* Double-word mask of the bits of ARG1 outside A's precision;
   mask_hi/mask_lo hold the high/low HOST_WIDE_INT halves. */
12552 unsigned HOST_WIDE_INT mask_lo;
12553 HOST_WIDE_INT mask_hi;
12554 int inner_width, outer_width;
12557 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12558 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12559 if (outer_width > TYPE_PRECISION (type))
12560 outer_width = TYPE_PRECISION (type);
12562 if (outer_width > HOST_BITS_PER_WIDE_INT)
12564 mask_hi = ((unsigned HOST_WIDE_INT) -1
12565 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12571 mask_lo = ((unsigned HOST_WIDE_INT) -1
12572 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12574 if (inner_width > HOST_BITS_PER_WIDE_INT)
12576 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12577 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12581 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12582 >> (HOST_BITS_PER_WIDE_INT - inner_width));
/* All outside bits set: redo the AND in A's signed type; all clear:
   redo it in A's unsigned type; otherwise the elided fallthrough
   presumably abandons the transformation -- confirm in full source. */
12584 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12585 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12587 tem_type = signed_type_for (TREE_TYPE (tem));
12588 tem = fold_convert (tem_type, tem);
12590 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12591 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12593 tem_type = unsigned_type_for (TREE_TYPE (tem));
12594 tem = fold_convert (tem_type, tem);
12601 return fold_convert (type,
12602 fold_build2 (BIT_AND_EXPR,
12603 TREE_TYPE (tem), tem,
12604 fold_convert (TREE_TYPE (tem),
12608 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
12609 already handled above. */
12610 if (TREE_CODE (arg0) == BIT_AND_EXPR
12611 && integer_onep (TREE_OPERAND (arg0, 1))
12612 && integer_zerop (op2)
12613 && integer_pow2p (arg1))
12615 tree tem = TREE_OPERAND (arg0, 0);
12617 if (TREE_CODE (tem) == RSHIFT_EXPR
12618 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
12619 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
12620 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
12621 return fold_build2 (BIT_AND_EXPR, type,
12622 TREE_OPERAND (tem, 0), arg1);
12625 /* A & N ? N : 0 is simply A & N if N is a power of two. This
12626 is probably obsolete because the first operand should be a
12627 truth value (that's why we have the two cases above), but let's
12628 leave it in until we can confirm this for all front-ends. */
12629 if (integer_zerop (op2)
12630 && TREE_CODE (arg0) == NE_EXPR
12631 && integer_zerop (TREE_OPERAND (arg0, 1))
12632 && integer_pow2p (arg1)
12633 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12634 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12635 arg1, OEP_ONLY_CONST))
12636 return pedantic_non_lvalue (fold_convert (type,
12637 TREE_OPERAND (arg0, 0)));
12639 /* Convert A ? B : 0 into A && B if A and B are truth values. */
12640 if (integer_zerop (op2)
12641 && truth_value_p (TREE_CODE (arg0))
12642 && truth_value_p (TREE_CODE (arg1)))
12643 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12644 fold_convert (type, arg0),
12647 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
12648 if (integer_onep (op2)
12649 && truth_value_p (TREE_CODE (arg0))
12650 && truth_value_p (TREE_CODE (arg1)))
12652 /* Only perform transformation if ARG0 is easily inverted. */
12653 tem = fold_truth_not_expr (arg0);
12655 return fold_build2 (TRUTH_ORIF_EXPR, type,
12656 fold_convert (type, tem),
12660 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
12661 if (integer_zerop (arg1)
12662 && truth_value_p (TREE_CODE (arg0))
12663 && truth_value_p (TREE_CODE (op2)))
12665 /* Only perform transformation if ARG0 is easily inverted. */
12666 tem = fold_truth_not_expr (arg0);
12668 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12669 fold_convert (type, tem),
12673 /* Convert A ? 1 : B into A || B if A and B are truth values. */
12674 if (integer_onep (arg1)
12675 && truth_value_p (TREE_CODE (arg0))
12676 && truth_value_p (TREE_CODE (op2)))
12677 return fold_build2 (TRUTH_ORIF_EXPR, type,
12678 fold_convert (type, arg0),
12684 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
12685 of fold_ternary on them. */
12686 gcc_unreachable ();
/* Extract element IDX/WIDTH from a constant vector (VECTOR_CST or a
   constant CONSTRUCTOR) when the selected bits align exactly on one
   element of TYPE. */
12688 case BIT_FIELD_REF:
12689 if ((TREE_CODE (arg0) == VECTOR_CST
12690 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
12691 && type == TREE_TYPE (TREE_TYPE (arg0))
12692 && host_integerp (arg1, 1)
12693 && host_integerp (op2, 1))
12695 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
12696 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
12699 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
12700 && (idx % width) == 0
12701 && (idx = idx / width)
12702 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
12704 tree elements = NULL_TREE;
12706 if (TREE_CODE (arg0) == VECTOR_CST)
12707 elements = TREE_VECTOR_CST_ELTS (arg0);
12710 unsigned HOST_WIDE_INT idx;
/* Builds the element list in reverse, then walks IDX links forward. */
12713 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
12714 elements = tree_cons (NULL_TREE, value, elements);
12716 while (idx-- > 0 && elements)
12717 elements = TREE_CHAIN (elements);
12719 return TREE_VALUE (elements);
/* Element not found in the (possibly short) list: treat as zero. */
12721 return fold_convert (type, integer_zero_node);
12728 } /* switch (code) */
12731 /* Perform constant folding and related simplification of EXPR.
12732 The related simplifications include x*1 => x, x*0 => 0, etc.,
12733 and application of the associative law.
12734 NOP_EXPR conversions may be removed freely (as long as we
12735 are careful not to change the type of the overall expression).
12736 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12737 but we can constant-fold them if they have constant operands. */
/* Under fold checking, the real worker is renamed fold_1 and a wrapping
   fold (defined later under ENABLE_FOLD_CHECKING) verifies that folding
   does not mutate its argument. */
12739 #ifdef ENABLE_FOLD_CHECKING
12740 # define fold(x) fold_1 (x)
12741 static tree fold_1 (tree);
/* NOTE(review): the function header and several case labels are elided in
   this extract; the body dispatches on the operand count of CODE. */
12747 const tree t = expr;
12748 enum tree_code code = TREE_CODE (t);
12749 enum tree_code_class kind = TREE_CODE_CLASS (code);
12752 /* Return right away if a constant. */
12753 if (kind == tcc_constant)
12756 /* CALL_EXPR-like objects with variable numbers of operands are
12757 treated specially. */
12758 if (kind == tcc_vl_exp)
12760 if (code == CALL_EXPR)
12762 tem = fold_call_expr (expr, false);
12763 return tem ? tem : expr;
/* Fixed-arity expressions: dispatch to fold_unary/fold_binary/fold_ternary
   by operand count; each returns NULL_TREE on failure, in which case the
   original expression is returned unchanged. */
12768 if (IS_EXPR_CODE_CLASS (kind)
12769 || IS_GIMPLE_STMT_CODE_CLASS (kind))
12771 tree type = TREE_TYPE (t);
12772 tree op0, op1, op2;
12774 switch (TREE_CODE_LENGTH (code))
12777 op0 = TREE_OPERAND (t, 0);
12778 tem = fold_unary (code, type, op0);
12779 return tem ? tem : expr;
12781 op0 = TREE_OPERAND (t, 0);
12782 op1 = TREE_OPERAND (t, 1);
12783 tem = fold_binary (code, type, op0, op1);
12784 return tem ? tem : expr;
12786 op0 = TREE_OPERAND (t, 0);
12787 op1 = TREE_OPERAND (t, 1);
12788 op2 = TREE_OPERAND (t, 2);
12789 tem = fold_ternary (code, type, op0, op1, op2);
12790 return tem ? tem : expr;
/* Presumably the CONST_DECL case: fold its initializer -- elided context,
   confirm against full source. */
12799 return fold (DECL_INITIAL (t));
12803 } /* switch (code) */
12806 #ifdef ENABLE_FOLD_CHECKING
12809 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12810 static void fold_check_failed (tree, tree);
12811 void print_fold_checksum (tree);
12813 /* When --enable-checking=fold, compute a digest of expr before
12814 and after actual fold call to see if fold did not accidentally
12815 change original expr. */
/* NOTE(review): the wrapper's function header is elided here.  The scheme:
   MD5-checksum EXPR, run fold_1, checksum EXPR again, and abort via
   fold_check_failed if the two digests differ. */
12821 struct md5_ctx ctx;
12822 unsigned char checksum_before[16], checksum_after[16];
12825 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12826 md5_init_ctx (&ctx);
12827 fold_checksum_tree (expr, &ctx, ht);
12828 md5_finish_ctx (&ctx, checksum_before);
/* Run the real folder between the two digests. */
12831 ret = fold_1 (expr);
12833 md5_init_ctx (&ctx);
12834 fold_checksum_tree (expr, &ctx, ht);
12835 md5_finish_ctx (&ctx, checksum_after);
/* Any difference means fold_1 mutated its input tree -- a bug. */
12838 if (memcmp (checksum_before, checksum_after, 16))
12839 fold_check_failed (expr, ret);
/* Print the MD5 checksum of EXPR to stderr as 32 hex digits plus a
   newline; debugging aid for fold checking. */
12845 print_fold_checksum (tree expr)
12847 struct md5_ctx ctx;
12848 unsigned char checksum[16], cnt;
12851 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12852 md5_init_ctx (&ctx);
12853 fold_checksum_tree (expr, &ctx, ht);
12854 md5_finish_ctx (&ctx, checksum);
12856 for (cnt = 0; cnt < 16; ++cnt)
12857 fprintf (stderr, "%02x", checksum[cnt]);
12858 putc ('\n', stderr);
/* Report an internal compiler error when the before/after checksums of a
   folded tree differ (EXPR and RET are currently unused). */
12862 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12864 internal_error ("fold check: original tree changed by fold");
/* Accumulate an MD5 digest of the tree rooted at EXPR into CTX, using hash
   table HT to avoid revisiting shared sub-trees.  Fields that fold is
   allowed to modify (DECL_ASSEMBLER_NAME, cached type values, ...) are
   masked out by checksumming a scrubbed stack copy instead.
   NOTE(review): several lines (braces, labels, early return on an already
   visited node) are elided in this extract. */
12868 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12871 enum tree_code code;
12872 struct tree_function_decl buf;
/* BUF must be large enough to hold a copy of any node we scrub below. */
12877 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12878 <= sizeof (struct tree_function_decl))
12879 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12882 slot = htab_find_slot (ht, expr, INSERT);
12886 code = TREE_CODE (expr);
12887 if (TREE_CODE_CLASS (code) == tcc_declaration
12888 && DECL_ASSEMBLER_NAME_SET_P (expr))
12890 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12891 memcpy ((char *) &buf, expr, tree_size (expr));
12892 expr = (tree) &buf;
12893 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12895 else if (TREE_CODE_CLASS (code) == tcc_type
12896 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12897 || TYPE_CACHED_VALUES_P (expr)
12898 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12900 /* Allow these fields to be modified. */
12901 memcpy ((char *) &buf, expr, tree_size (expr));
12902 expr = (tree) &buf;
12903 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12904 TYPE_POINTER_TO (expr) = NULL;
12905 TYPE_REFERENCE_TO (expr) = NULL;
12906 if (TYPE_CACHED_VALUES_P (expr))
12908 TYPE_CACHED_VALUES_P (expr) = 0;
12909 TYPE_CACHED_VALUES (expr) = NULL;
/* Hash the node's raw bytes, then recurse into its referenced trees. */
12912 md5_process_bytes (expr, tree_size (expr), ctx);
12913 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12914 if (TREE_CODE_CLASS (code) != tcc_type
12915 && TREE_CODE_CLASS (code) != tcc_declaration
12916 && code != TREE_LIST
12917 && code != SSA_NAME)
12918 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12919 switch (TREE_CODE_CLASS (code))
12925 md5_process_bytes (TREE_STRING_POINTER (expr),
12926 TREE_STRING_LENGTH (expr), ctx);
12929 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12930 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12933 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12939 case tcc_exceptional:
/* TREE_LIST: walk purpose/value, then iterate down the chain without
   growing the C stack (goto back to the elided recursive_label). */
12943 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12944 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12945 expr = TREE_CHAIN (expr);
12946 goto recursive_label;
12949 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12950 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12956 case tcc_expression:
12957 case tcc_reference:
12958 case tcc_comparison:
12961 case tcc_statement:
12963 len = TREE_OPERAND_LENGTH (expr);
12964 for (i = 0; i < len; ++i)
12965 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12967 case tcc_declaration:
12968 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12969 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12970 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12972 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12973 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12974 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12975 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12976 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12978 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12979 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12981 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12983 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12984 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12985 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
12989 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12990 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12991 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12992 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12993 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12994 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12995 if (INTEGRAL_TYPE_P (expr)
12996 || SCALAR_FLOAT_TYPE_P (expr))
12998 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12999 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13001 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13002 if (TREE_CODE (expr) == RECORD_TYPE
13003 || TREE_CODE (expr) == UNION_TYPE
13004 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13005 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13006 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13013 /* Helper function for outputting the checksum of a tree T. When
13014 debugging with gdb, you can "define mynext" to be "next" followed
13015 by "call debug_fold_checksum (op0)", then just trace down till the
/* Prints the 16 digest bytes as decimal integers separated by spaces
   (contrast print_fold_checksum, which prints hex). */
13019 debug_fold_checksum (tree t)
13022 unsigned char checksum[16];
13023 struct md5_ctx ctx;
13024 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13026 md5_init_ctx (&ctx);
13027 fold_checksum_tree (t, &ctx, ht);
13028 md5_finish_ctx (&ctx, checksum);
13031 for (i = 0; i < 16; i++)
13032 fprintf (stderr, "%d ", checksum[i]);
13034 fprintf (stderr, "\n");
13039 /* Fold a unary tree expression with code CODE of type TYPE with an
13040 operand OP0. Return a folded expression if successful. Otherwise,
13041 return a tree expression with code CODE of type TYPE with an
/* Under ENABLE_FOLD_CHECKING, OP0 is MD5-checksummed before and after the
   fold to verify folding did not mutate it. */
13045 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13048 #ifdef ENABLE_FOLD_CHECKING
13049 unsigned char checksum_before[16], checksum_after[16];
13050 struct md5_ctx ctx;
13053 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13054 md5_init_ctx (&ctx);
13055 fold_checksum_tree (op0, &ctx, ht);
13056 md5_finish_ctx (&ctx, checksum_before);
/* Try to fold; if fold_unary fails (NULL), build the plain expression. */
13060 tem = fold_unary (code, type, op0);
13062 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13064 #ifdef ENABLE_FOLD_CHECKING
13065 md5_init_ctx (&ctx);
13066 fold_checksum_tree (op0, &ctx, ht);
13067 md5_finish_ctx (&ctx, checksum_after);
13070 if (memcmp (checksum_before, checksum_after, 16))
13071 fold_check_failed (op0, tem);
13076 /* Fold a binary tree expression with code CODE of type TYPE with
13077 operands OP0 and OP1. Return a folded expression if successful.
13078 Otherwise, return a tree expression with code CODE of type TYPE
13079 with operands OP0 and OP1. */
/* Under ENABLE_FOLD_CHECKING each operand is checksummed before and after
   folding, and fold_check_failed reports any mutation. */
13082 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13086 #ifdef ENABLE_FOLD_CHECKING
13087 unsigned char checksum_before_op0[16],
13088 checksum_before_op1[16],
13089 checksum_after_op0[16],
13090 checksum_after_op1[16];
13091 struct md5_ctx ctx;
13094 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13095 md5_init_ctx (&ctx);
13096 fold_checksum_tree (op0, &ctx, ht);
13097 md5_finish_ctx (&ctx, checksum_before_op0);
13100 md5_init_ctx (&ctx);
13101 fold_checksum_tree (op1, &ctx, ht);
13102 md5_finish_ctx (&ctx, checksum_before_op1);
/* Try to fold; if fold_binary fails (NULL), build the plain expression. */
13106 tem = fold_binary (code, type, op0, op1);
13108 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13110 #ifdef ENABLE_FOLD_CHECKING
13111 md5_init_ctx (&ctx);
13112 fold_checksum_tree (op0, &ctx, ht);
13113 md5_finish_ctx (&ctx, checksum_after_op0);
13116 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13117 fold_check_failed (op0, tem);
13119 md5_init_ctx (&ctx);
13120 fold_checksum_tree (op1, &ctx, ht);
13121 md5_finish_ctx (&ctx, checksum_after_op1);
13124 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13125 fold_check_failed (op1, tem);
13130 /* Fold a ternary tree expression with code CODE of type TYPE with
13131 operands OP0, OP1, and OP2. Return a folded expression if
13132 successful. Otherwise, return a tree expression with code CODE of
13133 type TYPE with operands OP0, OP1, and OP2. */
/* Same checking pattern as fold_build2_stat, extended to three operands. */
13136 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13140 #ifdef ENABLE_FOLD_CHECKING
13141 unsigned char checksum_before_op0[16],
13142 checksum_before_op1[16],
13143 checksum_before_op2[16],
13144 checksum_after_op0[16],
13145 checksum_after_op1[16],
13146 checksum_after_op2[16];
13147 struct md5_ctx ctx;
13150 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13151 md5_init_ctx (&ctx);
13152 fold_checksum_tree (op0, &ctx, ht);
13153 md5_finish_ctx (&ctx, checksum_before_op0);
13156 md5_init_ctx (&ctx);
13157 fold_checksum_tree (op1, &ctx, ht);
13158 md5_finish_ctx (&ctx, checksum_before_op1);
13161 md5_init_ctx (&ctx);
13162 fold_checksum_tree (op2, &ctx, ht);
13163 md5_finish_ctx (&ctx, checksum_before_op2);
/* Variable-arity codes (tcc_vl_exp, e.g. CALL_EXPR) must not come here. */
13167 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13168 tem = fold_ternary (code, type, op0, op1, op2);
13170 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13172 #ifdef ENABLE_FOLD_CHECKING
13173 md5_init_ctx (&ctx);
13174 fold_checksum_tree (op0, &ctx, ht);
13175 md5_finish_ctx (&ctx, checksum_after_op0);
13178 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13179 fold_check_failed (op0, tem);
13181 md5_init_ctx (&ctx);
13182 fold_checksum_tree (op1, &ctx, ht);
13183 md5_finish_ctx (&ctx, checksum_after_op1);
13186 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13187 fold_check_failed (op1, tem);
13189 md5_init_ctx (&ctx);
13190 fold_checksum_tree (op2, &ctx, ht);
13191 md5_finish_ctx (&ctx, checksum_after_op2);
13194 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13195 fold_check_failed (op2, tem);
13200 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13201 arguments in ARGARRAY, and a null static chain.
13202 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13203 of type TYPE from the given operands as constructed by build_call_array. */
/* Under ENABLE_FOLD_CHECKING, FN and the whole argument list each get a
   before/after MD5 digest; the arguments share one combined digest. */
13206 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13209 #ifdef ENABLE_FOLD_CHECKING
13210 unsigned char checksum_before_fn[16],
13211 checksum_before_arglist[16],
13212 checksum_after_fn[16],
13213 checksum_after_arglist[16];
13214 struct md5_ctx ctx;
13218 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13219 md5_init_ctx (&ctx);
13220 fold_checksum_tree (fn, &ctx, ht);
13221 md5_finish_ctx (&ctx, checksum_before_fn);
13224 md5_init_ctx (&ctx);
13225 for (i = 0; i < nargs; i++)
13226 fold_checksum_tree (argarray[i], &ctx, ht);
13227 md5_finish_ctx (&ctx, checksum_before_arglist);
13231 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13233 #ifdef ENABLE_FOLD_CHECKING
13234 md5_init_ctx (&ctx);
13235 fold_checksum_tree (fn, &ctx, ht);
13236 md5_finish_ctx (&ctx, checksum_after_fn);
13239 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13240 fold_check_failed (fn, tem);
13242 md5_init_ctx (&ctx);
13243 for (i = 0; i < nargs; i++)
13244 fold_checksum_tree (argarray[i], &ctx, ht);
13245 md5_finish_ctx (&ctx, checksum_after_arglist);
/* NULL_TREE: the failure is attributed to the arg list, not one tree. */
13248 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13249 fold_check_failed (NULL_TREE, tem);
13254 /* Perform constant folding and related simplification of initializer
13255 expression EXPR. These behave identically to "fold_buildN" but ignore
13256 potential run-time traps and exceptions that fold must preserve. */
/* START_FOLD_INIT saves the trap/rounding-related flags, clears them, and
   sets folding_initializer; END_FOLD_INIT restores everything.  They are
   spelled as macros so the saved locals live in the caller's scope.
   NOTE(review): the "flag_trapv = 0;" line appears to be elided from this
   extract (saved and restored below but not visibly cleared) -- confirm
   against full source. */
13258 #define START_FOLD_INIT \
13259 int saved_signaling_nans = flag_signaling_nans;\
13260 int saved_trapping_math = flag_trapping_math;\
13261 int saved_rounding_math = flag_rounding_math;\
13262 int saved_trapv = flag_trapv;\
13263 int saved_folding_initializer = folding_initializer;\
13264 flag_signaling_nans = 0;\
13265 flag_trapping_math = 0;\
13266 flag_rounding_math = 0;\
13268 folding_initializer = 1;
13270 #define END_FOLD_INIT \
13271 flag_signaling_nans = saved_signaling_nans;\
13272 flag_trapping_math = saved_trapping_math;\
13273 flag_rounding_math = saved_rounding_math;\
13274 flag_trapv = saved_trapv;\
13275 folding_initializer = saved_folding_initializer;
/* As fold_build1, but fold as an initializer: trap/exception-preserving
   folding is disabled around the call via START_FOLD_INIT/END_FOLD_INIT
   (the wrapper boilerplate and return are elided in this excerpt).  */
13278 fold_build1_initializer (enum tree_code code, tree type, tree op)
13283 result = fold_build1 (code, type, op);
/* As fold_build2, but fold as an initializer (see START_FOLD_INIT);
   wrapper boilerplate elided in this excerpt.  */
13290 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13295 result = fold_build2 (code, type, op0, op1);
/* As fold_build3, but fold as an initializer (see START_FOLD_INIT);
   wrapper boilerplate elided in this excerpt.  */
13302 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13308 result = fold_build3 (code, type, op0, op1, op2);
/* As fold_build_call_array, but fold as an initializer (see
   START_FOLD_INIT); wrapper boilerplate elided in this excerpt.  */
13315 fold_build_call_array_initializer (tree type, tree fn,
13316 int nargs, tree *argarray)
13321 result = fold_build_call_array (type, fn, nargs, argarray);
13327 #undef START_FOLD_INIT
13328 #undef END_FOLD_INIT
13330 /* Determine if first argument is a multiple of second argument. Return 0 if
13331 it is not, or we cannot easily determine it to be.
13333 An example of the sort of thing we care about (at this point; this routine
13334 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13335 fold cases do now) is discovering that
13337 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13343 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13345 This code also handles discovering that
13347 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13349 is a multiple of 8 so we don't have to worry about dealing with a
13350 possible remainder.
13352 Note that we *look* inside a SAVE_EXPR only to determine how it was
13353 calculated; it is not safe for fold to do much of anything else with the
13354 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13355 at run time. For example, the latter example above *cannot* be implemented
13356 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13357 evaluation time of the original SAVE_EXPR is not necessarily the same at
13358 the time the new expression is evaluated. The only optimization of this
13359 sort that would be valid is changing
13361 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13365 SAVE_EXPR (I) * SAVE_EXPR (J)
13367 (where the same SAVE_EXPR (J) is used in the original and the
13368 transformed version). */
/* Return nonzero if TOP is known to be a multiple of BOTTOM (full
   contract in the comment block above).  NOTE(review): the case labels
   of the switch and several returns are elided from this excerpt; the
   groupings suggested below are inferred from the visible bodies --
   confirm against the full file.  */
13371 multiple_of_p (tree type, tree top, tree bottom)
/* Anything is trivially a multiple of itself.  */
13373 if (operand_equal_p (top, bottom, 0))
/* Only integer types are handled.  */
13376 if (TREE_CODE (type) != INTEGER_TYPE)
13379 switch (TREE_CODE (top))
13382 /* Bitwise and provides a power of two multiple. If the mask is
13383 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13384 if (!integer_pow2p (bottom))
/* Either operand being a multiple suffices (presumably MULT_EXPR).  */
13389 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13390 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Both operands must be multiples (presumably PLUS/MINUS_EXPR).  */
13394 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13395 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Shift by a constant: rewrite as a multiplication by 1<<op1 when
   that power of two fits, then recurse.  */
13398 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13402 op1 = TREE_OPERAND (top, 1);
13403 /* const_binop may not detect overflow correctly,
13404 so check for it explicitly here. */
13405 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13406 > TREE_INT_CST_LOW (op1)
13407 && TREE_INT_CST_HIGH (op1) == 0
13408 && 0 != (t1 = fold_convert (type,
13409 const_binop (LSHIFT_EXPR,
13412 && !TREE_OVERFLOW (t1))
13413 return multiple_of_p (type, t1, bottom);
13418 /* Can't handle conversions from non-integral or wider integral type. */
13419 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13420 || (TYPE_PRECISION (type)
13421 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13424 /* .. fall through ... */
13427 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Constant TOP: reject a zero or non-constant BOTTOM, and for unsigned
   types any negative operand, then decide with an exact modulus.  */
13430 if (TREE_CODE (bottom) != INTEGER_CST
13431 || integer_zerop (bottom)
13432 || (TYPE_UNSIGNED (type)
13433 && (tree_int_cst_sgn (top) < 0
13434 || tree_int_cst_sgn (bottom) < 0)))
13436 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13444 /* Return true if `t' is known to be non-negative. If the return
13445 value is based on the assumption that signed overflow is undefined,
13446 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13447 *STRICT_OVERFLOW_P. */
/* Return true if T is known to be non-negative (contract in the comment
   block above; *STRICT_OVERFLOW_P is set when the answer relies on
   signed overflow being undefined).  NOTE(review): most case labels,
   braces and "return 1/0" lines of the big switch are elided from this
   excerpt; inline notes below only restate what the visible fragments
   show.  */
13450 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
13452 if (t == error_mark_node)
/* Unsigned values are trivially non-negative.  */
13455 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13458 switch (TREE_CODE (t))
13461 /* Query VRP to see if it has recorded any information about
13462 the range of this object. */
13463 return ssa_name_nonnegative_p (t);
13466 /* We can't return 1 if flag_wrapv is set because
13467 ABS_EXPR<INT_MIN> = INT_MIN. */
13468 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13470 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
13472 *strict_overflow_p = true;
/* Integer constant: non-negative iff its sign is.  */
13478 return tree_int_cst_sgn (t) >= 0;
/* Real constant: non-negative iff not negative.  */
13481 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13483 case POINTER_PLUS_EXPR:
13485 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13486 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13488 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13489 strict_overflow_p));
13491 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13492 both unsigned and at least 2 bits shorter than the result. */
13493 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13494 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13495 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13497 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13498 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13499 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13500 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13502 unsigned int prec = MAX (TYPE_PRECISION (inner1),
13503 TYPE_PRECISION (inner2)) + 1;
13504 return prec < TYPE_PRECISION (TREE_TYPE (t));
13510 if (FLOAT_TYPE_P (TREE_TYPE (t)))
13512 /* x * x for floating point x is always non-negative. */
13513 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
13515 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13517 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13518 strict_overflow_p));
13521 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13522 both unsigned and their total bits is shorter than the result. */
13523 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
13524 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
13525 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
13527 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
13528 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
13529 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13530 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13531 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
13532 < TYPE_PRECISION (TREE_TYPE (t));
/* Either operand non-negative suffices (case label elided; presumably
   BIT_IOR/MIN-style codes -- confirm against the full file).  */
13538 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13540 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13541 strict_overflow_p));
13547 case TRUNC_DIV_EXPR:
13548 case CEIL_DIV_EXPR:
13549 case FLOOR_DIV_EXPR:
13550 case ROUND_DIV_EXPR:
/* A quotient is non-negative when both operands are.  */
13551 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13553 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13554 strict_overflow_p));
13556 case TRUNC_MOD_EXPR:
13557 case CEIL_MOD_EXPR:
13558 case FLOOR_MOD_EXPR:
13559 case ROUND_MOD_EXPR:
13561 case NON_LVALUE_EXPR:
13563 case FIX_TRUNC_EXPR:
/* These only depend on the first operand.  */
13564 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13565 strict_overflow_p);
13567 case COMPOUND_EXPR:
13569 case GIMPLE_MODIFY_STMT:
/* Value of a compound/modify is its second operand.  */
13570 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13571 strict_overflow_p);
/* BIND_EXPR-style body: value is the last expression.  */
13574 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
13575 strict_overflow_p);
/* Conditional: both arms must be non-negative.  */
13578 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13580 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
13581 strict_overflow_p));
/* Conversion: decide by inner/outer type combinations.  */
13585 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13586 tree outer_type = TREE_TYPE (t);
13588 if (TREE_CODE (outer_type) == REAL_TYPE)
13590 if (TREE_CODE (inner_type) == REAL_TYPE)
13591 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13592 strict_overflow_p);
13593 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13595 if (TYPE_UNSIGNED (inner_type))
13597 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13598 strict_overflow_p);
13601 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13603 if (TREE_CODE (inner_type) == REAL_TYPE)
13604 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
13605 strict_overflow_p);
13606 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13607 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13608 && TYPE_UNSIGNED (inner_type);
/* TARGET_EXPR: look at what the initializer stores into the slot.  */
13615 tree temp = TARGET_EXPR_SLOT (t);
13616 t = TARGET_EXPR_INITIAL (t);
13618 /* If the initializer is non-void, then it's a normal expression
13619 that will be assigned to the slot. */
13620 if (!VOID_TYPE_P (t))
13621 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
13623 /* Otherwise, the initializer sets the slot in some way. One common
13624 way is an assignment statement at the end of the initializer. */
13627 if (TREE_CODE (t) == BIND_EXPR)
13628 t = expr_last (BIND_EXPR_BODY (t));
13629 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13630 || TREE_CODE (t) == TRY_CATCH_EXPR)
13631 t = expr_last (TREE_OPERAND (t, 0));
13632 else if (TREE_CODE (t) == STATEMENT_LIST)
13637 if ((TREE_CODE (t) == MODIFY_EXPR
13638 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
13639 && GENERIC_TREE_OPERAND (t, 0) == temp)
13640 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13641 strict_overflow_p);
/* CALL_EXPR to a known builtin: classify by function code.  */
13648 tree fndecl = get_callee_fndecl (t);
13649 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
13650 switch (DECL_FUNCTION_CODE (fndecl))
/* Builtins whose result is always non-negative.  */
13652 CASE_FLT_FN (BUILT_IN_ACOS):
13653 CASE_FLT_FN (BUILT_IN_ACOSH):
13654 CASE_FLT_FN (BUILT_IN_CABS):
13655 CASE_FLT_FN (BUILT_IN_COSH):
13656 CASE_FLT_FN (BUILT_IN_ERFC):
13657 CASE_FLT_FN (BUILT_IN_EXP):
13658 CASE_FLT_FN (BUILT_IN_EXP10):
13659 CASE_FLT_FN (BUILT_IN_EXP2):
13660 CASE_FLT_FN (BUILT_IN_FABS):
13661 CASE_FLT_FN (BUILT_IN_FDIM):
13662 CASE_FLT_FN (BUILT_IN_HYPOT):
13663 CASE_FLT_FN (BUILT_IN_POW10):
13664 CASE_INT_FN (BUILT_IN_FFS):
13665 CASE_INT_FN (BUILT_IN_PARITY):
13666 CASE_INT_FN (BUILT_IN_POPCOUNT):
13667 case BUILT_IN_BSWAP32:
13668 case BUILT_IN_BSWAP64:
13672 CASE_FLT_FN (BUILT_IN_SQRT):
13673 /* sqrt(-0.0) is -0.0. */
13674 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
13676 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13677 strict_overflow_p);
13679 CASE_FLT_FN (BUILT_IN_ASINH):
13680 CASE_FLT_FN (BUILT_IN_ATAN):
13681 CASE_FLT_FN (BUILT_IN_ATANH):
13682 CASE_FLT_FN (BUILT_IN_CBRT):
13683 CASE_FLT_FN (BUILT_IN_CEIL):
13684 CASE_FLT_FN (BUILT_IN_ERF):
13685 CASE_FLT_FN (BUILT_IN_EXPM1):
13686 CASE_FLT_FN (BUILT_IN_FLOOR):
13687 CASE_FLT_FN (BUILT_IN_FMOD):
13688 CASE_FLT_FN (BUILT_IN_FREXP):
13689 CASE_FLT_FN (BUILT_IN_LCEIL):
13690 CASE_FLT_FN (BUILT_IN_LDEXP):
13691 CASE_FLT_FN (BUILT_IN_LFLOOR):
13692 CASE_FLT_FN (BUILT_IN_LLCEIL):
13693 CASE_FLT_FN (BUILT_IN_LLFLOOR):
13694 CASE_FLT_FN (BUILT_IN_LLRINT):
13695 CASE_FLT_FN (BUILT_IN_LLROUND):
13696 CASE_FLT_FN (BUILT_IN_LRINT):
13697 CASE_FLT_FN (BUILT_IN_LROUND):
13698 CASE_FLT_FN (BUILT_IN_MODF):
13699 CASE_FLT_FN (BUILT_IN_NEARBYINT):
13700 CASE_FLT_FN (BUILT_IN_RINT):
13701 CASE_FLT_FN (BUILT_IN_ROUND):
13702 CASE_FLT_FN (BUILT_IN_SCALB):
13703 CASE_FLT_FN (BUILT_IN_SCALBLN):
13704 CASE_FLT_FN (BUILT_IN_SCALBN):
13705 CASE_FLT_FN (BUILT_IN_SIGNBIT):
13706 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
13707 CASE_FLT_FN (BUILT_IN_SINH):
13708 CASE_FLT_FN (BUILT_IN_TANH):
13709 CASE_FLT_FN (BUILT_IN_TRUNC):
13710 /* True if the 1st argument is nonnegative. */
13711 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13712 strict_overflow_p);
13714 CASE_FLT_FN (BUILT_IN_FMAX):
13715 /* True if the 1st OR 2nd arguments are nonnegative. */
13716 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13718 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13719 strict_overflow_p)));
13721 CASE_FLT_FN (BUILT_IN_FMIN):
13722 /* True if the 1st AND 2nd arguments are nonnegative. */
13723 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13725 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13726 strict_overflow_p)));
13728 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13729 /* True if the 2nd argument is nonnegative. */
13730 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
13731 strict_overflow_p);
13733 CASE_FLT_FN (BUILT_IN_POWI):
13734 /* True if the 1st argument is nonnegative or the second
13735 argument is an even integer. */
13736 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
13738 tree arg1 = CALL_EXPR_ARG (t, 1);
13739 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
13742 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13743 strict_overflow_p);
13745 CASE_FLT_FN (BUILT_IN_POW):
13746 /* True if the 1st argument is nonnegative or the second
13747 argument is an even integer valued real. */
13748 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
13753 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
13754 n = real_to_integer (&c);
13757 REAL_VALUE_TYPE cint;
13758 real_from_integer (&cint, VOIDmode, n,
13759 n < 0 ? -1 : 0, 0);
13760 if (real_identical (&c, &cint))
13764 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
13765 strict_overflow_p);
13772 /* ... fall through ... */
/* Default: a truth-valued expression is 0 or 1, hence non-negative,
   except for a signed 1-bit type where "true" is -1.  */
13776 tree type = TREE_TYPE (t);
13777 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13778 && truth_value_p (TREE_CODE (t)))
13779 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13780 have a signed:1 type (where the value is -1 and 0). */
13785 /* We don't know sign of `t', so be conservative and return false. */
13789 /* Return true if `t' is known to be non-negative. Handle warnings
13790 about undefined signed overflow. */
/* Return true if T is known to be non-negative, issuing the
   -Wstrict-overflow warning when the answer relied on undefined signed
   overflow.  (Excerpt elides the warning string's final fragment and
   the return statement.)  */
13793 tree_expr_nonnegative_p (tree t)
13795 bool ret, strict_overflow_p;
13797 strict_overflow_p = false;
13798 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13799 if (strict_overflow_p)
13800 fold_overflow_warning (("assuming signed overflow does not occur when "
13801 "determining that expression is always "
13803 WARN_STRICT_OVERFLOW_MISC);
13807 /* Return true when T is an address and is known to be nonzero.
13808 For floating point we further ensure that T is not denormal.
13809 Similar logic is present in nonzero_address in rtlanal.h.
13811 If the return value is based on the assumption that signed overflow
13812 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13813 change *STRICT_OVERFLOW_P. */
/* Return true when T is known to be nonzero (contract in the comment
   block above).  NOTE(review): many case labels, braces and "return
   true/false" lines of the switch are elided from this excerpt; inline
   notes restate only the visible fragments.  */
13816 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13818 tree type = TREE_TYPE (t);
13819 bool sub_strict_overflow_p;
13821 /* Doing something useful for floating point would need more work. */
13822 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
13825 switch (TREE_CODE (t))
13828 /* Query VRP to see if it has recorded any information about
13829 the range of this object. */
13830 return ssa_name_nonzero_p (t);
/* Unary case: nonzero iff the operand is.  */
13833 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13834 strict_overflow_p);
/* Integer constant.  */
13837 return !integer_zerop (t);
13839 case POINTER_PLUS_EXPR:
13841 if (TYPE_OVERFLOW_UNDEFINED (type))
13843 /* With the presence of negative values it is hard
13844 to say something. */
13845 sub_strict_overflow_p = false;
13846 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13847 &sub_strict_overflow_p)
13848 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13849 &sub_strict_overflow_p))
13851 /* One of operands must be positive and the other non-negative. */
13852 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13853 overflows, on a twos-complement machine the sum of two
13854 nonnegative numbers can never be zero. */
13855 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13857 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13858 strict_overflow_p));
/* Product case: nonzero * nonzero is nonzero only when signed
   overflow is undefined.  */
13863 if (TYPE_OVERFLOW_UNDEFINED (type))
13865 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13867 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13868 strict_overflow_p))
13870 *strict_overflow_p = true;
/* Conversion: preserved only when the outer type is at least as
   wide as the inner one.  */
13878 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
13879 tree outer_type = TREE_TYPE (t);
13881 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13882 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13883 strict_overflow_p));
/* ADDR_EXPR-style case: a taken address is nonzero unless the base
   may legally resolve to NULL.  */
13889 tree base = get_base_address (TREE_OPERAND (t, 0));
13894 /* Weak declarations may link to NULL. */
13895 if (VAR_OR_FUNCTION_DECL_P (base))
13896 return !DECL_WEAK (base);
13898 /* Constants are never weak. */
13899 if (CONSTANT_CLASS_P (base))
/* Conditional: nonzero when both arms are.  */
13906 sub_strict_overflow_p = false;
13907 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13908 &sub_strict_overflow_p)
13909 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13910 &sub_strict_overflow_p))
13912 if (sub_strict_overflow_p)
13913 *strict_overflow_p = true;
/* MIN-style case: nonzero when both operands are.  */
13919 sub_strict_overflow_p = false;
13920 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13921 &sub_strict_overflow_p)
13922 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13923 &sub_strict_overflow_p))
13925 if (sub_strict_overflow_p)
13926 *strict_overflow_p = true;
/* MAX-style case.  */
13931 sub_strict_overflow_p = false;
13932 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13933 &sub_strict_overflow_p))
13935 if (sub_strict_overflow_p)
13936 *strict_overflow_p = true;
13938 /* When both operands are nonzero, then MAX must be too. */
13939 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13940 strict_overflow_p))
13943 /* MAX where operand 0 is positive is positive. */
13944 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
13945 strict_overflow_p);
13947 /* MAX where operand 1 is positive is positive. */
13948 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13949 &sub_strict_overflow_p)
13950 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
13951 &sub_strict_overflow_p))
13953 if (sub_strict_overflow_p)
13954 *strict_overflow_p = true;
13959 case COMPOUND_EXPR:
13961 case GIMPLE_MODIFY_STMT:
/* Value of a compound/modify is its second operand.  */
13963 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
13964 strict_overflow_p);
13967 case NON_LVALUE_EXPR:
13968 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13969 strict_overflow_p);
/* BIT_IOR-style case: either operand nonzero suffices.  */
13972 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13974 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
13975 strict_overflow_p));
/* CALL_EXPR case: alloca never returns NULL.  */
13978 return alloca_call_p (t);
13986 /* Return true when T is an address and is known to be nonzero.
13987 Handle warnings about undefined signed overflow. */
/* Return true when T is known to be nonzero, issuing the
   -Wstrict-overflow warning when the answer relied on undefined signed
   overflow.  (Excerpt elides the warning string's final fragment and
   the return statement.)  */
13990 tree_expr_nonzero_p (tree t)
13992 bool ret, strict_overflow_p;
13994 strict_overflow_p = false;
13995 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
13996 if (strict_overflow_p)
13997 fold_overflow_warning (("assuming signed overflow does not occur when "
13998 "determining that expression is always "
14000 WARN_STRICT_OVERFLOW_MISC);
14004 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14005 attempt to fold the expression to a constant without modifying TYPE,
14008 If the expression could be simplified to a constant, then return
14009 the constant. If the expression would not be simplified to a
14010 constant, then return NULL_TREE. */
/* Fold CODE (OP0, OP1) of type TYPE and return the result only if it
   simplified all the way to a constant; otherwise NULL_TREE.  */
14013 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14015 tree tem = fold_binary (code, type, op0, op1);
14016 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14019 /* Given the components of a unary expression CODE, TYPE and OP0,
14020 attempt to fold the expression to a constant without modifying
14023 If the expression could be simplified to a constant, then return
14024 the constant. If the expression would not be simplified to a
14025 constant, then return NULL_TREE. */
/* Fold the unary CODE (OP0) of type TYPE and return the result only if
   it simplified all the way to a constant; otherwise NULL_TREE.  */
14028 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14030 tree tem = fold_unary (code, type, op0);
14031 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14034 /* If EXP represents referencing an element in a constant string
14035 (either via pointer arithmetic or array indexing), return the
14036 tree representing the value accessed, otherwise return NULL. */
/* If EXP reads one character of a constant string (via *p or s[i]),
   return that character as an INTEGER_CST of EXP's type, else NULL.
   NOTE(review): several lines (braces, the INDIRECT_REF/ARRAY_REF else
   branch, part of the final condition) are elided from this excerpt.  */
14039 fold_read_from_constant_string (tree exp)
14041 if ((TREE_CODE (exp) == INDIRECT_REF
14042 || TREE_CODE (exp) == ARRAY_REF)
14043 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14045 tree exp1 = TREE_OPERAND (exp, 0);
14049 if (TREE_CODE (exp) == INDIRECT_REF)
14050 string = string_constant (exp1, &index);
/* ARRAY_REF path: normalize the index against the array's lower bound.  */
14053 tree low_bound = array_ref_low_bound (exp);
14054 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14056 /* Optimize the special-case of a zero lower bound.
14058 We convert the low_bound to sizetype to avoid some problems
14059 with constant folding. (E.g. suppose the lower bound is 1,
14060 and its mode is QI. Without the conversion, (ARRAY
14061 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14062 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14063 if (! integer_zerop (low_bound))
14064 index = size_diffop (index, fold_convert (sizetype, low_bound));
/* Only fold when the string is a STRING_CST of single-byte integer
   elements and INDEX is a constant within its length.  */
14070 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14071 && TREE_CODE (string) == STRING_CST
14072 && TREE_CODE (index) == INTEGER_CST
14073 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14074 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14076 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14077 return build_int_cst_type (TREE_TYPE (exp),
14078 (TREE_STRING_POINTER (string)
14079 [TREE_INT_CST_LOW (index)]));
14084 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14085 an integer constant or real constant.
14087 TYPE is the type of the result. */
/* Return -ARG0 as a constant of type TYPE; ARG0 must be an INTEGER_CST
   or REAL_CST (other codes are unreachable).  Excerpt elides case
   labels, the low/high out-parameter arguments and the final return.  */
14090 fold_negate_const (tree arg0, tree type)
14092 tree t = NULL_TREE;
14094 switch (TREE_CODE (arg0))
/* INTEGER_CST: negate the double-int and refit, flagging overflow for
   signed types only.  */
14098 unsigned HOST_WIDE_INT low;
14099 HOST_WIDE_INT high;
14100 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14101 TREE_INT_CST_HIGH (arg0),
14103 t = force_fit_type_double (type, low, high, 1,
14104 (overflow | TREE_OVERFLOW (arg0))
14105 && !TYPE_UNSIGNED (type));
/* REAL_CST: flip the sign bit.  */
14110 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14114 gcc_unreachable ();
14120 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14121 an integer constant or real constant.
14123 TYPE is the type of the result. */
/* Return |ARG0| as a constant of type TYPE; ARG0 must be an INTEGER_CST
   or REAL_CST.  Excerpt elides case labels, "t = arg0" assignments and
   the final return.  */
14126 fold_abs_const (tree arg0, tree type)
14128 tree t = NULL_TREE;
14130 switch (TREE_CODE (arg0))
14133 /* If the value is unsigned, then the absolute value is
14134 the same as the ordinary value. */
14135 if (TYPE_UNSIGNED (type))
14137 /* Similarly, if the value is non-negative. */
14138 else if (INT_CST_LT (integer_minus_one_node, arg0))
14140 /* If the value is negative, then the absolute value is
/* negated: negate the double-int and refit.  */
14144 unsigned HOST_WIDE_INT low;
14145 HOST_WIDE_INT high;
14146 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14147 TREE_INT_CST_HIGH (arg0),
14149 t = force_fit_type_double (type, low, high, -1,
14150 overflow | TREE_OVERFLOW (arg0));
/* REAL_CST: clear the sign only when it is set.  */
14155 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14156 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14162 gcc_unreachable ();
14168 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14169 constant. TYPE is the type of the result. */
/* Return ~ARG0 (bitwise complement) as an INTEGER_CST of type TYPE,
   preserving ARG0's overflow flag.  (Final return elided in excerpt.)  */
14172 fold_not_const (tree arg0, tree type)
14174 tree t = NULL_TREE;
14176 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14178 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14179 ~TREE_INT_CST_HIGH (arg0), 0,
14180 TREE_OVERFLOW (arg0));
14185 /* Given CODE, a relational operator, the target type, TYPE and two
14186 constant operands OP0 and OP1, return the result of the
14187 relational operation. If the result is not a compile time
14188 constant, then return NULL_TREE. */
/* Evaluate the relational CODE on constant operands OP0 and OP1,
   returning a constant boolean of type TYPE, or NULL_TREE when the
   result is not a compile-time constant.  NOTE(review): the NaN case
   labels, the swap/invert bookkeeping and several returns are elided
   from this excerpt.  */
14191 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14193 int result, invert;
14195 /* From here on, the only cases we handle are when the result is
14196 known to be a constant. */
14198 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14200 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14201 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14203 /* Handle the cases where either operand is a NaN. */
14204 if (real_isnan (c0) || real_isnan (c1))
14214 case UNORDERED_EXPR:
/* Trapping comparisons against NaN cannot be folded away.  */
14228 if (flag_trapping_math)
14234 gcc_unreachable ();
14237 return constant_boolean_node (result, type);
/* Ordinary real comparison.  */
14240 return constant_boolean_node (real_compare (code, c0, c1), type);
14243 /* Handle equality/inequality of complex constants. */
14244 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14246 tree rcond = fold_relational_const (code, type,
14247 TREE_REALPART (op0),
14248 TREE_REALPART (op1));
14249 tree icond = fold_relational_const (code, type,
14250 TREE_IMAGPART (op0),
14251 TREE_IMAGPART (op1));
14252 if (code == EQ_EXPR)
14253 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14254 else if (code == NE_EXPR)
14255 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14260 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14262 To compute GT, swap the arguments and do LT.
14263 To compute GE, do LT and invert the result.
14264 To compute LE, swap the arguments, do LT and invert the result.
14265 To compute NE, do EQ and invert the result.
14267 Therefore, the code below must handle only EQ and LT. */
14269 if (code == LE_EXPR || code == GT_EXPR)
14274 code = swap_tree_comparison (code);
14277 /* Note that it is safe to invert for real values here because we
14278 have already handled the one case that it matters. */
14281 if (code == NE_EXPR || code == GE_EXPR)
14284 code = invert_tree_comparison (code, false);
14287 /* Compute a result for LT or EQ if args permit;
14288 Otherwise return T. */
14289 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14291 if (code == EQ_EXPR)
14292 result = tree_int_cst_equal (op0, op1);
14293 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14294 result = INT_CST_LT_UNSIGNED (op0, op1);
/* Signed comparison.  */
14296 result = INT_CST_LT (op0, op1);
14303 return constant_boolean_node (result, type);
14306 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14307 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
/* Wrap EXPR in a CLEANUP_POINT_EXPR of type TYPE, but only when EXPR
   has side effects that actually need one; otherwise return EXPR
   unwrapped (the "return expr" lines are elided in this excerpt).  */
14311 fold_build_cleanup_point_expr (tree type, tree expr)
14313 /* If the expression does not have side effects then we don't have to wrap
14314 it with a cleanup point expression. */
14315 if (!TREE_SIDE_EFFECTS (expr))
14318 /* If the expression is a return, check to see if the expression inside the
14319 return has no side effects or the right hand side of the modify expression
14320 inside the return. If either don't have side effects set we don't need to
14321 wrap the expression in a cleanup point expression. Note we don't check the
14322 left hand side of the modify because it should always be a return decl. */
14323 if (TREE_CODE (expr) == RETURN_EXPR)
14325 tree op = TREE_OPERAND (expr, 0);
14326 if (!op || !TREE_SIDE_EFFECTS (op))
14328 op = TREE_OPERAND (op, 1);
14329 if (!TREE_SIDE_EFFECTS (op))
14333 return build1 (CLEANUP_POINT_EXPR, type, expr);
14336 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14337 of an indirection through OP0, or NULL_TREE if no simplification is
/* Given a pointer value OP0 and a result type TYPE, return a simplified
   tree for *OP0, or NULL_TREE when no simplification applies.
   NOTE(review): local declarations, STRIP_NOPS and several returns are
   elided from this excerpt.  */
14341 fold_indirect_ref_1 (tree type, tree op0)
14347 subtype = TREE_TYPE (sub);
14348 if (!POINTER_TYPE_P (subtype))
14351 if (TREE_CODE (sub) == ADDR_EXPR)
14353 tree op = TREE_OPERAND (sub, 0);
14354 tree optype = TREE_TYPE (op);
14355 /* *&CONST_DECL -> to the value of the const decl. */
14356 if (TREE_CODE (op) == CONST_DECL)
14357 return DECL_INITIAL (op);
14358 /* *&p => p; make sure to handle *&"str"[cst] here. */
14359 if (type == optype)
14361 tree fop = fold_read_from_constant_string (op);
14367 /* *(foo *)&fooarray => fooarray[0] */
14368 else if (TREE_CODE (optype) == ARRAY_TYPE
14369 && type == TREE_TYPE (optype))
14371 tree type_domain = TYPE_DOMAIN (optype);
14372 tree min_val = size_zero_node;
14373 if (type_domain && TYPE_MIN_VALUE (type_domain))
14374 min_val = TYPE_MIN_VALUE (type_domain);
14375 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14377 /* *(foo *)&complexfoo => __real__ complexfoo */
14378 else if (TREE_CODE (optype) == COMPLEX_TYPE
14379 && type == TREE_TYPE (optype))
14380 return fold_build1 (REALPART_EXPR, type, op);
14381 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14382 else if (TREE_CODE (optype) == VECTOR_TYPE
14383 && type == TREE_TYPE (optype))
14385 tree part_width = TYPE_SIZE (type);
14386 tree index = bitsize_int (0);
14387 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14391 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14392 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14393 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14395 tree op00 = TREE_OPERAND (sub, 0);
14396 tree op01 = TREE_OPERAND (sub, 1);
14400 op00type = TREE_TYPE (op00);
14401 if (TREE_CODE (op00) == ADDR_EXPR
14402 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
14403 && type == TREE_TYPE (TREE_TYPE (op00type)))
/* Offset must equal the size of the real part.  */
14405 tree size = TYPE_SIZE_UNIT (type);
14406 if (tree_int_cst_equal (size, op01))
14407 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
14411 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14412 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14413 && type == TREE_TYPE (TREE_TYPE (subtype)))
14416 tree min_val = size_zero_node;
14417 sub = build_fold_indirect_ref (sub);
14418 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14419 if (type_domain && TYPE_MIN_VALUE (type_domain))
14420 min_val = TYPE_MIN_VALUE (type_domain);
14421 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
14427 /* Builds an expression for an indirection through T, simplifying some
/* Build *T, using fold_indirect_ref_1 to simplify when possible and
   falling back to a plain INDIRECT_REF (the "if (sub) return sub;" line
   is elided in this excerpt).  */
14431 build_fold_indirect_ref (tree t)
14433 tree type = TREE_TYPE (TREE_TYPE (t));
14434 tree sub = fold_indirect_ref_1 (type, t);
14439 return build1 (INDIRECT_REF, type, t);
14442 /* Given an INDIRECT_REF T, return either T or a simplified version. */
/* Given an INDIRECT_REF T, return its simplification via
   fold_indirect_ref_1 (returns elided in this excerpt), else T.  */
14445 fold_indirect_ref (tree t)
14447 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
14455 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14456 whose result is ignored. The type of the returned tree need not be
14457 the same as the original expression. */
/* Strip non-trapping, non-side-effecting wrappers from T when its value
   is ignored (full contract in the comment above).  NOTE(review): the
   enclosing for(;;) loop, several case labels, break/return statements
   and the default cases are elided from this excerpt.  */
14460 fold_ignored_result (tree t)
14462 if (!TREE_SIDE_EFFECTS (t))
14463 return integer_zero_node;
14466 switch (TREE_CODE_CLASS (TREE_CODE (t)))
/* Unary class: only the operand can have side effects.  */
14469 t = TREE_OPERAND (t, 0);
14473 case tcc_comparison:
/* Binary/comparison: keep whichever operand has the side effects.  */
14474 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14475 t = TREE_OPERAND (t, 0);
14476 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14477 t = TREE_OPERAND (t, 1);
14482 case tcc_expression:
14483 switch (TREE_CODE (t))
14485 case COMPOUND_EXPR:
14486 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14488 t = TREE_OPERAND (t, 0);
/* COND_EXPR-style: strippable only when both arms are effect-free.  */
14492 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14493 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14495 t = TREE_OPERAND (t, 0);
14508 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
14509 This can only be applied to objects of a sizetype. */
/* Return VALUE rounded up to a multiple of DIVISOR; VALUE must be of a
   sizetype.  NOTE(review): braces, "return value" statements and the
   overflow/carry handling between the visible lines are elided from
   this excerpt.  */
14512 round_up (tree value, int divisor)
14514 tree div = NULL_TREE;
14516 gcc_assert (divisor > 0);
14520 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14521 have to do anything. Only do this when we are not given a const,
14522 because in that case, this check is more expensive than just
14524 if (TREE_CODE (value) != INTEGER_CST)
14526 div = build_int_cst (TREE_TYPE (value), divisor);
14528 if (multiple_of_p (TREE_TYPE (value), value, div))
14532 /* If divisor is a power of two, simplify this to bit manipulation. */
14533 if (divisor == (divisor & -divisor))
14535 if (TREE_CODE (value) == INTEGER_CST)
/* Constant case: add divisor-1 then mask, tracking overflow.  */
14537 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
14538 unsigned HOST_WIDE_INT high;
14541 if ((low & (divisor - 1)) == 0)
14544 overflow_p = TREE_OVERFLOW (value);
14545 high = TREE_INT_CST_HIGH (value);
14546 low &= ~(divisor - 1);
14555 return force_fit_type_double (TREE_TYPE (value), low, high,
/* Non-constant case: (value + (divisor-1)) & -divisor.  */
14562 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14563 value = size_binop (PLUS_EXPR, value, t);
14564 t = build_int_cst (TREE_TYPE (value), -divisor);
14565 value = size_binop (BIT_AND_EXPR, value, t);
/* General divisor: ceil-divide then multiply back.  */
14571 div = build_int_cst (TREE_TYPE (value), divisor);
14572 value = size_binop (CEIL_DIV_EXPR, value, div);
14573 value = size_binop (MULT_EXPR, value, div);
14579 /* Likewise, but round down. */
14582 round_down (tree value, int divisor)
14584 tree div = NULL_TREE;
/* Same precondition as round_up: DIVISOR must be positive.  */
14586 gcc_assert (divisor > 0);
14590 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14591 have to do anything. Only do this when we are not given a const,
14592 because in that case, this check is more expensive than just
14594 if (TREE_CODE (value) != INTEGER_CST)
14596 div = build_int_cst (TREE_TYPE (value), divisor);
/* If VALUE is provably a multiple of DIVISOR, it is already rounded
   down (the return is elided in this excerpt).  */
14598 if (multiple_of_p (TREE_TYPE (value), value, div))
14602 /* If divisor is a power of two, simplify this to bit manipulation. */
14603 if (divisor == (divisor & -divisor))
/* Power-of-two divisor: rounding down is a single mask,
   value = value & -divisor (clears the low log2(divisor) bits).  */
14607 t = build_int_cst (TREE_TYPE (value), -divisor);
14608 value = size_binop (BIT_AND_EXPR, value, t);
/* General divisor: value = floor(value / divisor) * divisor.  */
14613 div = build_int_cst (TREE_TYPE (value), divisor);
14614 value = size_binop (FLOOR_DIV_EXPR, value, div);
14615 value = size_binop (MULT_EXPR, value, div);
14621 /* Returns the pointer to the base of the object addressed by EXP and
14622 extracts the information about the offset of the access, storing it
14623 to PBITPOS and POFFSET. */
14626 split_address_to_core_and_offset (tree exp,
14627 HOST_WIDE_INT *pbitpos, tree *poffset)
14630 enum machine_mode mode;
/* Scratch outputs required by get_inner_reference; only the base,
   bit position and variable offset are of interest to callers.  */
14631 int unsignedp, volatilep;
14632 HOST_WIDE_INT bitsize;
14634 if (TREE_CODE (exp) == ADDR_EXPR)
/* For &object, decompose the addressed object into its innermost base
   plus constant bit position (*PBITPOS) and variable offset (*POFFSET),
   then rebuild an address of that base.  */
14636 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14637 poffset, &mode, &unsignedp, &volatilep,
14639 core = fold_addr_expr (core);
/* Non-ADDR_EXPR case (elided): EXP itself is the core and there is no
   variable offset.  */
14645 *poffset = NULL_TREE;
14651 /* Returns true if addresses of E1 and E2 differ by a constant, false
14652 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14655 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14658 HOST_WIDE_INT bitpos1, bitpos2;
14659 tree toffset1, toffset2, tdiff, type;
/* Decompose both addresses into base + constant bitpos + tree offset.  */
14661 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14662 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
/* Bail out (elided return) unless both bit positions are whole bytes
   and the two bases are structurally identical.  */
14664 if (bitpos1 % BITS_PER_UNIT != 0
14665 || bitpos2 % BITS_PER_UNIT != 0
14666 || !operand_equal_p (core1, core2, 0))
14669 if (toffset1 && toffset2)
/* Both offsets present: their difference must fold to a constant that
   fits in a HOST_WIDE_INT; otherwise the difference is not constant.  */
14671 type = TREE_TYPE (toffset1);
14672 if (type != TREE_TYPE (toffset2))
14673 toffset2 = fold_convert (type, toffset2);
14675 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14676 if (!cst_and_fits_in_hwi (tdiff))
14679 *diff = int_cst_value (tdiff);
14681 else if (toffset1 || toffset2)
14683 /* If only one of the offsets is non-constant, the difference cannot
/* Fold the byte difference of the constant bit positions into *DIFF.  */
14690 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14694 /* Simplify the floating point expression EXP when the sign of the
14695 result is not significant. Return NULL_TREE if no simplification
14699 fold_strip_sign_ops (tree exp)
14703 switch (TREE_CODE (exp))
14707 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14708 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
14712 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
14714 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
14715 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14716 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
14717 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
14718 arg0 ? arg0 : TREE_OPERAND (exp, 0),
14719 arg1 ? arg1 : TREE_OPERAND (exp, 1));
14722 case COMPOUND_EXPR:
14723 arg0 = TREE_OPERAND (exp, 0);
14724 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14726 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
14730 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
14731 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
14733 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
14734 arg0 ? arg0 : TREE_OPERAND (exp, 1),
14735 arg1 ? arg1 : TREE_OPERAND (exp, 2));
14740 const enum built_in_function fcode = builtin_mathfn_code (exp);
14743 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14744 /* Strip copysign function call, return the 1st argument. */
14745 arg0 = CALL_EXPR_ARG (exp, 0);
14746 arg1 = CALL_EXPR_ARG (exp, 1);
14747 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
14750 /* Strip sign ops from the argument of "odd" math functions. */
14751 if (negate_mathfn_p (fcode))
14753 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
14755 return build_call_expr (get_callee_fndecl (exp), 1, arg0);