1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.  */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
52 #include "coretypes.h"
63 #include "langhooks.h"
66 /* Non-zero if we are folding constants inside an initializer; zero otherwise.  */
68 int folding_initializer = 0;
70 /* The following constants represent a bit based encoding of GCC's
71 comparison operators. This encoding simplifies transformations
72 on relational comparison operators, such as AND and OR. */
73 enum comparison_code {
/* NOTE(review): the enumerator list is elided in this listing.  */
/* Forward declarations for the static folding helpers defined later
   in this file.  */
92 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
93 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
94 static bool negate_mathfn_p (enum built_in_function);
95 static bool negate_expr_p (tree);
96 static tree negate_expr (tree);
97 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
98 static tree associate_trees (tree, tree, enum tree_code, tree);
99 static tree const_binop (enum tree_code, tree, tree, int);
100 static enum comparison_code comparison_to_compcode (enum tree_code);
101 static enum tree_code compcode_to_comparison (enum comparison_code);
102 static tree combine_comparisons (enum tree_code, enum tree_code,
103 enum tree_code, tree, tree, tree);
104 static int truth_value_p (enum tree_code);
105 static int operand_equal_for_comparison_p (tree, tree, tree);
106 static int twoval_comparison_p (tree, tree *, tree *, int *);
107 static tree eval_subst (tree, tree, tree, tree, tree);
108 static tree pedantic_omit_one_operand (tree, tree, tree);
109 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
110 static tree make_bit_field_ref (tree, tree, int, int, int);
111 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
112 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
113 enum machine_mode *, int *, int *,
115 static int all_ones_mask_p (tree, int);
116 static tree sign_bit_p (tree, tree);
117 static int simple_operand_p (tree);
118 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
119 static tree range_predecessor (tree);
120 static tree range_successor (tree);
121 static tree make_range (tree, int *, tree *, tree *);
122 static tree build_range_check (tree, tree, int, tree, tree);
123 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
125 static tree fold_range_test (enum tree_code, tree, tree, tree);
126 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
127 static tree unextend (tree, int, int, tree);
128 static tree fold_truthop (enum tree_code, tree, tree, tree);
129 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
130 static tree extract_muldiv (tree, tree, enum tree_code, tree);
131 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
132 static int multiple_of_p (tree, tree, tree);
133 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
136 static bool fold_real_zero_addition_p (tree, tree, int);
137 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
139 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
140 static tree fold_div_compare (enum tree_code, tree, tree, tree);
141 static bool reorder_operands_p (tree, tree);
142 static tree fold_negate_const (tree, tree);
143 static tree fold_not_const (tree, tree);
144 static tree fold_relational_const (enum tree_code, tree, tree, tree);
145 static int native_encode_expr (tree, unsigned char *, int);
146 static tree native_interpret_expr (tree, unsigned char *, int);
149 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
150 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
151 and SUM1. Then this yields nonzero if overflow occurred during the
154 Overflow occurs if A and B have the same sign, but A and SUM differ in
155 sign. Use `^' to test whether signs differ, and `< 0' to isolate the sign bit.  */
157 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
159 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
160 We do that by representing the two-word integer in 4 words, with only
161 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
162 number. The value of the word is LOWPART + HIGHPART * BASE. */
/* LOWPART extracts the low half-word of X; HIGHPART the high half-word.  */
165 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
166 #define HIGHPART(x) \
167 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
168 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
170 /* Unpack a two-word integer into 4 words.
171 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
172 WORDS points to the array of HOST_WIDE_INTs. */
175 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
/* Each output word holds HOST_BITS_PER_WIDE_INT / 2 bits (see BASE above).  */
177 words[0] = LOWPART (low);
178 words[1] = HIGHPART (low);
179 words[2] = LOWPART (hi);
180 words[3] = HIGHPART (hi);
183 /* Pack an array of 4 words into a two-word integer.
184 WORDS points to the array of words.
185 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
188 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
/* Inverse of encode: recombine adjacent half-words using BASE.  */
191 *low = words[0] + words[1] * BASE;
192 *hi = words[2] + words[3] * BASE;
195 /* Force the double-word integer L1, H1 to be within the range of the
196 integer type TYPE. Stores the properly truncated and sign-extended
197 double-word integer in *LV, *HV. Returns true if the operation
198 overflows, that is, argument and result are different. */
201 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
202 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
204 unsigned HOST_WIDE_INT low0 = l1;
205 HOST_WIDE_INT high0 = h1;
207 int sign_extended_type;
/* Pointer and offset types are handled specially; NOTE(review): the
   branch body for this test is elided in this listing.  */
209 if (POINTER_TYPE_P (type)
210 || TREE_CODE (type) == OFFSET_TYPE)
213 prec = TYPE_PRECISION (type);
215 /* Size types *are* sign extended. */
216 sign_extended_type = (!TYPE_UNSIGNED (type)
217 || (TREE_CODE (type) == INTEGER_TYPE
218 && TYPE_IS_SIZETYPE (type)));
220 /* First clear all bits that are beyond the type's precision. */
221 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
223 else if (prec > HOST_BITS_PER_WIDE_INT)
224 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
228 if (prec < HOST_BITS_PER_WIDE_INT)
229 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
232 /* Then do sign extension if necessary. */
233 if (!sign_extended_type)
234 /* No sign extension */;
235 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
236 /* Correct width already. */;
237 else if (prec > HOST_BITS_PER_WIDE_INT)
239 /* Sign extend top half? */
240 if (h1 & ((unsigned HOST_WIDE_INT)1
241 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
242 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
244 else if (prec == HOST_BITS_PER_WIDE_INT)
246 if ((HOST_WIDE_INT)l1 < 0)
251 /* Sign extend bottom half? */
252 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
255 l1 |= (HOST_WIDE_INT)(-1) << prec;
262 /* If the value didn't fit, signal overflow. */
263 return l1 != low0 || h1 != high0;
266 /* We force the double-int HIGH:LOW to the range of the type TYPE by
267 sign or zero extending it.
268 OVERFLOWABLE indicates if we are interested
269 in overflow of the value, when >0 we are only interested in signed
270 overflow, for <0 we are interested in any overflow. OVERFLOWED
271 indicates whether overflow has already occurred. CONST_OVERFLOWED
272 indicates whether constant overflow has already occurred. We force
273 T's value to be within range of T's type (by setting to 0 or 1 all
274 the bits outside the type's range). We set TREE_OVERFLOWED if,
275 OVERFLOWED is nonzero,
276 or OVERFLOWABLE is >0 and signed overflow occurs
277 or OVERFLOWABLE is <0 and any overflow occurs
278 We set TREE_CONSTANT_OVERFLOWED if,
279 CONST_OVERFLOWED is nonzero
280 or we set TREE_OVERFLOWED.
281 We return a new tree node for the extended double-int. The node
282 is shared if no overflow flags are set. */
285 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
286 HOST_WIDE_INT high, int overflowable,
287 bool overflowed, bool overflowed_const)
289 int sign_extended_type;
292 /* Size types *are* sign extended. */
293 sign_extended_type = (!TYPE_UNSIGNED (type)
294 || (TREE_CODE (type) == INTEGER_TYPE
295 && TYPE_IS_SIZETYPE (type)));
/* Truncate/sign-extend LOW:HIGH to TYPE; OVERFLOW records whether the
   value changed.  */
297 overflow = fit_double_type (low, high, &low, &high, type);
299 /* If we need to set overflow flags, return a new unshared node. */
300 if (overflowed || overflowed_const || overflow)
304 || (overflowable > 0 && sign_extended_type))
306 tree t = make_node (INTEGER_CST);
307 TREE_INT_CST_LOW (t) = low;
308 TREE_INT_CST_HIGH (t) = high;
309 TREE_TYPE (t) = type;
310 TREE_OVERFLOW (t) = 1;
311 TREE_CONSTANT_OVERFLOW (t) = 1;
315 else if (overflowed_const)
317 tree t = make_node (INTEGER_CST);
318 TREE_INT_CST_LOW (t) = low;
319 TREE_INT_CST_HIGH (t) = high;
320 TREE_TYPE (t) = type;
321 TREE_CONSTANT_OVERFLOW (t) = 1;
327 /* Else build a shared node. */
328 return build_int_cst_wide (type, low, high);
331 /* Add two doubleword integers with doubleword result.
332 Return nonzero if the operation overflows according to UNSIGNED_P.
333 Each argument is given as two `HOST_WIDE_INT' pieces.
334 One argument is L1 and H1; the other, L2 and H2.
335 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
338 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
339 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
340 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
343 unsigned HOST_WIDE_INT l;
/* (l < l1) is the carry out of the low-word addition.  */
347 h = h1 + h2 + (l < l1);
/* Unsigned overflow: carry out of the high word.  */
353 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
/* Signed overflow: sign-bit trick, see OVERFLOW_SUM_SIGN above.  */
355 return OVERFLOW_SUM_SIGN (h1, h2, h);
358 /* Negate a doubleword integer with doubleword result.
359 Return nonzero if the operation overflows, assuming it's signed.
360 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
361 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
364 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
365 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
/* Overflow iff both input and result keep the sign bit set, which
   happens only for the most negative representable value.  */
371 return (*hv & h1) < 0;
381 /* Multiply two doubleword integers with doubleword result.
382 Return nonzero if the operation overflows according to UNSIGNED_P.
383 Each argument is given as two `HOST_WIDE_INT' pieces.
384 One argument is L1 and H1; the other, L2 and H2.
385 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
388 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
389 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
390 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
393 HOST_WIDE_INT arg1[4];
394 HOST_WIDE_INT arg2[4];
395 HOST_WIDE_INT prod[4 * 2];
396 unsigned HOST_WIDE_INT carry;
398 unsigned HOST_WIDE_INT toplow, neglow;
399 HOST_WIDE_INT tophigh, neghigh;
401 encode (arg1, l1, h1);
402 encode (arg2, l2, h2);
404 memset (prod, 0, sizeof prod);
/* Schoolbook multiplication on half-word digits; the full product
   occupies the 8 digits of PROD.  */
406 for (i = 0; i < 4; i++)
409 for (j = 0; j < 4; j++)
412 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
413 carry += arg1[i] * arg2[j];
414 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
416 prod[k] = LOWPART (carry);
417 carry = HIGHPART (carry);
/* Low half of the product is the result; the top half is used only
   for overflow detection below.  */
422 decode (prod, lv, hv);
423 decode (prod + 4, &toplow, &tophigh);
425 /* Unsigned overflow is immediate. */
427 return (toplow | tophigh) != 0;
429 /* Check for signed overflow by calculating the signed representation of the
430 top half of the result; it should agree with the low half's sign bit. */
433 neg_double (l2, h2, &neglow, &neghigh);
434 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
438 neg_double (l1, h1, &neglow, &neghigh);
439 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
441 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
444 /* Shift the doubleword integer in L1, H1 left by COUNT places
445 keeping only PREC bits of result.
446 Shift right if COUNT is negative.
447 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
448 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
451 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
452 HOST_WIDE_INT count, unsigned int prec,
453 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
455 unsigned HOST_WIDE_INT signmask;
/* A negative COUNT is delegated to the right-shift routine.  */
459 rshift_double (l1, h1, -count, prec, lv, hv, arith);
463 if (SHIFT_COUNT_TRUNCATED)
466 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
468 /* Shifting by the host word size is undefined according to the
469 ANSI standard, so we must handle this as a special case. */
473 else if (count >= HOST_BITS_PER_WIDE_INT)
475 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
480 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
/* The two-step right shift avoids an undefined full-width shift when
   COUNT is zero.  */
481 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
485 /* Sign extend all bits that are beyond the precision. */
487 signmask = -((prec > HOST_BITS_PER_WIDE_INT
488 ? ((unsigned HOST_WIDE_INT) *hv
489 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
490 : (*lv >> (prec - 1))) & 1);
492 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
494 else if (prec >= HOST_BITS_PER_WIDE_INT)
496 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
497 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
502 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
503 *lv |= signmask << prec;
507 /* Shift the doubleword integer in L1, H1 right by COUNT places
508 keeping only PREC bits of result. COUNT must be positive.
509 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
510 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
513 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
514 HOST_WIDE_INT count, unsigned int prec,
515 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
518 unsigned HOST_WIDE_INT signmask;
/* SIGNMASK is all-ones when arithmetic-shifting a negative value.  */
521 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
524 if (SHIFT_COUNT_TRUNCATED)
527 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
529 /* Shifting by the host word size is undefined according to the
530 ANSI standard, so we must handle this as a special case. */
534 else if (count >= HOST_BITS_PER_WIDE_INT)
537 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
541 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
/* Two-step left shift avoids an undefined full-width shift when COUNT
   is zero.  */
543 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
546 /* Zero / sign extend all bits that are beyond the precision. */
548 if (count >= (HOST_WIDE_INT)prec)
553 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
555 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
557 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
558 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
563 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
564 *lv |= signmask << (prec - count);
568 /* Rotate the doubleword integer in L1, H1 left by COUNT places
569 keeping only PREC bits of result.
570 Rotate right if COUNT is negative.
571 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
574 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
575 HOST_WIDE_INT count, unsigned int prec,
576 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
578 unsigned HOST_WIDE_INT s1l, s2l;
579 HOST_WIDE_INT s1h, s2h;
/* A rotate is composed from two complementary logical shifts.  */
585 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
586 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
591 /* Rotate the doubleword integer in L1, H1 left by COUNT places
592 keeping only PREC bits of result. COUNT must be positive.
593 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
596 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
597 HOST_WIDE_INT count, unsigned int prec,
598 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
600 unsigned HOST_WIDE_INT s1l, s2l;
601 HOST_WIDE_INT s1h, s2h;
/* A rotate is composed from two complementary logical shifts.  */
607 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
608 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
613 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
614 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
615 CODE is a tree code for a kind of division, one of
616 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
618 It controls how the quotient is rounded to an integer.
619 Return nonzero if the operation overflows.
620 UNS nonzero says do unsigned division. */
623 div_and_round_double (enum tree_code code, int uns,
624 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
625 HOST_WIDE_INT hnum_orig,
626 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
627 HOST_WIDE_INT hden_orig,
628 unsigned HOST_WIDE_INT *lquo,
629 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
633 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
634 HOST_WIDE_INT den[4], quo[4];
636 unsigned HOST_WIDE_INT work;
637 unsigned HOST_WIDE_INT carry = 0;
638 unsigned HOST_WIDE_INT lnum = lnum_orig;
639 HOST_WIDE_INT hnum = hnum_orig;
640 unsigned HOST_WIDE_INT lden = lden_orig;
641 HOST_WIDE_INT hden = hden_orig;
/* Division by zero is flagged as overflow and the divisor replaced by
   1 so the code below stays well-defined.  */
644 if (hden == 0 && lden == 0)
645 overflow = 1, lden = 1;
647 /* Calculate quotient sign and convert operands to unsigned. */
653 /* (minimum integer) / (-1) is the only overflow case. */
654 if (neg_double (lnum, hnum, &lnum, &hnum)
655 && ((HOST_WIDE_INT) lden & hden) == -1)
661 neg_double (lden, hden, &lden, &hden);
665 if (hnum == 0 && hden == 0)
666 { /* single precision */
668 /* This unsigned division rounds toward zero. */
674 { /* trivial case: dividend < divisor */
675 /* hden != 0 already checked. */
682 memset (quo, 0, sizeof quo);
684 memset (num, 0, sizeof num); /* to zero 9th element */
685 memset (den, 0, sizeof den);
687 encode (num, lnum, hnum);
688 encode (den, lden, hden);
690 /* Special code for when the divisor < BASE. */
691 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
693 /* hnum != 0 already checked. */
694 for (i = 4 - 1; i >= 0; i--)
696 work = num[i] + carry * BASE;
697 quo[i] = work / lden;
703 /* Full double precision division,
704 with thanks to Don Knuth's "Seminumerical Algorithms". */
705 int num_hi_sig, den_hi_sig;
706 unsigned HOST_WIDE_INT quo_est, scale;
708 /* Find the highest nonzero divisor digit. */
709 for (i = 4 - 1;; i--)
716 /* Insure that the first digit of the divisor is at least BASE/2.
717 This is required by the quotient digit estimation algorithm. */
719 scale = BASE / (den[den_hi_sig] + 1);
721 { /* scale divisor and dividend */
723 for (i = 0; i <= 4 - 1; i++)
725 work = (num[i] * scale) + carry;
726 num[i] = LOWPART (work);
727 carry = HIGHPART (work);
732 for (i = 0; i <= 4 - 1; i++)
734 work = (den[i] * scale) + carry;
735 den[i] = LOWPART (work);
736 carry = HIGHPART (work);
737 if (den[i] != 0) den_hi_sig = i;
/* Main loop of Knuth's Algorithm D: produce one quotient digit per
   iteration, most significant first.  */
744 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
746 /* Guess the next quotient digit, quo_est, by dividing the first
747 two remaining dividend digits by the high order quotient digit.
748 quo_est is never low and is at most 2 high. */
749 unsigned HOST_WIDE_INT tmp;
751 num_hi_sig = i + den_hi_sig + 1;
752 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
753 if (num[num_hi_sig] != den[den_hi_sig])
754 quo_est = work / den[den_hi_sig];
758 /* Refine quo_est so it's usually correct, and at most one high. */
759 tmp = work - quo_est * den[den_hi_sig];
761 && (den[den_hi_sig - 1] * quo_est
762 > (tmp * BASE + num[num_hi_sig - 2])))
765 /* Try QUO_EST as the quotient digit, by multiplying the
766 divisor by QUO_EST and subtracting from the remaining dividend.
767 Keep in mind that QUO_EST is the I - 1st digit. */
770 for (j = 0; j <= den_hi_sig; j++)
772 work = quo_est * den[j] + carry;
773 carry = HIGHPART (work);
774 work = num[i + j] - LOWPART (work);
775 num[i + j] = LOWPART (work);
776 carry += HIGHPART (work) != 0;
779 /* If quo_est was high by one, then num[i] went negative and
780 we need to correct things. */
781 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
784 carry = 0; /* add divisor back in */
785 for (j = 0; j <= den_hi_sig; j++)
787 work = num[i + j] + den[j] + carry;
788 carry = HIGHPART (work);
789 num[i + j] = LOWPART (work);
792 num [num_hi_sig] += carry;
795 /* Store the quotient digit. */
800 decode (quo, lquo, hquo);
803 /* If result is negative, make it so. */
805 neg_double (*lquo, *hquo, lquo, hquo);
807 /* Compute trial remainder: rem = num - (quo * den) */
808 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
809 neg_double (*lrem, *hrem, lrem, hrem);
810 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Adjust the truncated quotient according to the rounding mode CODE.  */
815 case TRUNC_MOD_EXPR: /* round toward zero */
816 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
820 case FLOOR_MOD_EXPR: /* round toward negative infinity */
821 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
824 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
832 case CEIL_MOD_EXPR: /* round toward positive infinity */
833 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
835 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
843 case ROUND_MOD_EXPR: /* round to closest integer */
845 unsigned HOST_WIDE_INT labs_rem = *lrem;
846 HOST_WIDE_INT habs_rem = *hrem;
847 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
848 HOST_WIDE_INT habs_den = hden, htwice;
850 /* Get absolute values. */
852 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
854 neg_double (lden, hden, &labs_den, &habs_den);
856 /* If (2 * abs (lrem) >= abs (lden)) */
857 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
858 labs_rem, habs_rem, &ltwice, &htwice);
860 if (((unsigned HOST_WIDE_INT) habs_den
861 < (unsigned HOST_WIDE_INT) htwice)
862 || (((unsigned HOST_WIDE_INT) habs_den
863 == (unsigned HOST_WIDE_INT) htwice)
864 && (labs_den < ltwice)))
868 add_double (*lquo, *hquo,
869 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
872 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
884 /* Compute true remainder: rem = num - (quo * den) */
885 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
886 neg_double (*lrem, *hrem, lrem, hrem);
887 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
891 /* If ARG2 divides ARG1 with zero remainder, carries out the division
892 of type CODE and returns the quotient.
893 Otherwise returns NULL_TREE. */
896 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
898 unsigned HOST_WIDE_INT int1l, int2l;
899 HOST_WIDE_INT int1h, int2h;
900 unsigned HOST_WIDE_INT quol, reml;
901 HOST_WIDE_INT quoh, remh;
902 tree type = TREE_TYPE (arg1);
903 int uns = TYPE_UNSIGNED (type);
905 int1l = TREE_INT_CST_LOW (arg1);
906 int1h = TREE_INT_CST_HIGH (arg1);
907 int2l = TREE_INT_CST_LOW (arg2);
908 int2h = TREE_INT_CST_HIGH (arg2);
910 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
911 &quol, &quoh, &reml, &remh);
/* A nonzero remainder means the division is inexact; per the function
   comment the result is then NULL_TREE.  */
912 if (remh != 0 || reml != 0)
915 return build_int_cst_wide (type, quol, quoh);
918 /* Return true if the built-in mathematical function specified by CODE
919 is odd, i.e. -f(x) == f(-x). */
922 negate_mathfn_p (enum built_in_function code)
/* Odd functions: negating the argument negates the result.  */
926 CASE_FLT_FN (BUILT_IN_ASIN):
927 CASE_FLT_FN (BUILT_IN_ASINH):
928 CASE_FLT_FN (BUILT_IN_ATAN):
929 CASE_FLT_FN (BUILT_IN_ATANH):
930 CASE_FLT_FN (BUILT_IN_CBRT):
931 CASE_FLT_FN (BUILT_IN_ERF):
932 CASE_FLT_FN (BUILT_IN_LLROUND):
933 CASE_FLT_FN (BUILT_IN_LROUND):
934 CASE_FLT_FN (BUILT_IN_ROUND):
935 CASE_FLT_FN (BUILT_IN_SIN):
936 CASE_FLT_FN (BUILT_IN_SINH):
937 CASE_FLT_FN (BUILT_IN_TAN):
938 CASE_FLT_FN (BUILT_IN_TANH):
939 CASE_FLT_FN (BUILT_IN_TRUNC):
/* The rint family is odd only when rounding-mode effects may be
   ignored (-fno-rounding-math).  */
942 CASE_FLT_FN (BUILT_IN_LLRINT):
943 CASE_FLT_FN (BUILT_IN_LRINT):
944 CASE_FLT_FN (BUILT_IN_NEARBYINT):
945 CASE_FLT_FN (BUILT_IN_RINT):
946 return !flag_rounding_math;
954 /* Check whether we may negate an integer constant T without causing overflow.  */
958 may_negate_without_overflow_p (tree t)
960 unsigned HOST_WIDE_INT val;
964 gcc_assert (TREE_CODE (t) == INTEGER_CST);
966 type = TREE_TYPE (t);
967 if (TYPE_UNSIGNED (type))
970 prec = TYPE_PRECISION (type);
971 if (prec > HOST_BITS_PER_WIDE_INT)
973 if (TREE_INT_CST_LOW (t) != 0)
975 prec -= HOST_BITS_PER_WIDE_INT;
976 val = TREE_INT_CST_HIGH (t);
979 val = TREE_INT_CST_LOW (t);
980 if (prec < HOST_BITS_PER_WIDE_INT)
981 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
/* Only the minimum value (sign-bit pattern) overflows on negation.  */
982 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
985 /* Determine whether an expression T can be cheaply negated using
986 the function negate_expr without introducing undefined overflow. */
989 negate_expr_p (tree t)
996 type = TREE_TYPE (t);
/* Dispatch on the tree code of T.  NOTE(review): several case labels
   of this switch are elided in this listing.  */
999 switch (TREE_CODE (t))
1002 if (TYPE_UNSIGNED (type)
1003 || (flag_wrapv && ! flag_trapv))
1006 /* Check that -CST will not overflow type. */
1007 return may_negate_without_overflow_p (t);
1009 return INTEGRAL_TYPE_P (type)
1010 && (TYPE_UNSIGNED (type)
1011 || (flag_wrapv && !flag_trapv));
/* A complex constant is negatable iff both of its parts are.  */
1018 return negate_expr_p (TREE_REALPART (t))
1019 && negate_expr_p (TREE_IMAGPART (t));
1022 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1023 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1025 /* -(A + B) -> (-B) - A. */
1026 if (negate_expr_p (TREE_OPERAND (t, 1))
1027 && reorder_operands_p (TREE_OPERAND (t, 0),
1028 TREE_OPERAND (t, 1)))
1030 /* -(A + B) -> (-A) - B. */
1031 return negate_expr_p (TREE_OPERAND (t, 0));
1034 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1035 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1036 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1037 && reorder_operands_p (TREE_OPERAND (t, 0),
1038 TREE_OPERAND (t, 1));
1041 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* A product/quotient is negatable if either operand is.  */
1047 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1048 return negate_expr_p (TREE_OPERAND (t, 1))
1049 || negate_expr_p (TREE_OPERAND (t, 0));
1052 case TRUNC_DIV_EXPR:
1053 case ROUND_DIV_EXPR:
1054 case FLOOR_DIV_EXPR:
1056 case EXACT_DIV_EXPR:
1057 if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
1059 return negate_expr_p (TREE_OPERAND (t, 1))
1060 || negate_expr_p (TREE_OPERAND (t, 0));
1063 /* Negate -((double)float) as (double)(-float). */
1064 if (TREE_CODE (type) == REAL_TYPE)
1066 tree tem = strip_float_extensions (t);
1068 return negate_expr_p (tem);
1073 /* Negate -f(x) as f(-x). */
1074 if (negate_mathfn_p (builtin_mathfn_code (t)))
1075 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1079 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1080 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1082 tree op1 = TREE_OPERAND (t, 1);
1083 if (TREE_INT_CST_HIGH (op1) == 0
1084 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1085 == TREE_INT_CST_LOW (op1))
1096 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1097 simplification is possible.
1098 If negate_expr_p would return true for T, NULL_TREE will never be returned.  */
1102 fold_negate_expr (tree t)
1104 tree type = TREE_TYPE (t);
/* Dispatch on the tree code of T; this mirrors negate_expr_p above.  */
1107 switch (TREE_CODE (t))
1109 /* Convert - (~A) to A + 1. */
1111 if (INTEGRAL_TYPE_P (type))
1112 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1113 build_int_cst (type, 1));
1117 tem = fold_negate_const (t, type);
1118 if (! TREE_OVERFLOW (tem)
1119 || TYPE_UNSIGNED (type)
1125 tem = fold_negate_const (t, type);
1126 /* Two's complement FP formats, such as c4x, may overflow. */
1127 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
/* Negate a complex constant part-wise.  */
1133 tree rpart = negate_expr (TREE_REALPART (t));
1134 tree ipart = negate_expr (TREE_IMAGPART (t));
1136 if ((TREE_CODE (rpart) == REAL_CST
1137 && TREE_CODE (ipart) == REAL_CST)
1138 || (TREE_CODE (rpart) == INTEGER_CST
1139 && TREE_CODE (ipart) == INTEGER_CST))
1140 return build_complex (type, rpart, ipart);
/* - -A is simply A.  */
1145 return TREE_OPERAND (t, 0);
1148 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1149 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1151 /* -(A + B) -> (-B) - A. */
1152 if (negate_expr_p (TREE_OPERAND (t, 1))
1153 && reorder_operands_p (TREE_OPERAND (t, 0),
1154 TREE_OPERAND (t, 1)))
1156 tem = negate_expr (TREE_OPERAND (t, 1));
1157 return fold_build2 (MINUS_EXPR, type,
1158 tem, TREE_OPERAND (t, 0));
1161 /* -(A + B) -> (-A) - B. */
1162 if (negate_expr_p (TREE_OPERAND (t, 0)))
1164 tem = negate_expr (TREE_OPERAND (t, 0));
1165 return fold_build2 (MINUS_EXPR, type,
1166 tem, TREE_OPERAND (t, 1));
1172 /* - (A - B) -> B - A */
1173 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1174 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1175 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1176 return fold_build2 (MINUS_EXPR, type,
1177 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1181 if (TYPE_UNSIGNED (type))
/* Push the negation into one operand of a product/quotient.  */
1187 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1189 tem = TREE_OPERAND (t, 1);
1190 if (negate_expr_p (tem))
1191 return fold_build2 (TREE_CODE (t), type,
1192 TREE_OPERAND (t, 0), negate_expr (tem));
1193 tem = TREE_OPERAND (t, 0);
1194 if (negate_expr_p (tem))
1195 return fold_build2 (TREE_CODE (t), type,
1196 negate_expr (tem), TREE_OPERAND (t, 1));
1200 case TRUNC_DIV_EXPR:
1201 case ROUND_DIV_EXPR:
1202 case FLOOR_DIV_EXPR:
1204 case EXACT_DIV_EXPR:
1205 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
1207 tem = TREE_OPERAND (t, 1);
1208 if (negate_expr_p (tem))
1209 return fold_build2 (TREE_CODE (t), type,
1210 TREE_OPERAND (t, 0), negate_expr (tem));
1211 tem = TREE_OPERAND (t, 0);
1212 if (negate_expr_p (tem))
1213 return fold_build2 (TREE_CODE (t), type,
1214 negate_expr (tem), TREE_OPERAND (t, 1));
1219 /* Convert -((double)float) into (double)(-float). */
1220 if (TREE_CODE (type) == REAL_TYPE)
1222 tem = strip_float_extensions (t);
1223 if (tem != t && negate_expr_p (tem))
1224 return negate_expr (tem);
1229 /* Negate -f(x) as f(-x). */
1230 if (negate_mathfn_p (builtin_mathfn_code (t))
1231 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1233 tree fndecl, arg, arglist;
1235 fndecl = get_callee_fndecl (t);
1236 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1237 arglist = build_tree_list (NULL_TREE, arg);
1238 return build_function_call_expr (fndecl, arglist);
1243 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1244 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1246 tree op1 = TREE_OPERAND (t, 1);
1247 if (TREE_INT_CST_HIGH (op1) == 0
1248 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1249 == TREE_INT_CST_LOW (op1))
/* Perform the shift in the opposite signedness, then convert back.  */
1251 tree ntype = TYPE_UNSIGNED (type)
1252 ? lang_hooks.types.signed_type (type)
1253 : lang_hooks.types.unsigned_type (type);
1254 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1255 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1256 return fold_convert (type, temp);
1268 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1269 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1270 return NULL_TREE. */
1273 negate_expr (tree t)
/* Save the original type first: STRIP_SIGN_NOPS below may strip a
   sign-changing conversion off T, and the final result must be converted
   back to the type callers expect.  */
1280 type = TREE_TYPE (t);
1281 STRIP_SIGN_NOPS (t);
/* Prefer the cheap algebraic simplifications; only build an explicit
   NEGATE_EXPR node when fold_negate_expr could not do better.
   NOTE(review): the test of TEM between these two statements appears to
   be elided in this copy -- confirm against the full source.  */
1283 tem = fold_negate_expr (t);
1285 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1286 return fold_convert (type, tem);
1289 /* Split a tree IN into a constant, literal and variable parts that could be
1290 combined with CODE to make IN. "constant" means an expression with
1291 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1292 commutative arithmetic operation. Store the constant part into *CONP,
1293 the literal in *LITP and return the variable part. If a part isn't
1294 present, set it to null. If the tree does not decompose in this way,
1295 return the entire tree as the variable part and the other parts as null.
1297 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1298 case, we negate an operand that was subtracted. Except if it is a
1299 literal for which we use *MINUS_LITP instead.
1301 If NEGATE_P is true, we are negating all of IN, again except a literal
1302 for which we use *MINUS_LITP instead.
1304 If IN is itself a literal or constant, return it as appropriate.
1306 Note that we do not guarantee that any of the three values will be the
1307 same type as IN, but they will have the same signedness and mode. */
1310 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1311 tree *minus_litp, int negate_p)
1319 /* Strip any conversions that don't change the machine mode or signedness. */
1320 STRIP_SIGN_NOPS (in);
/* A bare literal is its own *LITP part; see the function comment.  */
1322 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
/* Decompose a binary node whose code matches, or mix PLUS/MINUS for
   integral types only -- reassociation is value-preserving for integers
   but not for floating point (see comment below).  */
1324 else if (TREE_CODE (in) == code
1325 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1326 /* We can associate addition and subtraction together (even
1327 though the C standard doesn't say so) for integers because
1328 the value is not affected. For reals, the value might be
1329 affected, so we can't. */
1330 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1331 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1333 tree op0 = TREE_OPERAND (in, 0);
1334 tree op1 = TREE_OPERAND (in, 1);
/* NEG1_P records whether OP1 was subtracted; the NEG_*_P flags track
   which of the three output parts must ultimately be negated.  */
1335 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1336 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1338 /* First see if either of the operands is a literal, then a constant. */
1339 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1340 *litp = op0, op0 = 0;
1341 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1342 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1344 if (op0 != 0 && TREE_CONSTANT (op0))
1345 *conp = op0, op0 = 0;
1346 else if (op1 != 0 && TREE_CONSTANT (op1))
1347 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1349 /* If we haven't dealt with either operand, this is not a case we can
1350 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1351 if (op0 != 0 && op1 != 0)
1356 var = op1, neg_var_p = neg1_p;
1358 /* Now do any needed negations. */
/* A negated literal is reported through *MINUS_LITP rather than being
   wrapped in a NEGATE_EXPR; constants and variables are negated
   explicitly via negate_expr.  */
1360 *minus_litp = *litp, *litp = 0;
1362 *conp = negate_expr (*conp);
1364 var = negate_expr (var);
/* IN itself is a non-literal constant: it all goes in *CONP.  */
1366 else if (TREE_CONSTANT (in))
/* NEGATE_P set: flip the whole decomposition, swapping the literal
   between *LITP and *MINUS_LITP instead of negating it.  */
1374 *minus_litp = *litp, *litp = 0;
1375 else if (*minus_litp)
1376 *litp = *minus_litp, *minus_litp = 0;
1377 *conp = negate_expr (*conp);
1378 var = negate_expr (var);
1384 /* Re-associate trees split by the above function. T1 and T2 are either
1385 expressions to associate or null. Return the new expression, if any. If
1386 we build an operation, do it in TYPE and with CODE. */
1389 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1396 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1397 try to fold this since we will have infinite recursion. But do
1398 deal with any NEGATE_EXPRs. */
1399 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1400 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1402 if (code == PLUS_EXPR)
/* A + (-B) and (-A) + B are rebuilt as subtractions so the
   NEGATE_EXPR does not survive into the result.  */
1404 if (TREE_CODE (t1) == NEGATE_EXPR)
1405 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1406 fold_convert (type, TREE_OPERAND (t1, 0)));
1407 else if (TREE_CODE (t2) == NEGATE_EXPR)
1408 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1409 fold_convert (type, TREE_OPERAND (t2, 0)));
1410 else if (integer_zerop (t2))
1411 return fold_convert (type, t1);
1413 else if (code == MINUS_EXPR)
/* T1 - 0 is just T1 in TYPE.  */
1415 if (integer_zerop (t2))
1416 return fold_convert (type, t1);
/* Build without folding here, to avoid the recursion noted above.  */
1419 return build2 (code, type, fold_convert (type, t1),
1420 fold_convert (type, t2));
/* Safe to fold in the general case.  */
1423 return fold_build2 (code, type, fold_convert (type, t1),
1424 fold_convert (type, t2));
1427 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1428 for use in int_const_binop, size_binop and size_diffop. */
1431 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
/* Only integer and pointer types qualify at all.
   NOTE(review): the early "return false" paths and the special-casing
   keyed on CODE (lines 1436-1449) appear to be elided in this copy.  */
1433 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1435 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
/* Otherwise the two types must agree in signedness, precision and
   machine mode to be interchangeable.  */
1450 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1451 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1452 && TYPE_MODE (type1) == TYPE_MODE (type2);
1456 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1457 to produce a new constant. Return NULL_TREE if we don't know how
1458 to evaluate CODE at compile-time.
1460 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1463 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Integer constants are stored as a pair of HOST_WIDE_INTs (LOW, HIGH)
   forming one double-word value; all arithmetic below works on those
   pairs via the *_double helpers.  */
1465 unsigned HOST_WIDE_INT int1l, int2l;
1466 HOST_WIDE_INT int1h, int2h;
1467 unsigned HOST_WIDE_INT low;
/* Scratch outputs for the half of a div/mod result we discard.  */
1469 unsigned HOST_WIDE_INT garbagel;
1470 HOST_WIDE_INT garbageh;
1472 tree type = TREE_TYPE (arg1);
1473 int uns = TYPE_UNSIGNED (type);
/* sizetype overflows are tracked even though the type is unsigned.  */
1475 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1478 int1l = TREE_INT_CST_LOW (arg1);
1479 int1h = TREE_INT_CST_HIGH (arg1);
1480 int2l = TREE_INT_CST_LOW (arg2);
1481 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise operations: done word-by-word, cannot overflow.  */
1486 low = int1l | int2l, hi = int1h | int2h;
1490 low = int1l ^ int2l, hi = int1h ^ int2h;
1494 low = int1l & int2l, hi = int1h & int2h;
1500 /* It's unclear from the C standard whether shifts can overflow.
1501 The following code ignores overflow; perhaps a C standard
1502 interpretation ruling is needed. */
1503 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1510 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
/* Addition tracks overflow explicitly for the signed/sizetype check
   at the end of the function.  */
1515 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction is addition of the negation; OVERFLOW_SUM_SIGN detects
   overflow from the signs of the operands and result.  */
1519 neg_double (int2l, int2h, &low, &hi);
1520 add_double (int1l, int1h, low, hi, &low, &hi);
1521 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1525 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1528 case TRUNC_DIV_EXPR:
1529 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1530 case EXACT_DIV_EXPR:
1531 /* This is a shortcut for a common special case. */
/* Single-word, non-negative operands with no prior overflow: use the
   host's native division directly.  */
1532 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1533 && ! TREE_CONSTANT_OVERFLOW (arg1)
1534 && ! TREE_CONSTANT_OVERFLOW (arg2)
1535 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1537 if (code == CEIL_DIV_EXPR)
1540 low = int1l / int2l, hi = 0;
1544 /* ... fall through ... */
1546 case ROUND_DIV_EXPR:
/* Division by zero is not folded; X/1 and X/X are special-cased.  */
1547 if (int2h == 0 && int2l == 0)
1549 if (int2h == 0 && int2l == 1)
1551 low = int1l, hi = int1h;
1554 if (int1l == int2l && int1h == int2h
1555 && ! (int1l == 0 && int1h == 0))
/* General case: quotient goes to LOW/HI, remainder is discarded.  */
1560 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1561 &low, &hi, &garbagel, &garbageh);
1564 case TRUNC_MOD_EXPR:
1565 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1566 /* This is a shortcut for a common special case. */
1567 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1568 && ! TREE_CONSTANT_OVERFLOW (arg1)
1569 && ! TREE_CONSTANT_OVERFLOW (arg2)
1570 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1572 if (code == CEIL_MOD_EXPR)
1574 low = int1l % int2l, hi = 0;
1578 /* ... fall through ... */
1580 case ROUND_MOD_EXPR:
1581 if (int2h == 0 && int2l == 0)
/* Here the remainder goes to LOW/HI and the quotient is discarded.  */
1583 overflow = div_and_round_double (code, uns,
1584 int1l, int1h, int2l, int2h,
1585 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compute "arg1 < arg2" into LOW, using unsigned or signed
   double-word comparison as the type requires, then pick an operand.  */
1591 low = (((unsigned HOST_WIDE_INT) int1h
1592 < (unsigned HOST_WIDE_INT) int2h)
1593 || (((unsigned HOST_WIDE_INT) int1h
1594 == (unsigned HOST_WIDE_INT) int2h)
1597 low = (int1h < int2h
1598 || (int1h == int2h && int1l < int2l));
1600 if (low == (code == MIN_EXPR))
1601 low = int1l, hi = int1h;
1603 low = int2l, hi = int2h;
/* NOTRUNC path: build the constant verbatim and propagate the
   overflow/constant-overflow flags from the inputs by hand.  */
1612 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1614 /* Propagate overflow flags ourselves. */
1615 if (((!uns || is_sizetype) && overflow)
1616 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1619 TREE_OVERFLOW (t) = 1;
1620 TREE_CONSTANT_OVERFLOW (t) = 1;
1622 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1625 TREE_CONSTANT_OVERFLOW (t) = 1;
/* Otherwise force the value to fit the type, which also sets the
   overflow bits appropriately.  */
1629 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1630 ((!uns || is_sizetype) && overflow)
1631 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1632 TREE_CONSTANT_OVERFLOW (arg1)
1633 | TREE_CONSTANT_OVERFLOW (arg2));
1638 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1639 constant. We assume ARG1 and ARG2 have the same data type, or at least
1640 are the same kind of constant and the same machine mode. Return zero if
1641 combining the constants is not allowed in the current operating mode.
1643 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1646 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1648 /* Sanity check for the recursive cases. */
/* Integer constants: delegate entirely to int_const_binop above.  */
1655 if (TREE_CODE (arg1) == INTEGER_CST)
1656 return int_const_binop (code, arg1, arg2, notrunc);
/* Floating-point constants: evaluate via the software real.c
   emulation, with several guards against changing runtime behavior.  */
1658 if (TREE_CODE (arg1) == REAL_CST)
1660 enum machine_mode mode;
1663 REAL_VALUE_TYPE value;
1664 REAL_VALUE_TYPE result;
1668 /* The following codes are handled by real_arithmetic. */
1683 d1 = TREE_REAL_CST (arg1);
1684 d2 = TREE_REAL_CST (arg2);
1686 type = TREE_TYPE (arg1);
1687 mode = TYPE_MODE (type);
1689 /* Don't perform operation if we honor signaling NaNs and
1690 either operand is a NaN. */
1691 if (HONOR_SNANS (mode)
1692 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1695 /* Don't perform operation if it would raise a division
1696 by zero exception. */
1697 if (code == RDIV_EXPR
1698 && REAL_VALUES_EQUAL (d2, dconst0)
1699 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1702 /* If either operand is a NaN, just return it. Otherwise, set up
1703 for floating-point trap; we return an overflow. */
1704 if (REAL_VALUE_ISNAN (d1))
1706 else if (REAL_VALUE_ISNAN (d2))
/* INEXACT records whether rounding occurred; used below with
   flag_rounding_math.  */
1709 inexact = real_arithmetic (&value, code, &d1, &d2);
1710 real_convert (&result, mode, &value);
1712 /* Don't constant fold this floating point operation if
1713 the result has overflowed and flag_trapping_math. */
1714 if (flag_trapping_math
1715 && MODE_HAS_INFINITIES (mode)
1716 && REAL_VALUE_ISINF (result)
1717 && !REAL_VALUE_ISINF (d1)
1718 && !REAL_VALUE_ISINF (d2))
1721 /* Don't constant fold this floating point operation if the
1722 result may dependent upon the run-time rounding mode and
1723 flag_rounding_math is set, or if GCC's software emulation
1724 is unable to accurately represent the result. */
1725 if ((flag_rounding_math
1726 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1727 && !flag_unsafe_math_optimizations))
1728 && (inexact || !real_identical (&result, &value)))
1731 t = build_real (type, result);
1733 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1734 TREE_CONSTANT_OVERFLOW (t)
1736 | TREE_CONSTANT_OVERFLOW (arg1)
1737 | TREE_CONSTANT_OVERFLOW (arg2);
/* Complex constants: recurse component-wise on the real and
   imaginary parts.  */
1741 if (TREE_CODE (arg1) == COMPLEX_CST)
1743 tree type = TREE_TYPE (arg1);
1744 tree r1 = TREE_REALPART (arg1);
1745 tree i1 = TREE_IMAGPART (arg1);
1746 tree r2 = TREE_REALPART (arg2);
1747 tree i2 = TREE_IMAGPART (arg2);
/* Addition/subtraction distribute directly over components.  */
1754 real = const_binop (code, r1, r2, notrunc);
1755 imag = const_binop (code, i1, i2, notrunc);
/* Multiplication: (r1+i1*i)(r2+i2*i) = (r1r2-i1i2) + (r1i2+i1r2)i.  */
1759 real = const_binop (MINUS_EXPR,
1760 const_binop (MULT_EXPR, r1, r2, notrunc),
1761 const_binop (MULT_EXPR, i1, i2, notrunc),
1763 imag = const_binop (PLUS_EXPR,
1764 const_binop (MULT_EXPR, r1, i2, notrunc),
1765 const_binop (MULT_EXPR, i1, r2, notrunc),
/* Division: multiply by the conjugate and divide both parts by
   |arg2|^2 = r2*r2 + i2*i2.  */
1772 = const_binop (PLUS_EXPR,
1773 const_binop (MULT_EXPR, r2, r2, notrunc),
1774 const_binop (MULT_EXPR, i2, i2, notrunc),
1777 = const_binop (PLUS_EXPR,
1778 const_binop (MULT_EXPR, r1, r2, notrunc),
1779 const_binop (MULT_EXPR, i1, i2, notrunc),
1782 = const_binop (MINUS_EXPR,
1783 const_binop (MULT_EXPR, i1, r2, notrunc),
1784 const_binop (MULT_EXPR, r1, i2, notrunc),
/* For complex integer types the component division truncates.  */
1787 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1788 code = TRUNC_DIV_EXPR;
1790 real = const_binop (code, t1, magsquared, notrunc);
1791 imag = const_binop (code, t2, magsquared, notrunc);
1800 return build_complex (type, real, imag);
1806 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1807 indicates which particular sizetype to create. */
1810 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
/* sizetype_tab maps each size_type_kind to its tree type node.  */
1812 return build_int_cst (sizetype_tab[(int) kind], number);
1815 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1816 is a tree code. The type of the result is taken from the operands.
1817 Both must be equivalent integer types, ala int_binop_types_match_p.
1818 If the operands are constant, so is the result. */
1821 size_binop (enum tree_code code, tree arg0, tree arg1)
1823 tree type = TREE_TYPE (arg0);
/* Propagate earlier front-end errors rather than folding on them.  */
1825 if (arg0 == error_mark_node || arg1 == error_mark_node)
1826 return error_mark_node;
/* Caller contract: operand types must match per
   int_binop_types_match_p (same signedness, precision, mode).  */
1828 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1831 /* Handle the special case of two integer constants faster. */
1832 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1834 /* And some specific cases even faster than that. */
/* Identity operations: 0 + X, X +/- 0, 1 * X.  */
1835 if (code == PLUS_EXPR && integer_zerop (arg0))
1837 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1838 && integer_zerop (arg1))
1840 else if (code == MULT_EXPR && integer_onep (arg0))
1843 /* Handle general case of two integer constants. */
1844 return int_const_binop (code, arg0, arg1, 0);
/* Non-constant operands: build a folded expression node instead.  */
1847 return fold_build2 (code, type, arg0, arg1);
1850 /* Given two values, either both of sizetype or both of bitsizetype,
1851 compute the difference between the two values. Return the value
1852 in signed type corresponding to the type of the operands. */
1855 size_diffop (tree arg0, tree arg1)
1857 tree type = TREE_TYPE (arg0);
1860 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1863 /* If the type is already signed, just do the simple thing. */
1864 if (!TYPE_UNSIGNED (type))
1865 return size_binop (MINUS_EXPR, arg0, arg1);
/* Select the signed counterpart type: ssizetype for sizetype,
   sbitsizetype for bitsizetype, otherwise ask the front end.  */
1867 if (type == sizetype)
1869 else if (type == bitsizetype)
1870 ctype = sbitsizetype;
1872 ctype = lang_hooks.types.signed_type (type);
1874 /* If either operand is not a constant, do the conversions to the signed
1875 type and subtract. The hardware will do the right thing with any
1876 overflow in the subtraction. */
1877 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1878 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1879 fold_convert (ctype, arg1));
1881 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1882 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1883 overflow) and negate (which can't either). Special-case a result
1884 of zero while we're here. */
1885 if (tree_int_cst_equal (arg0, arg1))
1886 return build_int_cst (ctype, 0);
1887 else if (tree_int_cst_lt (arg1, arg0))
1888 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* arg0 < arg1: compute 0 - (arg1 - arg0) in the signed type.  */
1890 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1891 fold_convert (ctype, size_binop (MINUS_EXPR,
1895 /* A subroutine of fold_convert_const handling conversions of an
1896 INTEGER_CST to another integer type. */
1899 fold_convert_const_int_from_int (tree type, tree arg1)
1903 /* Given an integer constant, make new constant with new type,
1904 appropriately sign-extended or truncated. */
/* force_fit_type_double does the extension/truncation; the extra
   arguments control which overflow flags are set on the result.  */
1905 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
1906 TREE_INT_CST_HIGH (arg1),
1907 /* Don't set the overflow when
1908 converting a pointer */
1909 !POINTER_TYPE_P (TREE_TYPE (arg1)),
/* Treat a negative value converted from a wider-signedness source to
   an unsigned type as an overflow, and OR in any prior overflow.  */
1910 (TREE_INT_CST_HIGH (arg1) < 0
1911 && (TYPE_UNSIGNED (type)
1912 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1913 | TREE_OVERFLOW (arg1),
1914 TREE_CONSTANT_OVERFLOW (arg1));
1919 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1920 to an integer type. */
1923 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1928 /* The following code implements the floating point to integer
1929 conversion rules required by the Java Language Specification,
1930 that IEEE NaNs are mapped to zero and values that overflow
1931 the target precision saturate, i.e. values greater than
1932 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1933 are mapped to INT_MIN. These semantics are allowed by the
1934 C and C++ standards that simply state that the behavior of
1935 FP-to-integer conversion is unspecified upon overflow. */
1937 HOST_WIDE_INT high, low;
1939 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Apply the rounding implied by CODE before converting; only the
   truncation case is visible here.  */
1943 case FIX_TRUNC_EXPR:
1944 real_trunc (&r, VOIDmode, &x);
1951 /* If R is NaN, return zero and show we have an overflow. */
1952 if (REAL_VALUE_ISNAN (r))
1959 /* See if R is less than the lower bound or greater than the
/* Below TYPE's minimum: saturate to TYPE_MIN_VALUE.  */
1964 tree lt = TYPE_MIN_VALUE (type);
1965 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1966 if (REAL_VALUES_LESS (r, l))
1969 high = TREE_INT_CST_HIGH (lt);
1970 low = TREE_INT_CST_LOW (lt);
/* Above TYPE's maximum: saturate to TYPE_MAX_VALUE.  */
1976 tree ut = TYPE_MAX_VALUE (type);
1979 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1980 if (REAL_VALUES_LESS (u, r))
1983 high = TREE_INT_CST_HIGH (ut);
1984 low = TREE_INT_CST_LOW (ut);
/* In range: do the actual conversion into the double-word pair.  */
1990 REAL_VALUE_TO_INT (&low, &high, r);
1992 t = force_fit_type_double (type, low, high, -1,
1993 overflow | TREE_OVERFLOW (arg1),
1994 TREE_CONSTANT_OVERFLOW (arg1));
1998 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1999 to another floating point type. */
2002 fold_convert_const_real_from_real (tree type, tree arg1)
2004 REAL_VALUE_TYPE value;
/* Re-round the value into TYPE's machine mode, then carry the
   overflow flags across from the source constant.  */
2007 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2008 t = build_real (type, value);
2010 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2011 TREE_CONSTANT_OVERFLOW (t)
2012 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2016 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2017 type TYPE. If no simplification can be done return NULL_TREE. */
2020 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* No-op conversion: same type already.  */
2022 if (TREE_TYPE (arg1) == type)
/* Dispatch on the target type class, then on the constant's kind.  */
2025 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2027 if (TREE_CODE (arg1) == INTEGER_CST)
2028 return fold_convert_const_int_from_int (type, arg1);
2029 else if (TREE_CODE (arg1) == REAL_CST)
2030 return fold_convert_const_int_from_real (code, type, arg1);
2032 else if (TREE_CODE (type) == REAL_TYPE)
2034 if (TREE_CODE (arg1) == INTEGER_CST)
2035 return build_real_from_int_cst (type, arg1);
2036 if (TREE_CODE (arg1) == REAL_CST)
2037 return fold_convert_const_real_from_real (type, arg1);
2042 /* Construct a vector of zero elements of vector type TYPE. */
2045 build_zero_vector (tree type)
/* Fold zero into the element type once, then replicate it once per
   vector lane into a TREE_LIST for build_vector.  */
2050 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2051 units = TYPE_VECTOR_SUBPARTS (type);
2054 for (i = 0; i < units; i++)
2055 list = tree_cons (NULL_TREE, elem, list);
2056 return build_vector (type, list);
2059 /* Convert expression ARG to type TYPE. Used by the middle-end for
2060 simple conversions in preference to calling the front-end's convert. */
2063 fold_convert (tree type, tree arg)
2065 tree orig = TREE_TYPE (arg);
/* Errors anywhere poison the conversion.  */
2071 if (TREE_CODE (arg) == ERROR_MARK
2072 || TREE_CODE (type) == ERROR_MARK
2073 || TREE_CODE (orig) == ERROR_MARK)
2074 return error_mark_node;
/* Identical or language-compatible types need only a NOP_EXPR.  */
2076 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2077 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2078 TYPE_MAIN_VARIANT (orig)))
2079 return fold_build1 (NOP_EXPR, type, arg);
2081 switch (TREE_CODE (type))
/* Target is an integer-like or pointer type.  */
2083 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2084 case POINTER_TYPE: case REFERENCE_TYPE:
2086 if (TREE_CODE (arg) == INTEGER_CST)
2088 tem = fold_convert_const (NOP_EXPR, type, arg);
2089 if (tem != NULL_TREE)
2092 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2093 || TREE_CODE (orig) == OFFSET_TYPE)
2094 return fold_build1 (NOP_EXPR, type, arg);
/* Complex source: convert just the real part.  */
2095 if (TREE_CODE (orig) == COMPLEX_TYPE)
2097 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2098 return fold_convert (type, tem);
/* Only a same-size vector source remains legal here.  */
2100 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2101 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2102 return fold_build1 (NOP_EXPR, type, arg);
/* Target is a floating-point type: fold constants eagerly.  */
2105 if (TREE_CODE (arg) == INTEGER_CST)
2107 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2108 if (tem != NULL_TREE)
2111 else if (TREE_CODE (arg) == REAL_CST)
2113 tem = fold_convert_const (NOP_EXPR, type, arg);
2114 if (tem != NULL_TREE)
2118 switch (TREE_CODE (orig))
2121 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2122 case POINTER_TYPE: case REFERENCE_TYPE:
2123 return fold_build1 (FLOAT_EXPR, type, arg);
2126 return fold_build1 (NOP_EXPR, type, arg);
/* Complex source to real target: take the real part.  */
2129 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2130 return fold_convert (type, tem);
/* Target is a complex type.  */
2137 switch (TREE_CODE (orig))
2140 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2141 case POINTER_TYPE: case REFERENCE_TYPE:
/* Scalar source: build (value, 0) in the component type.  */
2143 return build2 (COMPLEX_EXPR, type,
2144 fold_convert (TREE_TYPE (type), arg),
2145 fold_convert (TREE_TYPE (type), integer_zero_node));
/* Complex source already split into parts: convert each part.  */
2150 if (TREE_CODE (arg) == COMPLEX_EXPR)
2152 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2153 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2154 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* Otherwise wrap ARG in a SAVE_EXPR so it is evaluated once while
   both its real and imaginary parts are extracted.  */
2157 arg = save_expr (arg);
2158 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2159 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2160 rpart = fold_convert (TREE_TYPE (type), rpart);
2161 ipart = fold_convert (TREE_TYPE (type), ipart);
2162 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* Target is a vector type: zero stays zero, otherwise a bit-for-bit
   view conversion between same-size types.  */
2170 if (integer_zerop (arg))
2171 return build_zero_vector (type);
2172 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2173 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2174 || TREE_CODE (orig) == VECTOR_TYPE);
2175 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
/* Target is void: drop the value, keeping only side effects.  */
2178 tem = fold_ignored_result (arg);
2179 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2181 return fold_build1 (NOP_EXPR, type, tem);
2188 /* Return false if expr can be assumed not to be an lvalue, true
2192 maybe_lvalue_p (tree x)
2194 /* We only need to wrap lvalue tree codes. */
2195 switch (TREE_CODE (x))
/* The listed codes can denote storage locations (or contain one),
   so callers must wrap them to strip lvalue-ness.  */
2206 case ALIGN_INDIRECT_REF:
2207 case MISALIGNED_INDIRECT_REF:
2209 case ARRAY_RANGE_REF:
2215 case PREINCREMENT_EXPR:
2216 case PREDECREMENT_EXPR:
2218 case TRY_CATCH_EXPR:
2219 case WITH_CLEANUP_EXPR:
2222 case GIMPLE_MODIFY_STMT:
2231 /* Assume the worst for front-end tree codes. */
/* Codes beyond NUM_TREE_CODES belong to the front end and are not
   known here, so conservatively treat them as possible lvalues.  */
2232 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2240 /* Return an expr equal to X but certainly not valid as an lvalue. */
2245 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* Only wrap when X might actually be an lvalue; otherwise return X
   unchanged (cheaper and keeps trees small).  */
2250 if (! maybe_lvalue_p (x))
2252 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2255 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2256 Zero means allow extended lvalues. */
2258 int pedantic_lvalues;
2260 /* When pedantic, return an expr equal to X but certainly not valid as a
2261 pedantic lvalue. Otherwise, return X. */
2264 pedantic_non_lvalue (tree x)
/* Only strip lvalue-ness when the front end requested pedantic
   lvalue rules via the global above.  */
2266 if (pedantic_lvalues)
2267 return non_lvalue (x);
2272 /* Given a tree comparison code, return the code that is the logical inverse
2273 of the given code. It is not safe to do this for floating-point
2274 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2275 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2278 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math and NaNs, inverting could change which inputs
   trap, so refuse (the elided path here presumably returns
   ERROR_MARK except for EQ/NE -- see the function comment).  */
2280 if (honor_nans && flag_trapping_math)
/* Ordered comparisons invert to their unordered counterparts when
   NaNs must be honored: !(a > b) is (a <= b) only without NaNs.  */
2290 return honor_nans ? UNLE_EXPR : LE_EXPR;
2292 return honor_nans ? UNLT_EXPR : LT_EXPR;
2294 return honor_nans ? UNGE_EXPR : GE_EXPR;
2296 return honor_nans ? UNGT_EXPR : GT_EXPR;
/* ORDERED and UNORDERED are exact inverses of each other.  */
2310 return UNORDERED_EXPR;
2311 case UNORDERED_EXPR:
2312 return ORDERED_EXPR;
2318 /* Similar, but return the comparison that results if the operands are
2319 swapped. This is safe for floating-point. */
2322 swap_tree_comparison (enum tree_code code)
/* NOTE(review): the case bodies (mirroring LT<->GT, LE<->GE, etc.)
   appear to be elided in this copy; only this label survives.  */
2329 case UNORDERED_EXPR:
2355 /* Convert a comparison tree code from an enum tree_code representation
2356 into a compcode bit-based encoding. This function is the inverse of
2357 compcode_to_comparison. */
2359 static enum comparison_code
2360 comparison_to_compcode (enum tree_code code)
/* Each COMPCODE_* value encodes a comparison as a bitmask of the
   LT/EQ/GT/UNORD outcomes, so combining comparisons becomes simple
   bitwise arithmetic in combine_comparisons below.  */
2377 return COMPCODE_ORD;
2378 case UNORDERED_EXPR:
2379 return COMPCODE_UNORD;
2381 return COMPCODE_UNLT;
2383 return COMPCODE_UNEQ;
2385 return COMPCODE_UNLE;
2387 return COMPCODE_UNGT;
2389 return COMPCODE_LTGT;
2391 return COMPCODE_UNGE;
2397 /* Convert a compcode bit-based encoding of a comparison operator back
2398 to GCC's enum tree_code representation. This function is the
2399 inverse of comparison_to_compcode. */
2401 static enum tree_code
2402 compcode_to_comparison (enum comparison_code code)
/* Reverse mapping of the bitmask encoding above; only the ORD/UNORD
   arms are visible in this copy.  */
2419 return ORDERED_EXPR;
2420 case COMPCODE_UNORD:
2421 return UNORDERED_EXPR;
2439 /* Return a tree for the comparison which is the combination of
2440 doing the AND or OR (depending on CODE) of the two operations LCODE
2441 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2442 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2443 if this makes the transformation invalid. */
2446 combine_comparisons (enum tree_code code, enum tree_code lcode,
2447 enum tree_code rcode, tree truth_type,
2448 tree ll_arg, tree lr_arg)
2450 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
/* Work in the bitmask encoding: AND/OR of two comparisons on the same
   operands is just bitwise AND/OR of their outcome masks.  */
2451 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2452 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2453 enum comparison_code compcode;
2457 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2458 compcode = lcompcode & rcompcode;
2461 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2462 compcode = lcompcode | rcompcode;
2471 /* Eliminate unordered comparisons, as well as LTGT and ORD
2472 which are not used unless the mode has NaNs. */
2473 compcode &= ~COMPCODE_UNORD;
2474 if (compcode == COMPCODE_LTGT)
2475 compcode = COMPCODE_NE;
2476 else if (compcode == COMPCODE_ORD)
2477 compcode = COMPCODE_TRUE;
2479 else if (flag_trapping_math)
2481 /* Check that the original operation and the optimized ones will trap
2482 under the same condition. */
/* A comparison traps on unordered inputs unless it explicitly allows
   them (UNORD bit set) or is EQ/ORD, which are quiet.  */
2483 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2484 && (lcompcode != COMPCODE_EQ)
2485 && (lcompcode != COMPCODE_ORD);
2486 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2487 && (rcompcode != COMPCODE_EQ)
2488 && (rcompcode != COMPCODE_ORD);
2489 bool trap = (compcode & COMPCODE_UNORD) == 0
2490 && (compcode != COMPCODE_EQ)
2491 && (compcode != COMPCODE_ORD);
2493 /* In a short-circuited boolean expression the LHS might be
2494 such that the RHS, if evaluated, will never trap. For
2495 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2496 if neither x nor y is NaN. (This is a mixed blessing: for
2497 example, the expression above will never trap, hence
2498 optimizing it to x < y would be invalid). */
2499 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2500 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2503 /* If the comparison was short-circuited, and only the RHS
2504 trapped, we may now generate a spurious trap. */
2506 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2509 /* If we changed the conditions that cause a trap, we lose. */
2510 if ((ltrap || rtrap) != trap)
/* Degenerate masks fold to a boolean constant; otherwise rebuild a
   single comparison from the combined mask.  */
2514 if (compcode == COMPCODE_TRUE)
2515 return constant_boolean_node (true, truth_type);
2516 else if (compcode == COMPCODE_FALSE)
2517 return constant_boolean_node (false, truth_type);
2519 return fold_build2 (compcode_to_comparison (compcode),
2520 truth_type, ll_arg, lr_arg);
2523 /* Return nonzero if CODE is a tree code that represents a truth value. */
2526 truth_value_p (enum tree_code code)
/* Truth values arise from comparisons and from the logical
   connective codes listed explicitly below.  */
2528 return (TREE_CODE_CLASS (code) == tcc_comparison
2529 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2530 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2531 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2534 /* Return nonzero if two operands (typically of the same tree node)
2535 are necessarily equal. If either argument has side-effects this
2536 function returns zero. FLAGS modifies behavior as follows:
2538 If OEP_ONLY_CONST is set, only return nonzero for constants.
2539 This function tests whether the operands are indistinguishable;
2540 it does not test whether they are equal using C's == operation.
2541 The distinction is important for IEEE floating point, because
2542 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2543 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2545 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2546 even though it may hold multiple values during a function.
2547 This is because a GCC tree node guarantees that nothing else is
2548 executed between the evaluation of its "operands" (which may often
2549 be evaluated in arbitrary order). Hence if the operands themselves
2550 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2551 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2552 unset means assuming isochronic (or instantaneous) tree equivalence.
2553 Unless comparing arbitrary expression trees, such as from different
2554 statements, this flag can usually be left unset.
2556 If OEP_PURE_SAME is set, then pure functions with identical arguments
2557 are considered the same. It is used when the caller has other ways
2558 to ensure that global memory is unchanged in between. */
/* Return nonzero iff the two trees ARG0 and ARG1 denote the same value,
   under the OEP_* bits in FLAGS (see the comment block preceding this
   function).  NOTE(review): this excerpt is missing some original lines
   (there are gaps in the embedded line numbering), so the comments below
   describe only the logic that is visible here.  */
2561 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2563 /* If either is ERROR_MARK, they aren't equal. */
2564 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2567 /* If both types don't have the same signedness, then we can't consider
2568 them equal. We must check this before the STRIP_NOPS calls
2569 because they may change the signedness of the arguments. */
2570 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2573 /* If both types don't have the same precision, then it is not safe
2575 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2581 /* In case both args are comparisons but with different comparison
2582 code, try to swap the comparison operands of one arg to produce
2583 a match and compare that variant. */
2584 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2585 && COMPARISON_CLASS_P (arg0)
2586 && COMPARISON_CLASS_P (arg1))
2588 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2590 if (TREE_CODE (arg0) == swap_code)
2591 return operand_equal_p (TREE_OPERAND (arg0, 0),
2592 TREE_OPERAND (arg1, 1), flags)
2593 && operand_equal_p (TREE_OPERAND (arg0, 1),
2594 TREE_OPERAND (arg1, 0), flags);
2597 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2598 /* This is needed for conversions and for COMPONENT_REF.
2599 Might as well play it safe and always test this. */
2600 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2601 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2602 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2605 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2606 We don't care about side effects in that case because the SAVE_EXPR
2607 takes care of that for us. In all other cases, two expressions are
2608 equal if they have no side effects. If we have two identical
2609 expressions with side effects that should be treated the same due
2610 to the only side effects being identical SAVE_EXPR's, that will
2611 be detected in the recursive calls below. */
2612 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2613 && (TREE_CODE (arg0) == SAVE_EXPR
2614 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2617 /* Next handle constant cases, those for which we can return 1 even
2618 if ONLY_CONST is set. */
2619 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2620 switch (TREE_CODE (arg0))
2623 return tree_int_cst_equal (arg0, arg1);
2626 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2627 TREE_REAL_CST (arg1)))
2631 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2633 /* If we do not distinguish between signed and unsigned zero,
2634 consider them equal. */
2635 if (real_zerop (arg0) && real_zerop (arg1))
/* VECTOR_CST: walk the two element chains in lockstep.  */
2644 v1 = TREE_VECTOR_CST_ELTS (arg0);
2645 v2 = TREE_VECTOR_CST_ELTS (arg1);
2648 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2651 v1 = TREE_CHAIN (v1);
2652 v2 = TREE_CHAIN (v2);
/* COMPLEX_CST: both the real and imaginary parts must match.  */
2659 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2661 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
/* STRING_CST: same length and same bytes.  */
2665 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2666 && ! memcmp (TREE_STRING_POINTER (arg0),
2667 TREE_STRING_POINTER (arg1),
2668 TREE_STRING_LENGTH (arg0)));
2671 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2677 if (flags & OEP_ONLY_CONST)
2680 /* Define macros to test an operand from arg0 and arg1 for equality and a
2681 variant that allows null and views null as being different from any
2682 non-null value. In the latter case, if either is null, then both
2683 must be; otherwise, do the normal comparison. */
2684 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2685 TREE_OPERAND (arg1, N), flags)
2687 #define OP_SAME_WITH_NULL(N) \
2688 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2689 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2691 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2694 /* Two conversions are equal only if signedness and modes match. */
2695 switch (TREE_CODE (arg0))
2699 case FIX_TRUNC_EXPR:
2700 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2701 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2711 case tcc_comparison:
2713 if (OP_SAME (0) && OP_SAME (1))
2716 /* For commutative ops, allow the other order. */
2717 return (commutative_tree_code (TREE_CODE (arg0))
2718 && operand_equal_p (TREE_OPERAND (arg0, 0),
2719 TREE_OPERAND (arg1, 1), flags)
2720 && operand_equal_p (TREE_OPERAND (arg0, 1),
2721 TREE_OPERAND (arg1, 0), flags));
2724 /* If either of the pointer (or reference) expressions we are
2725 dereferencing contain a side effect, these cannot be equal. */
2726 if (TREE_SIDE_EFFECTS (arg0)
2727 || TREE_SIDE_EFFECTS (arg1))
2730 switch (TREE_CODE (arg0))
2733 case ALIGN_INDIRECT_REF:
2734 case MISALIGNED_INDIRECT_REF:
2740 case ARRAY_RANGE_REF:
2741 /* Operands 2 and 3 may be null. */
2744 && OP_SAME_WITH_NULL (2)
2745 && OP_SAME_WITH_NULL (3));
2748 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2749 may be NULL when we're called to compare MEM_EXPRs. */
2750 return OP_SAME_WITH_NULL (0)
2752 && OP_SAME_WITH_NULL (2);
2755 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2761 case tcc_expression:
2762 switch (TREE_CODE (arg0))
2765 case TRUTH_NOT_EXPR:
2768 case TRUTH_ANDIF_EXPR:
2769 case TRUTH_ORIF_EXPR:
2770 return OP_SAME (0) && OP_SAME (1);
2772 case TRUTH_AND_EXPR:
2774 case TRUTH_XOR_EXPR:
2775 if (OP_SAME (0) && OP_SAME (1))
2778 /* Otherwise take into account this is a commutative operation. */
2779 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2780 TREE_OPERAND (arg1, 1), flags)
2781 && operand_equal_p (TREE_OPERAND (arg0, 1),
2782 TREE_OPERAND (arg1, 0), flags));
2785 /* If the CALL_EXPRs call different functions, then they
2786 clearly can not be equal. */
/* Only const/pure calls (per OEP_PURE_SAME) can compare equal;
   cef collects the callee's ECF_* flags.  */
2791 unsigned int cef = call_expr_flags (arg0);
2792 if (flags & OEP_PURE_SAME)
2793 cef &= ECF_CONST | ECF_PURE;
2800 /* Now see if all the arguments are the same. operand_equal_p
2801 does not handle TREE_LIST, so we walk the operands here
2802 feeding them to operand_equal_p. */
2803 arg0 = TREE_OPERAND (arg0, 1);
2804 arg1 = TREE_OPERAND (arg1, 1);
2805 while (arg0 && arg1)
2807 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2811 arg0 = TREE_CHAIN (arg0);
2812 arg1 = TREE_CHAIN (arg1);
2815 /* If we get here and both argument lists are exhausted
2816 then the CALL_EXPRs are equal. */
2817 return ! (arg0 || arg1);
2823 case tcc_declaration:
2824 /* Consider __builtin_sqrt equal to sqrt. */
2825 return (TREE_CODE (arg0) == FUNCTION_DECL
2826 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2827 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2828 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2835 #undef OP_SAME_WITH_NULL
2838 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2839 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2841 When in doubt, return 0. */
/* Return nonzero if ARG0 might equal ARG1 after ARG1 was shortened by
   shorten_compare while being compared with OTHER (see the comment block
   just above).  NOTE(review): excerpt is missing some original lines;
   only the visible logic is documented.  */
2844 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2846 int unsignedp1, unsignedpo;
2847 tree primarg0, primarg1, primother;
2848 unsigned int correct_width;
/* Trivially equal operands need no further work.  */
2850 if (operand_equal_p (arg0, arg1, 0))
/* Only integral types are handled below.  */
2853 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2854 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2857 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2858 and see if the inner values are the same. This removes any
2859 signedness comparison, which doesn't matter here. */
2860 primarg0 = arg0, primarg1 = arg1;
2861 STRIP_NOPS (primarg0);
2862 STRIP_NOPS (primarg1);
2863 if (operand_equal_p (primarg0, primarg1, 0))
2866 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2867 actual comparison operand, ARG0.
2869 First throw away any conversions to wider types
2870 already present in the operands. */
2872 primarg1 = get_narrower (arg1, &unsignedp1);
2873 primother = get_narrower (other, &unsignedpo);
2875 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2876 if (unsignedp1 == unsignedpo
2877 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2878 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2880 tree type = TREE_TYPE (arg0);
2882 /* Make sure shorter operand is extended the right way
2883 to match the longer operand. */
2884 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2885 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2887 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2894 /* See if ARG is an expression that is either a comparison or is performing
2895 arithmetic on comparisons. The comparisons must only be comparing
2896 two different values, which will be stored in *CVAL1 and *CVAL2; if
2897 they are nonzero it means that some operands have already been found.
2898 No variables may be used anywhere else in the expression except in the
2899 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2900 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2902 If this is true, return 1. Otherwise, return zero. */
/* Return 1 if ARG is an expression built from comparisons of at most two
   distinct values, recording those values in *CVAL1/*CVAL2 and setting
   *SAVE_P when a SAVE_EXPR was stripped (see the comment block above).
   NOTE(review): excerpt is missing some original lines (e.g. the switch
   header and several returns); only the visible logic is documented.  */
2905 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2907 enum tree_code code = TREE_CODE (arg);
2908 enum tree_code_class class = TREE_CODE_CLASS (code);
2910 /* We can handle some of the tcc_expression cases here. */
2911 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2913 else if (class == tcc_expression
2914 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2915 || code == COMPOUND_EXPR))
2918 else if (class == tcc_expression && code == SAVE_EXPR
2919 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2921 /* If we've already found a CVAL1 or CVAL2, this expression is
2922 too complex to handle. */
2923 if (*cval1 || *cval2)
/* Unary case: recurse into the single operand.  */
2933 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must qualify.  */
2936 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2937 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2938 cval1, cval2, save_p));
2943 case tcc_expression:
2944 if (code == COND_EXPR)
2945 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2946 cval1, cval2, save_p)
2947 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2948 cval1, cval2, save_p)
2949 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2950 cval1, cval2, save_p));
2953 case tcc_comparison:
2954 /* First see if we can handle the first operand, then the second. For
2955 the second operand, we know *CVAL1 can't be zero. It must be that
2956 one side of the comparison is each of the values; test for the
2957 case where this isn't true by failing if the two operands
2960 if (operand_equal_p (TREE_OPERAND (arg, 0),
2961 TREE_OPERAND (arg, 1), 0))
/* Record operand 0 as CVAL1 or CVAL2, or match it against one
   already recorded.  */
2965 *cval1 = TREE_OPERAND (arg, 0);
2966 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2968 else if (*cval2 == 0)
2969 *cval2 = TREE_OPERAND (arg, 0);
2970 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for operand 1.  */
2975 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2977 else if (*cval2 == 0)
2978 *cval2 = TREE_OPERAND (arg, 1);
2979 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2991 /* ARG is a tree that is known to contain just arithmetic operations and
2992 comparisons. Evaluate the operations in the tree substituting NEW0 for
2993 any occurrence of OLD0 as an operand of a comparison and likewise for
/* Rebuild ARG, substituting NEW0 for OLD0 and NEW1 for OLD1 wherever they
   appear as comparison operands (see the comment block above).
   NOTE(review): excerpt is missing some original lines (the switch header
   and the substitution assignments inside the comparison case); only the
   visible logic is documented.  */
2997 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2999 tree type = TREE_TYPE (arg);
3000 enum tree_code code = TREE_CODE (arg);
3001 enum tree_code_class class = TREE_CODE_CLASS (code);
3003 /* We can handle some of the tcc_expression cases here. */
3004 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3006 else if (class == tcc_expression
3007 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary: rebuild with the substituted operand.  */
3013 return fold_build1 (code, type,
3014 eval_subst (TREE_OPERAND (arg, 0),
3015 old0, new0, old1, new1));
/* Binary: substitute in both operands.  */
3018 return fold_build2 (code, type,
3019 eval_subst (TREE_OPERAND (arg, 0),
3020 old0, new0, old1, new1),
3021 eval_subst (TREE_OPERAND (arg, 1),
3022 old0, new0, old1, new1));
3024 case tcc_expression:
3028 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3031 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary (e.g. COND_EXPR): substitute in all three operands.  */
3034 return fold_build3 (code, type,
3035 eval_subst (TREE_OPERAND (arg, 0),
3036 old0, new0, old1, new1),
3037 eval_subst (TREE_OPERAND (arg, 1),
3038 old0, new0, old1, new1),
3039 eval_subst (TREE_OPERAND (arg, 2),
3040 old0, new0, old1, new1));
3044 /* Fall through - ??? */
3046 case tcc_comparison:
3048 tree arg0 = TREE_OPERAND (arg, 0);
3049 tree arg1 = TREE_OPERAND (arg, 1);
3051 /* We need to check both for exact equality and tree equality. The
3052 former will be true if the operand has a side-effect. In that
3053 case, we know the operand occurred exactly once. */
3055 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3057 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3060 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3062 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
/* Rebuild the comparison with the (possibly replaced) operands.  */
3065 return fold_build2 (code, type, arg0, arg1);
3073 /* Return a tree for the case when the result of an expression is RESULT
3074 converted to TYPE and OMITTED was previously an operand of the expression
3075 but is now not needed (e.g., we folded OMITTED * 0).
3077 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3078 the conversion of RESULT to TYPE. */
/* Convert RESULT to TYPE while preserving any side effects of the dropped
   operand OMITTED via a COMPOUND_EXPR (see the comment block above).  */
3081 omit_one_operand (tree type, tree result, tree omitted)
3083 tree t = fold_convert (type, result);
3085 if (TREE_SIDE_EFFECTS (omitted))
3086 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
/* No side effects: just make sure the result is not an lvalue.  */
3088 return non_lvalue (t);
3091 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
/* Like omit_one_operand, but wraps the result with pedantic_non_lvalue
   instead of non_lvalue.  */
3094 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3096 tree t = fold_convert (type, result);
3098 if (TREE_SIDE_EFFECTS (omitted))
3099 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3101 return pedantic_non_lvalue (t);
3104 /* Return a tree for the case when the result of an expression is RESULT
3105 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3106 of the expression but are now not needed.
3108 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3109 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3110 evaluated before OMITTED2. Otherwise, if neither has side effects,
3111 just do the conversion of RESULT to TYPE. */
/* Convert RESULT to TYPE, preserving side effects of the two dropped
   operands; OMITTED1 is evaluated before OMITTED2 (see comment above).  */
3114 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3116 tree t = fold_convert (type, result);
/* Wrap innermost-first so OMITTED1 ends up outermost and thus first.  */
3118 if (TREE_SIDE_EFFECTS (omitted2))
3119 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3120 if (TREE_SIDE_EFFECTS (omitted1))
3121 t = build2 (COMPOUND_EXPR, type, omitted1, t);
/* Only force non-lvalue when nothing was wrapped.  */
3123 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3127 /* Return a simplified tree node for the truth-negation of ARG. This
3128 never alters ARG itself. We assume that ARG is an operation that
3129 returns a truth value (0 or 1).
3131 FIXME: one would think we would fold the result, but it causes
3132 problems with the dominator optimizer. */
/* Return a simplified truth-negation of ARG, or (presumably) NULL_TREE when
   no simplification applies -- TODO confirm; the fallthrough returns are not
   visible in this excerpt.  NOTE(review): excerpt is missing some original
   lines (switch header, several case labels); only visible logic is
   documented.  */
3135 fold_truth_not_expr (tree arg)
3137 tree type = TREE_TYPE (arg);
3138 enum tree_code code = TREE_CODE (arg);
3140 /* If this is a comparison, we can simply invert it, except for
3141 floating-point non-equality comparisons, in which case we just
3142 enclose a TRUTH_NOT_EXPR around what we have. */
3144 if (TREE_CODE_CLASS (code) == tcc_comparison)
3146 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3147 if (FLOAT_TYPE_P (op_type)
3148 && flag_trapping_math
3149 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3150 && code != NE_EXPR && code != EQ_EXPR)
/* Invert the comparison code, honoring NaNs for FP modes.  */
3154 code = invert_tree_comparison (code,
3155 HONOR_NANS (TYPE_MODE (op_type)));
3156 if (code == ERROR_MARK)
3159 return build2 (code, type,
3160 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant: !0 -> 1, !nonzero -> 0.  */
3167 return constant_boolean_node (integer_zerop (arg), type);
3169 case TRUTH_AND_EXPR:
/* De Morgan: !(a & b) == !a | !b.  */
3170 return build2 (TRUTH_OR_EXPR, type,
3171 invert_truthvalue (TREE_OPERAND (arg, 0)),
3172 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* De Morgan: !(a | b) == !a & !b.  */
3175 return build2 (TRUTH_AND_EXPR, type,
3176 invert_truthvalue (TREE_OPERAND (arg, 0)),
3177 invert_truthvalue (TREE_OPERAND (arg, 1)));
3179 case TRUTH_XOR_EXPR:
3180 /* Here we can invert either operand. We invert the first operand
3181 unless the second operand is a TRUTH_NOT_EXPR in which case our
3182 result is the XOR of the first operand with the inside of the
3183 negation of the second operand. */
3185 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3186 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3187 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3189 return build2 (TRUTH_XOR_EXPR, type,
3190 invert_truthvalue (TREE_OPERAND (arg, 0)),
3191 TREE_OPERAND (arg, 1));
3193 case TRUTH_ANDIF_EXPR:
/* Short-circuit De Morgan: !(a && b) == !a || !b.  */
3194 return build2 (TRUTH_ORIF_EXPR, type,
3195 invert_truthvalue (TREE_OPERAND (arg, 0)),
3196 invert_truthvalue (TREE_OPERAND (arg, 1)));
3198 case TRUTH_ORIF_EXPR:
3199 return build2 (TRUTH_ANDIF_EXPR, type,
3200 invert_truthvalue (TREE_OPERAND (arg, 0)),
3201 invert_truthvalue (TREE_OPERAND (arg, 1)));
3203 case TRUTH_NOT_EXPR:
/* Double negation cancels.  */
3204 return TREE_OPERAND (arg, 0);
/* COND_EXPR: invert both arms, keeping the condition.  */
3208 tree arg1 = TREE_OPERAND (arg, 1);
3209 tree arg2 = TREE_OPERAND (arg, 2);
3210 /* A COND_EXPR may have a throw as one operand, which
3211 then has void type. Just leave void operands
3213 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3214 VOID_TYPE_P (TREE_TYPE (arg1))
3215 ? arg1 : invert_truthvalue (arg1),
3216 VOID_TYPE_P (TREE_TYPE (arg2))
3217 ? arg2 : invert_truthvalue (arg2));
/* COMPOUND_EXPR: negate only the value operand.  */
3221 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3222 invert_truthvalue (TREE_OPERAND (arg, 1)));
3224 case NON_LVALUE_EXPR:
3225 return invert_truthvalue (TREE_OPERAND (arg, 0));
/* Conversions: a boolean NOP can be negated directly ...  */
3228 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3229 return build1 (TRUTH_NOT_EXPR, type, arg);
/* ... otherwise push the negation through the conversion.  */
3233 return build1 (TREE_CODE (arg), type,
3234 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* BIT_AND_EXPR with mask 1: !(x & 1) == (x == 0).  */
3237 if (!integer_onep (TREE_OPERAND (arg, 1)))
3239 return build2 (EQ_EXPR, type, arg,
3240 build_int_cst (type, 0));
3243 return build1 (TRUTH_NOT_EXPR, type, arg);
3245 case CLEANUP_POINT_EXPR:
3246 return build1 (CLEANUP_POINT_EXPR, type,
3247 invert_truthvalue (TREE_OPERAND (arg, 0)));
3256 /* Return a simplified tree node for the truth-negation of ARG. This
3257 never alters ARG itself. We assume that ARG is an operation that
3258 returns a truth value (0 or 1).
3260 FIXME: one would think we would fold the result, but it causes
3261 problems with the dominator optimizer. */
/* Public entry point: try fold_truth_not_expr, falling back to an explicit
   TRUTH_NOT_EXPR when no simplification applies.  NOTE(review): the lines
   declaring `tem`, the ERROR_MARK return and the final return are missing
   from this excerpt.  */
3264 invert_truthvalue (tree arg)
3268 if (TREE_CODE (arg) == ERROR_MARK)
3271 tem = fold_truth_not_expr (arg);
3273 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3278 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3279 operands are another bit-wise operation with a common input. If so,
3280 distribute the bit operations to save an operation and possibly two if
3281 constants are involved. For example, convert
3282 (A | B) & (A | C) into A | (B & C)
3283 Further simplification will occur if B and C are constants.
3285 If this optimization cannot be done, 0 will be returned. */
/* Distribute CODE over a pair of bitwise expressions sharing one operand,
   e.g. (A | B) & (A | C) -> A | (B & C); returns 0 when inapplicable (see
   comment above).  NOTE(review): local declarations and some returns are
   missing from this excerpt.  */
3288 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must be the same BIT_AND/BIT_IOR, different from CODE.  */
3293 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3294 || TREE_CODE (arg0) == code
3295 || (TREE_CODE (arg0) != BIT_AND_EXPR
3296 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the shared operand (four possible positions).  */
3299 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3301 common = TREE_OPERAND (arg0, 0);
3302 left = TREE_OPERAND (arg0, 1);
3303 right = TREE_OPERAND (arg1, 1);
3305 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3307 common = TREE_OPERAND (arg0, 0);
3308 left = TREE_OPERAND (arg0, 1);
3309 right = TREE_OPERAND (arg1, 0);
3311 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3313 common = TREE_OPERAND (arg0, 1);
3314 left = TREE_OPERAND (arg0, 0);
3315 right = TREE_OPERAND (arg1, 1);
3317 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3319 common = TREE_OPERAND (arg0, 1);
3320 left = TREE_OPERAND (arg0, 0);
3321 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON op (LEFT code RIGHT).  */
3326 return fold_build2 (TREE_CODE (arg0), type, common,
3327 fold_build2 (code, type, left, right));
3330 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3331 with code CODE. This optimization is unsafe. */
/* Simplify (A / C) +- (B / C) and (A / C1) +- (A / C2) for RDIV_EXPRs;
   explicitly unsafe for strict FP semantics (see comment above).
   NOTE(review): some lines (e.g. part of the first condition and the final
   return) are missing from this excerpt.  */
3333 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3335 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3336 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3338 /* (A / C) +- (B / C) -> (A +- B) / C. */
3340 && operand_equal_p (TREE_OPERAND (arg0, 1),
3341 TREE_OPERAND (arg1, 1), 0))
3342 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3343 fold_build2 (code, type,
3344 TREE_OPERAND (arg0, 0),
3345 TREE_OPERAND (arg1, 0)),
3346 TREE_OPERAND (arg0, 1));
3348 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3349 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3350 TREE_OPERAND (arg1, 0), 0)
3351 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3352 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3354 REAL_VALUE_TYPE r0, r1;
3355 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3356 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
/* Take reciprocals of the divisors, then combine with CODE.  */
3358 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3360 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3361 real_arithmetic (&r0, code, &r0, &r1);
3362 return fold_build2 (MULT_EXPR, type,
3363 TREE_OPERAND (arg0, 0),
3364 build_real (type, r0));
3370 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3371 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
/* Build a BIT_FIELD_REF of TYPE covering BITSIZE bits of INNER starting at
   BITPOS; UNSIGNEDP marks the field unsigned (see comment above).
   NOTE(review): parameter tail, `result` declaration and final return are
   missing from this excerpt.  */
3374 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
/* Fast path: when the whole object is referenced, a conversion suffices.  */
3381 tree size = TYPE_SIZE (TREE_TYPE (inner));
3382 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3383 || POINTER_TYPE_P (TREE_TYPE (inner)))
3384 && host_integerp (size, 0)
3385 && tree_low_cst (size, 0) == bitsize)
3386 return fold_convert (type, inner);
3389 result = build3 (BIT_FIELD_REF, type, inner,
3390 size_int (bitsize), bitsize_int (bitpos));
3392 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3397 /* Optimize a bit-field compare.
3399 There are two cases: First is a compare against a constant and the
3400 second is a comparison of two items where the fields are at the same
3401 bit position relative to the start of a chunk (byte, halfword, word)
3402 large enough to contain it. In these cases we can avoid the shift
3403 implicit in bitfield extractions.
3405 For constants, we emit a compare of the shifted constant with the
3406 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3407 compared. For two fields at the same position, we do the ANDs with the
3408 similar mask and compare the result of the ANDs.
3410 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3411 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3412 are the left and right operands of the comparison, respectively.
3414 If the optimization described above can be done, we return the resulting
3415 tree. Otherwise we return zero. */
/* Optimize a bit-field EQ/NE comparison into mask-and-compare form (see the
   comment block above); returns the new tree or zero.  NOTE(review): this
   excerpt is missing a number of original lines (parameter tail, several
   returns and intermediate statements); only the visible logic is
   documented.  */
3418 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3421 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3422 tree type = TREE_TYPE (lhs);
3423 tree signed_type, unsigned_type;
3424 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3425 enum machine_mode lmode, rmode, nmode;
3426 int lunsignedp, runsignedp;
3427 int lvolatilep = 0, rvolatilep = 0;
3428 tree linner, rinner = NULL_TREE;
3432 /* Get all the information about the extractions being done. If the bit size
3433 if the same as the size of the underlying object, we aren't doing an
3434 extraction at all and so can do nothing. We also don't want to
3435 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3436 then will no longer be able to replace it. */
3437 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3438 &lunsignedp, &lvolatilep, false);
3439 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3440 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3445 /* If this is not a constant, we can only do something if bit positions,
3446 sizes, and signedness are the same. */
3447 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3448 &runsignedp, &rvolatilep, false);
3450 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3451 || lunsignedp != runsignedp || offset != 0
3452 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3456 /* See if we can find a mode to refer to this field. We should be able to,
3457 but fail if we can't. */
3458 nmode = get_best_mode (lbitsize, lbitpos,
3459 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3460 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3461 TYPE_ALIGN (TREE_TYPE (rinner))),
3462 word_mode, lvolatilep || rvolatilep);
3463 if (nmode == VOIDmode)
3466 /* Set signed and unsigned types of the precision of this mode for the
3468 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3469 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3471 /* Compute the bit position and size for the new reference and our offset
3472 within it. If the new reference is the same size as the original, we
3473 won't optimize anything, so return zero. */
3474 nbitsize = GET_MODE_BITSIZE (nmode);
3475 nbitpos = lbitpos & ~ (nbitsize - 1);
3477 if (nbitsize == lbitsize)
/* Adjust the bit position for big-endian layouts.  */
3480 if (BYTES_BIG_ENDIAN)
3481 lbitpos = nbitsize - lbitsize - lbitpos;
3483 /* Make the mask to be used against the extracted field. */
3484 mask = build_int_cst_type (unsigned_type, -1);
3485 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3486 mask = const_binop (RSHIFT_EXPR, mask,
3487 size_int (nbitsize - lbitsize - lbitpos), 0);
3490 /* If not comparing with constant, just rework the comparison
3492 return fold_build2 (code, compare_type,
3493 fold_build2 (BIT_AND_EXPR, unsigned_type,
3494 make_bit_field_ref (linner,
3499 fold_build2 (BIT_AND_EXPR, unsigned_type,
3500 make_bit_field_ref (rinner,
3506 /* Otherwise, we are handling the constant case. See if the constant is too
3507 big for the field. Warn and return a tree of for 0 (false) if so. We do
3508 this not only for its own sake, but to avoid having to test for this
3509 error case below. If we didn't, we might generate wrong code.
3511 For unsigned fields, the constant shifted right by the field length should
3512 be all zero. For signed fields, the high-order bits should agree with
3517 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3518 fold_convert (unsigned_type, rhs),
3519 size_int (lbitsize), 0)))
3521 warning (0, "comparison is always %d due to width of bit-field",
3523 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed case: the bits above the field must all equal the sign bit.  */
3528 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3529 size_int (lbitsize - 1), 0);
3530 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3532 warning (0, "comparison is always %d due to width of bit-field",
3534 return constant_boolean_node (code == NE_EXPR, compare_type);
3538 /* Single-bit compares should always be against zero. */
3539 if (lbitsize == 1 && ! integer_zerop (rhs))
3541 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3542 rhs = build_int_cst (type, 0);
3545 /* Make a new bitfield reference, shift the constant over the
3546 appropriate number of bits and mask it with the computed mask
3547 (in case this was a signed field). If we changed it, make a new one. */
3548 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
/* Preserve volatility/side-effect flags on the new reference.  */
3551 TREE_SIDE_EFFECTS (lhs) = 1;
3552 TREE_THIS_VOLATILE (lhs) = 1;
3555 rhs = const_binop (BIT_AND_EXPR,
3556 const_binop (LSHIFT_EXPR,
3557 fold_convert (unsigned_type, rhs),
3558 size_int (lbitpos), 0),
3561 return build2 (code, compare_type,
3562 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3566 /* Subroutine for fold_truthop: decode a field reference.
3568 If EXP is a comparison reference, we return the innermost reference.
3570 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3571 set to the starting bit number.
3573 If the innermost field can be completely contained in a mode-sized
3574 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3576 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3577 otherwise it is not changed.
3579 *PUNSIGNEDP is set to the signedness of the field.
3581 *PMASK is set to the mask used. This is either contained in a
3582 BIT_AND_EXPR or derived from the width of the field.
3584 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3586 Return 0 if this is not a component reference or is one that we can't
3587 do anything with. */
/* Decode a field reference EXP for fold_truthop, returning the innermost
   reference and filling the out-parameters described in the comment block
   above; returns 0 when EXP is unusable.  NOTE(review): some original lines
   (several returns, a STRIP_NOPS, the `if (and_mask != 0)` guard) are
   missing from this excerpt.  */
3590 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3591 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3592 int *punsignedp, int *pvolatilep,
3593 tree *pmask, tree *pand_mask)
3595 tree outer_type = 0;
3597 tree mask, inner, offset;
3599 unsigned int precision;
3601 /* All the optimizations using this function assume integer fields.
3602 There are problems with FP fields since the type_for_size call
3603 below can fail for, e.g., XFmode. */
3604 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3607 /* We are interested in the bare arrangement of bits, so strip everything
3608 that doesn't affect the machine mode. However, record the type of the
3609 outermost expression if it may matter below. */
3610 if (TREE_CODE (exp) == NOP_EXPR
3611 || TREE_CODE (exp) == CONVERT_EXPR
3612 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3613 outer_type = TREE_TYPE (exp);
3616 if (TREE_CODE (exp) == BIT_AND_EXPR)
3618 and_mask = TREE_OPERAND (exp, 1);
3619 exp = TREE_OPERAND (exp, 0);
3620 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3621 if (TREE_CODE (and_mask) != INTEGER_CST)
3625 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3626 punsignedp, pvolatilep, false);
3627 if ((inner == exp && and_mask == 0)
3628 || *pbitsize < 0 || offset != 0
3629 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3632 /* If the number of bits in the reference is the same as the bitsize of
3633 the outer type, then the outer type gives the signedness. Otherwise
3634 (in case of a small bitfield) the signedness is unchanged. */
3635 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3636 *punsignedp = TYPE_UNSIGNED (outer_type);
3638 /* Compute the mask to access the bitfield. */
3639 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3640 precision = TYPE_PRECISION (unsigned_type);
/* All-ones, then shift left and right to keep exactly *pbitsize bits.  */
3642 mask = build_int_cst_type (unsigned_type, -1);
3644 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3645 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3647 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3649 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3650 fold_convert (unsigned_type, and_mask), mask);
3653 *pand_mask = and_mask;
3657 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
/* Return nonzero if MASK is SIZE one-bits in the low-order positions of its
   type.  NOTE(review): the `tmask` declaration and the start of the return
   expression are missing from this excerpt.  */
3661 all_ones_mask_p (tree mask, int size)
3663 tree type = TREE_TYPE (mask);
3664 unsigned int precision = TYPE_PRECISION (type);
3667 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
/* Compare MASK against all-ones shifted to occupy the low SIZE bits.  */
3670 tree_int_cst_equal (mask,
3671 const_binop (RSHIFT_EXPR,
3672 const_binop (LSHIFT_EXPR, tmask,
3673 size_int (precision - size),
3675 size_int (precision - size), 0));
3678 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3679 represents the sign bit of EXP's type. If EXP represents a sign
3680 or zero extension, also test VAL against the unextended type.
3681 The return value is the (sub)expression whose sign bit is VAL,
3682 or NULL_TREE otherwise. */
/* Return the (sub)expression whose sign bit is the constant VAL, or
   NULL_TREE (see comment above).  NOTE(review): some declarations and the
   branches assigning hi/lo for the narrow case are missing from this
   excerpt.  */
3685 sign_bit_p (tree exp, tree val)
3687 unsigned HOST_WIDE_INT mask_lo, lo;
3688 HOST_WIDE_INT mask_hi, hi;
3692 /* Tree EXP must have an integral type. */
3693 t = TREE_TYPE (exp);
3694 if (! INTEGRAL_TYPE_P (t))
3697 /* Tree VAL must be an integer constant. */
3698 if (TREE_CODE (val) != INTEGER_CST
3699 || TREE_CONSTANT_OVERFLOW (val))
3702 width = TYPE_PRECISION (t);
/* Wide case: the sign bit lives in the high HOST_WIDE_INT word.  */
3703 if (width > HOST_BITS_PER_WIDE_INT)
3705 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3708 mask_hi = ((unsigned HOST_WIDE_INT) -1
3709 >> (2 * HOST_BITS_PER_WIDE_INT - width));
/* Narrow case: the sign bit lives in the low word.  */
3715 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3718 mask_lo = ((unsigned HOST_WIDE_INT) -1
3719 >> (HOST_BITS_PER_WIDE_INT - width));
3722 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3723 treat VAL as if it were unsigned. */
3724 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3725 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3728 /* Handle extension from a narrower type. */
3729 if (TREE_CODE (exp) == NOP_EXPR
3730 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3731 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3736 /* Subroutine for fold_truthop: determine if an operand is simple enough
3737 to be evaluated unconditionally. */
/* Return nonzero if EXP is simple enough for fold_truthop to evaluate
   unconditionally: a constant, an SSA name, or a cheap local non-volatile
   declaration.  NOTE(review): the STRIP_NOPS call and the DECL_P test line
   appear to be missing from this excerpt.  */
3740 simple_operand_p (tree exp)
3742 /* Strip any conversions that don't change the machine mode. */
3745 return (CONSTANT_CLASS_P (exp)
3746 || TREE_CODE (exp) == SSA_NAME
3748 && ! TREE_ADDRESSABLE (exp)
3749 && ! TREE_THIS_VOLATILE (exp)
3750 && ! DECL_NONLOCAL (exp)
3751 /* Don't regard global variables as simple. They may be
3752 allocated in ways unknown to the compiler (shared memory,
3753 #pragma weak, etc). */
3754 && ! TREE_PUBLIC (exp)
3755 && ! DECL_EXTERNAL (exp)
3756 /* Loading a static variable is unduly expensive, but global
3757 registers aren't expensive. */
3758 && (! TREE_STATIC (exp) || DECL_REGISTER (exp)));
3761 /* The following functions are subroutines to fold_range_test and allow it to
3762 try to change a logical combination of comparisons into a range test.
3765 X == 2 || X == 3 || X == 4 || X == 5
3769 (unsigned) (X - 2) <= 3
3771 We describe each set of comparisons as being either inside or outside
3772 a range, using a variable named like IN_P, and then describe the
3773 range with a lower and upper bound. If one of the bounds is omitted,
3774 it represents either the highest or lowest value of the type.
3776 In the comments below, we represent a range by two numbers in brackets
3777 preceded by a "+" to designate being inside that range, or a "-" to
3778 designate being outside that range, so the condition can be inverted by
3779 flipping the prefix. An omitted bound is represented by a "-". For
3780 example, "- [-, 10]" means being outside the range starting at the lowest
3781 possible value and ending at 10, in other words, being greater than 10.
3782 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3785 We set up things so that the missing bounds are handled in a consistent
3786 manner so neither a missing bound nor "true" and "false" need to be
3787 handled using a special case. */
3789 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3790 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3791 and UPPER1_P are nonzero if the respective argument is an upper bound
3792 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3793 must be specified for a comparison. ARG1 will be converted to ARG0's
3794 type if both are specified. */
/* NOTE(review): this excerpt elides some original lines (return type,
   local declarations, the switch on CODE and its case labels).  The six
   assignments to RESULT below apparently correspond to the EQ/NE/LT/LE/
   GT/GE comparisons of the infinity signs SGN0 and SGN1 — confirm
   against the full source.  */
3797 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3798 tree arg1, int upper1_p)
3804 /* If neither arg represents infinity, do the normal operation.
3805 Else, if not a comparison, return infinity. Else handle the special
3806 comparison rules. Note that most of the cases below won't occur, but
3807 are handled for consistency. */
3809 if (arg0 != 0 && arg1 != 0)
3811 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3812 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3814 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3817 if (TREE_CODE_CLASS (code) != tcc_comparison)
3820 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3821 for neither. In real maths, we cannot assume open ended ranges are
3822 the same. But, this is computer arithmetic, where numbers are finite.
3823 We can therefore make the transformation of any unbounded range with
3824 the value Z, Z being greater than any representable number. This permits
3825 us to treat unbounded ranges as equal. */
3826 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3827 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3831 result = sgn0 == sgn1;
3834 result = sgn0 != sgn1;
3837 result = sgn0 < sgn1;
3840 result = sgn0 <= sgn1;
3843 result = sgn0 > sgn1;
3846 result = sgn0 >= sgn1;
/* Package the integer RESULT as a boolean tree node of TYPE.  */
3852 return constant_boolean_node (result, type);
3855 /* Given EXP, a logical expression, set the range it is testing into
3856 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3857 actually being tested. *PLOW and *PHIGH will be made of the same type
3858 as the returned expression. If EXP is not a comparison, we will most
3859 likely not be returning a useful value and range. */
/* NOTE(review): many original lines are elided in this excerpt (the
   return type, the enclosing while-loop and its braces, several case
   labels, "continue"/"break" statements, and goto targets), so the
   control flow shown here is not complete.  Comments added below are
   limited to what the visible lines establish.  */
3862 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3864 enum tree_code code;
3865 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3866 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3868 tree low, high, n_low, n_high;
3870 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3871 and see if we can refine the range. Some of the cases below may not
3872 happen, but it doesn't seem worth worrying about this. We "continue"
3873 the outer loop when we've changed something; otherwise we "break"
3874 the switch, which will "break" the while. */
3877 low = high = build_int_cst (TREE_TYPE (exp), 0);
3881 code = TREE_CODE (exp);
3882 exp_type = TREE_TYPE (exp);
/* Extract up to two operands depending on the tree-code class.  */
3884 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3886 if (TREE_CODE_LENGTH (code) > 0)
3887 arg0 = TREE_OPERAND (exp, 0);
3888 if (TREE_CODE_CLASS (code) == tcc_comparison
3889 || TREE_CODE_CLASS (code) == tcc_unary
3890 || TREE_CODE_CLASS (code) == tcc_binary)
3891 arg0_type = TREE_TYPE (arg0);
3892 if (TREE_CODE_CLASS (code) == tcc_binary
3893 || TREE_CODE_CLASS (code) == tcc_comparison
3894 || (TREE_CODE_CLASS (code) == tcc_expression
3895 && TREE_CODE_LENGTH (code) > 1))
3896 arg1 = TREE_OPERAND (exp, 1);
3901 case TRUTH_NOT_EXPR:
3902 in_p = ! in_p, exp = arg0;
3905 case EQ_EXPR: case NE_EXPR:
3906 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3907 /* We can only do something if the range is testing for zero
3908 and if the second operand is an integer constant. Note that
3909 saying something is "in" the range we make is done by
3910 complementing IN_P since it will set in the initial case of
3911 being not equal to zero; "out" is leaving it alone. */
3912 if (low == 0 || high == 0
3913 || ! integer_zerop (low) || ! integer_zerop (high)
3914 || TREE_CODE (arg1) != INTEGER_CST)
3919 case NE_EXPR: /* - [c, c] */
3922 case EQ_EXPR: /* + [c, c] */
3923 in_p = ! in_p, low = high = arg1;
3925 case GT_EXPR: /* - [-, c] */
3926 low = 0, high = arg1;
3928 case GE_EXPR: /* + [c, -] */
3929 in_p = ! in_p, low = arg1, high = 0;
3931 case LT_EXPR: /* - [c, -] */
3932 low = arg1, high = 0;
3934 case LE_EXPR: /* + [-, c] */
3935 in_p = ! in_p, low = 0, high = arg1;
3941 /* If this is an unsigned comparison, we also know that EXP is
3942 greater than or equal to zero. We base the range tests we make
3943 on that fact, so we record it here so we can parse existing
3944 range tests. We test arg0_type since often the return type
3945 of, e.g. EQ_EXPR, is boolean. */
3946 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3948 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3950 build_int_cst (arg0_type, 0),
3954 in_p = n_in_p, low = n_low, high = n_high;
3956 /* If the high bound is missing, but we have a nonzero low
3957 bound, reverse the range so it goes from zero to the low bound
3959 if (high == 0 && low && ! integer_zerop (low))
3962 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3963 integer_one_node, 0);
3964 low = build_int_cst (arg0_type, 0);
3972 /* (-x) IN [a,b] -> x in [-b, -a] */
3973 n_low = range_binop (MINUS_EXPR, exp_type,
3974 build_int_cst (exp_type, 0),
3976 n_high = range_binop (MINUS_EXPR, exp_type,
3977 build_int_cst (exp_type, 0),
3979 low = n_low, high = n_high;
/* NOTE(review): case label elided here; presumably BIT_NOT_EXPR,
   rewritten as -X - 1 below — confirm against the full source.  */
3985 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3986 build_int_cst (exp_type, 1));
3989 case PLUS_EXPR: case MINUS_EXPR:
3990 if (TREE_CODE (arg1) != INTEGER_CST)
3993 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3994 move a constant to the other side. */
3995 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3998 /* If EXP is signed, any overflow in the computation is undefined,
3999 so we don't worry about it so long as our computations on
4000 the bounds don't overflow. For unsigned, overflow is defined
4001 and this is exactly the right thing. */
4002 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4003 arg0_type, low, 0, arg1, 0);
4004 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4005 arg0_type, high, 1, arg1, 0);
4006 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4007 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4010 /* Check for an unsigned range which has wrapped around the maximum
4011 value thus making n_high < n_low, and normalize it. */
4012 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4014 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4015 integer_one_node, 0);
4016 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4017 integer_one_node, 0);
4019 /* If the range is of the form +/- [ x+1, x ], we won't
4020 be able to normalize it. But then, it represents the
4021 whole range or the empty set, so make it
4023 if (tree_int_cst_equal (n_low, low)
4024 && tree_int_cst_equal (n_high, high))
4030 low = n_low, high = n_high;
4035 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4036 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4039 if (! INTEGRAL_TYPE_P (arg0_type)
4040 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4041 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4044 n_low = low, n_high = high;
4047 n_low = fold_convert (arg0_type, n_low);
4050 n_high = fold_convert (arg0_type, n_high);
4053 /* If we're converting arg0 from an unsigned type, to exp,
4054 a signed type, we will be doing the comparison as unsigned.
4055 The tests above have already verified that LOW and HIGH
4058 So we have to ensure that we will handle large unsigned
4059 values the same way that the current signed bounds treat
4062 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4065 tree equiv_type = lang_hooks.types.type_for_mode
4066 (TYPE_MODE (arg0_type), 1);
4068 /* A range without an upper bound is, naturally, unbounded.
4069 Since convert would have cropped a very large value, use
4070 the max value for the destination type. */
4072 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4073 : TYPE_MAX_VALUE (arg0_type);
4075 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4076 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4077 fold_convert (arg0_type,
4079 build_int_cst (arg0_type, 1));
4081 /* If the low bound is specified, "and" the range with the
4082 range for which the original unsigned value will be
4086 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4087 1, n_low, n_high, 1,
4088 fold_convert (arg0_type,
4093 in_p = (n_in_p == in_p);
4097 /* Otherwise, "or" the range with the range of the input
4098 that will be interpreted as negative. */
4099 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4100 0, n_low, n_high, 1,
4101 fold_convert (arg0_type,
4106 in_p = (in_p != n_in_p);
4111 low = n_low, high = n_high;
4121 /* If EXP is a constant, we can evaluate whether this is true or false. */
4122 if (TREE_CODE (exp) == INTEGER_CST)
4124 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4126 && integer_onep (range_binop (LE_EXPR, integer_type_node,
/* Publish the computed range through the caller's output pointers.  */
4132 *pin_p = in_p, *plow = low, *phigh = high;
4136 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4137 type, TYPE, return an expression to test if EXP is in (or out of, depending
4138 on IN_P) the range. Return 0 if the test couldn't be created. */
/* NOTE(review): this excerpt elides interior lines (return type, braces,
   some early-out conditions such as the LOW == 0 / HIGH == 0 guards that
   must precede the LE/GE builds, and parts of the switch on ETYPE), so
   the visible branches are not the complete function.  */
4141 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4143 tree etype = TREE_TYPE (exp);
4146 #ifdef HAVE_canonicalize_funcptr_for_compare
4147 /* Disable this optimization for function pointer expressions
4148 on targets that require function pointer canonicalization. */
4149 if (HAVE_canonicalize_funcptr_for_compare
4150 && TREE_CODE (etype) == POINTER_TYPE
4151 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* Handle the "out of range" case by building the inclusive check and
   inverting it.  */
4157 value = build_range_check (type, exp, 1, low, high);
4159 return invert_truthvalue (value);
/* An unbounded range on both sides is always true.  */
4164 if (low == 0 && high == 0)
4165 return build_int_cst (type, 1);
4168 return fold_build2 (LE_EXPR, type, exp,
4169 fold_convert (etype, high));
4172 return fold_build2 (GE_EXPR, type, exp,
4173 fold_convert (etype, low));
/* A degenerate range [c, c] is a simple equality test.  */
4175 if (operand_equal_p (low, high, 0))
4176 return fold_build2 (EQ_EXPR, type, exp,
4177 fold_convert (etype, low));
4179 if (integer_zerop (low))
4181 if (! TYPE_UNSIGNED (etype))
4183 etype = lang_hooks.types.unsigned_type (etype);
4184 high = fold_convert (etype, high);
4185 exp = fold_convert (etype, exp);
4187 return build_range_check (type, exp, 1, 0, high);
4190 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4191 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4193 unsigned HOST_WIDE_INT lo;
4197 prec = TYPE_PRECISION (etype);
4198 if (prec <= HOST_BITS_PER_WIDE_INT)
4201 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4205 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4206 lo = (unsigned HOST_WIDE_INT) -1;
/* If HIGH is exactly the signed maximum of ETYPE, the test reduces
   to a signed "greater than zero" comparison.  */
4209 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4211 if (TYPE_UNSIGNED (etype))
4213 etype = lang_hooks.types.signed_type (etype);
4214 exp = fold_convert (etype, exp);
4216 return fold_build2 (GT_EXPR, type, exp,
4217 build_int_cst (etype, 0));
4221 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4222 This requires wrap-around arithmetics for the type of the expression. */
4223 switch (TREE_CODE (etype))
4226 /* There is no requirement that LOW be within the range of ETYPE
4227 if the latter is a subtype. It must, however, be within the base
4228 type of ETYPE. So be sure we do the subtraction in that type. */
4229 if (TREE_TYPE (etype))
4230 etype = TREE_TYPE (etype);
4235 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4236 TYPE_UNSIGNED (etype));
4243 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4244 if (TREE_CODE (etype) == INTEGER_TYPE
4245 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4247 tree utype, minv, maxv;
4249 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4250 for the type in question, as we rely on this here. */
4251 utype = lang_hooks.types.unsigned_type (etype);
4252 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4253 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4254 integer_one_node, 1);
4255 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4257 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
/* Re-express the check as (EXP - LOW) in [0, HIGH - LOW] in ETYPE.  */
4264 high = fold_convert (etype, high);
4265 low = fold_convert (etype, low);
4266 exp = fold_convert (etype, exp);
4268 value = const_binop (MINUS_EXPR, high, low, 0);
4270 if (value != 0 && !TREE_OVERFLOW (value))
4271 return build_range_check (type,
4272 fold_build2 (MINUS_EXPR, etype, exp, low),
4273 1, build_int_cst (etype, 0), value);
4278 /* Return the predecessor of VAL in its type, handling the infinite case. */
/* NOTE(review): the statement executed when VAL equals the type's
   minimum (presumably returning the "infinite" sentinel 0) is elided
   from this excerpt — confirm against the full source.  */
4281 range_predecessor (tree val)
4283 tree type = TREE_TYPE (val);
4285 if (INTEGRAL_TYPE_P (type)
4286 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4289 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4292 /* Return the successor of VAL in its type, handling the infinite case. */
/* NOTE(review): the statement executed when VAL equals the type's
   maximum (presumably returning the "infinite" sentinel 0) is elided
   from this excerpt — confirm against the full source.  */
4295 range_successor (tree val)
4297 tree type = TREE_TYPE (val);
4299 if (INTEGRAL_TYPE_P (type)
4300 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4303 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4306 /* Given two ranges, see if we can merge them into one. Return 1 if we
4307 can, 0 if we can't. Set the output range into the specified parameters. */
/* NOTE(review): interior lines are elided in this excerpt (return type,
   local declarations such as IN_P/LOW/HIGH/NO_OVERLAP/SUBSET/TEM/TEMP,
   braces, some else-branches, and "return 0" failure paths), so the
   branch structure shown is not complete.  */
4310 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4311 tree high0, int in1_p, tree low1, tree high1)
4319 int lowequal = ((low0 == 0 && low1 == 0)
4320 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4321 low0, 0, low1, 0)));
4322 int highequal = ((high0 == 0 && high1 == 0)
4323 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4324 high0, 1, high1, 1)));
4326 /* Make range 0 be the range that starts first, or ends last if they
4327 start at the same value. Swap them if it isn't. */
4328 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4331 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4332 high1, 1, high0, 1))))
4334 temp = in0_p, in0_p = in1_p, in1_p = temp;
4335 tem = low0, low0 = low1, low1 = tem;
4336 tem = high0, high0 = high1, high1 = tem;
4339 /* Now flag two cases, whether the ranges are disjoint or whether the
4340 second range is totally subsumed in the first. Note that the tests
4341 below are simplified by the ones above. */
4342 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4343 high0, 1, low1, 0));
4344 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4345 high1, 1, high0, 1));
4347 /* We now have four cases, depending on whether we are including or
4348 excluding the two ranges. */
4351 /* If they don't overlap, the result is false. If the second range
4352 is a subset it is the result. Otherwise, the range is from the start
4353 of the second to the end of the first. */
4355 in_p = 0, low = high = 0;
4357 in_p = 1, low = low1, high = high1;
4359 in_p = 1, low = low1, high = high0;
4362 else if (in0_p && ! in1_p)
4364 /* If they don't overlap, the result is the first range. If they are
4365 equal, the result is false. If the second range is a subset of the
4366 first, and the ranges begin at the same place, we go from just after
4367 the end of the second range to the end of the first. If the second
4368 range is not a subset of the first, or if it is a subset and both
4369 ranges end at the same place, the range starts at the start of the
4370 first range and ends just before the second range.
4371 Otherwise, we can't describe this as a single range. */
4373 in_p = 1, low = low0, high = high0;
4374 else if (lowequal && highequal)
4375 in_p = 0, low = high = 0;
4376 else if (subset && lowequal)
4378 low = range_successor (high1);
4382 else if (! subset || highequal)
4385 high = range_predecessor (low1);
4392 else if (! in0_p && in1_p)
4394 /* If they don't overlap, the result is the second range. If the second
4395 is a subset of the first, the result is false. Otherwise,
4396 the range starts just after the first range and ends at the
4397 end of the second. */
4399 in_p = 1, low = low1, high = high1;
4400 else if (subset || highequal)
4401 in_p = 0, low = high = 0;
4404 low = range_successor (high0);
4412 /* The case where we are excluding both ranges. Here the complex case
4413 is if they don't overlap. In that case, the only time we have a
4414 range is if they are adjacent. If the second is a subset of the
4415 first, the result is the first. Otherwise, the range to exclude
4416 starts at the beginning of the first range and ends at the end of the
4420 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4421 range_successor (high0),
4423 in_p = 0, low = low0, high = high1;
4426 /* Canonicalize - [min, x] into - [-, x]. */
4427 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4428 switch (TREE_CODE (TREE_TYPE (low0)))
4431 if (TYPE_PRECISION (TREE_TYPE (low0))
4432 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4436 if (tree_int_cst_equal (low0,
4437 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4441 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4442 && integer_zerop (low0))
4449 /* Canonicalize - [x, max] into - [x, -]. */
4450 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4451 switch (TREE_CODE (TREE_TYPE (high1)))
4454 if (TYPE_PRECISION (TREE_TYPE (high1))
4455 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4459 if (tree_int_cst_equal (high1,
4460 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4464 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4465 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4467 integer_one_node, 1)))
4474 /* The ranges might be also adjacent between the maximum and
4475 minimum values of the given type. For
4476 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4477 return + [x + 1, y - 1]. */
4478 if (low0 == 0 && high1 == 0)
4480 low = range_successor (high0);
4481 high = range_predecessor (low1);
4482 if (low == 0 || high == 0)
4492 in_p = 0, low = low0, high = high0;
4494 in_p = 0, low = low0, high = high1;
/* Publish the merged range through the caller's output pointers.  */
4497 *pin_p = in_p, *plow = low, *phigh = high;
4502 /* Subroutine of fold, looking inside expressions of the form
4503 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4504 of the COND_EXPR. This function is being used also to optimize
4505 A op B ? C : A, by reversing the comparison first.
4507 Return a folded expression whose code is not a COND_EXPR
4508 anymore, or NULL_TREE if no folding opportunity is found. */
/* NOTE(review): many original lines are elided in this excerpt (return
   type, the switch statements on COMP_CODE and their case labels,
   braces, and several guard conditions), so the visible branches are
   fragments of the full control flow; comments below hedge where the
   governing case label is not visible.  */
4511 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4513 enum tree_code comp_code = TREE_CODE (arg0);
4514 tree arg00 = TREE_OPERAND (arg0, 0);
4515 tree arg01 = TREE_OPERAND (arg0, 1);
4516 tree arg1_type = TREE_TYPE (arg1);
4522 /* If we have A op 0 ? A : -A, consider applying the following
4525 A == 0? A : -A same as -A
4526 A != 0? A : -A same as A
4527 A >= 0? A : -A same as abs (A)
4528 A > 0? A : -A same as abs (A)
4529 A <= 0? A : -A same as -abs (A)
4530 A < 0? A : -A same as -abs (A)
4532 None of these transformations work for modes with signed
4533 zeros. If A is +/-0, the first two transformations will
4534 change the sign of the result (from +0 to -0, or vice
4535 versa). The last four will fix the sign of the result,
4536 even though the original expressions could be positive or
4537 negative, depending on the sign of A.
4539 Note that all these transformations are correct if A is
4540 NaN, since the two alternatives (A and -A) are also NaNs. */
4541 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4542 ? real_zerop (arg01)
4543 : integer_zerop (arg01))
4544 && ((TREE_CODE (arg2) == NEGATE_EXPR
4545 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4546 /* In the case that A is of the form X-Y, '-A' (arg2) may
4547 have already been folded to Y-X, check for that. */
4548 || (TREE_CODE (arg1) == MINUS_EXPR
4549 && TREE_CODE (arg2) == MINUS_EXPR
4550 && operand_equal_p (TREE_OPERAND (arg1, 0),
4551 TREE_OPERAND (arg2, 1), 0)
4552 && operand_equal_p (TREE_OPERAND (arg1, 1),
4553 TREE_OPERAND (arg2, 0), 0))))
/* Presumably the EQ_EXPR case: A == 0 ? A : -A folds to -A.  */
4558 tem = fold_convert (arg1_type, arg1);
4559 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4562 return pedantic_non_lvalue (fold_convert (type, arg1));
4565 if (flag_trapping_math)
/* Build abs (A); an unsigned operand is first converted to the
   corresponding signed type.  */
4570 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4571 arg1 = fold_convert (lang_hooks.types.signed_type
4572 (TREE_TYPE (arg1)), arg1);
4573 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4574 return pedantic_non_lvalue (fold_convert (type, tem));
4577 if (flag_trapping_math)
4581 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4582 arg1 = fold_convert (lang_hooks.types.signed_type
4583 (TREE_TYPE (arg1)), arg1);
4584 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4585 return negate_expr (fold_convert (type, tem));
4587 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4591 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4592 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4593 both transformations are correct when A is NaN: A != 0
4594 is then true, and A == 0 is false. */
4596 if (integer_zerop (arg01) && integer_zerop (arg2))
4598 if (comp_code == NE_EXPR)
4599 return pedantic_non_lvalue (fold_convert (type, arg1));
4600 else if (comp_code == EQ_EXPR)
4601 return build_int_cst (type, 0);
4604 /* Try some transformations of A op B ? A : B.
4606 A == B? A : B same as B
4607 A != B? A : B same as A
4608 A >= B? A : B same as max (A, B)
4609 A > B? A : B same as max (B, A)
4610 A <= B? A : B same as min (A, B)
4611 A < B? A : B same as min (B, A)
4613 As above, these transformations don't work in the presence
4614 of signed zeros. For example, if A and B are zeros of
4615 opposite sign, the first two transformations will change
4616 the sign of the result. In the last four, the original
4617 expressions give different results for (A=+0, B=-0) and
4618 (A=-0, B=+0), but the transformed expressions do not.
4620 The first two transformations are correct if either A or B
4621 is a NaN. In the first transformation, the condition will
4622 be false, and B will indeed be chosen. In the case of the
4623 second transformation, the condition A != B will be true,
4624 and A will be chosen.
4626 The conversions to max() and min() are not correct if B is
4627 a number and A is not. The conditions in the original
4628 expressions will be false, so all four give B. The min()
4629 and max() versions would give a NaN instead. */
4630 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4631 /* Avoid these transformations if the COND_EXPR may be used
4632 as an lvalue in the C++ front-end. PR c++/19199. */
4634 || (strcmp (lang_hooks.name, "GNU C++") != 0
4635 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4636 || ! maybe_lvalue_p (arg1)
4637 || ! maybe_lvalue_p (arg2)))
4639 tree comp_op0 = arg00;
4640 tree comp_op1 = arg01;
4641 tree comp_type = TREE_TYPE (comp_op0);
4643 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4644 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4654 return pedantic_non_lvalue (fold_convert (type, arg2));
4656 return pedantic_non_lvalue (fold_convert (type, arg1));
4661 /* In C++ a ?: expression can be an lvalue, so put the
4662 operand which will be used if they are equal first
4663 so that we can convert this back to the
4664 corresponding COND_EXPR. */
4665 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4667 comp_op0 = fold_convert (comp_type, comp_op0);
4668 comp_op1 = fold_convert (comp_type, comp_op1);
4669 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4670 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4671 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4672 return pedantic_non_lvalue (fold_convert (type, tem));
4679 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4681 comp_op0 = fold_convert (comp_type, comp_op0);
4682 comp_op1 = fold_convert (comp_type, comp_op1);
4683 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4684 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4685 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4686 return pedantic_non_lvalue (fold_convert (type, tem));
4690 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4691 return pedantic_non_lvalue (fold_convert (type, arg2));
4694 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4695 return pedantic_non_lvalue (fold_convert (type, arg1));
4698 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4703 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4704 we might still be able to simplify this. For example,
4705 if C1 is one less or one more than C2, this might have started
4706 out as a MIN or MAX and been transformed by this function.
4707 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4709 if (INTEGRAL_TYPE_P (type)
4710 && TREE_CODE (arg01) == INTEGER_CST
4711 && TREE_CODE (arg2) == INTEGER_CST)
4715 /* We can replace A with C1 in this case. */
4716 arg1 = fold_convert (type, arg01);
4717 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4720 /* If C1 is C2 + 1, this is min(A, C2). */
4721 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4723 && operand_equal_p (arg01,
4724 const_binop (PLUS_EXPR, arg2,
4725 build_int_cst (type, 1), 0),
4727 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4732 /* If C1 is C2 - 1, this is min(A, C2). */
4733 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4735 && operand_equal_p (arg01,
4736 const_binop (MINUS_EXPR, arg2,
4737 build_int_cst (type, 1), 0),
4739 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4744 /* If C1 is C2 - 1, this is max(A, C2). */
4745 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4747 && operand_equal_p (arg01,
4748 const_binop (MINUS_EXPR, arg2,
4749 build_int_cst (type, 1), 0),
4751 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4756 /* If C1 is C2 + 1, this is max(A, C2). */
4757 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4759 && operand_equal_p (arg01,
4760 const_binop (PLUS_EXPR, arg2,
4761 build_int_cst (type, 1), 0),
4763 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4777 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4778 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4781 /* EXP is some logical combination of boolean tests. See if we can
4782 merge it into some range test. Return the new tree if so. */
/* NOTE(review): interior lines are elided in this excerpt (return type,
   braces, the "tree tem;" declaration, operand arguments inside some
   calls, and the final failure return), so the visible code is not the
   complete function.  */
4785 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4787 int or_op = (code == TRUTH_ORIF_EXPR
4788 || code == TRUTH_OR_EXPR);
4789 int in0_p, in1_p, in_p;
4790 tree low0, low1, low, high0, high1, high;
4791 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4792 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4795 /* If this is an OR operation, invert both sides; we will invert
4796 again at the end. */
4798 in0_p = ! in0_p, in1_p = ! in1_p;
4800 /* If both expressions are the same, if we can merge the ranges, and we
4801 can build the range test, return it or it inverted. If one of the
4802 ranges is always true or always false, consider it to be the same
4803 expression as the other. */
4804 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4805 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4807 && 0 != (tem = (build_range_check (type,
4809 : rhs != 0 ? rhs : integer_zero_node,
4811 return or_op ? invert_truthvalue (tem) : tem;
4813 /* On machines where the branch cost is expensive, if this is a
4814 short-circuited branch and the underlying object on both sides
4815 is the same, make a non-short-circuit operation. */
4816 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4817 && lhs != 0 && rhs != 0
4818 && (code == TRUTH_ANDIF_EXPR
4819 || code == TRUTH_ORIF_EXPR)
4820 && operand_equal_p (lhs, rhs, 0))
4822 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4823 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4824 which cases we can't do this. */
4825 if (simple_operand_p (lhs))
4826 return build2 (code == TRUTH_ANDIF_EXPR
4827 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4830 else if (lang_hooks.decls.global_bindings_p () == 0
4831 && ! CONTAINS_PLACEHOLDER_P (lhs))
4833 tree common = save_expr (lhs);
4835 if (0 != (lhs = build_range_check (type, common,
4836 or_op ? ! in0_p : in0_p,
4838 && (0 != (rhs = build_range_check (type, common,
4839 or_op ? ! in1_p : in1_p,
4841 return build2 (code == TRUTH_ANDIF_EXPR
4842 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4850 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4851 bit value. Arrange things so the extra bits will be set to zero if and
4852 only if C is signed-extended to its full width. If MASK is nonzero,
4853 it is an INTEGER_CST that should be AND'ed with the extra bits. */
/* NOTE(review): a few original lines are elided here (return type, the
   "tree temp;" declaration, and the "if (mask != 0)" guard that the
   comment above implies precedes the BIT_AND with MASK) — confirm
   against the full source.  */
4856 unextend (tree c, int p, int unsignedp, tree mask)
4858 tree type = TREE_TYPE (c);
4859 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Nothing to do if C already occupies the full mode or is unsigned.  */
4862 if (p == modesize || unsignedp)
4865 /* We work by getting just the sign bit into the low-order bit, then
4866 into the high-order bit, then sign-extend. We then XOR that value
4868 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4869 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4871 /* We must use a signed type in order to get an arithmetic right shift.
4872 However, we must also avoid introducing accidental overflows, so that
4873 a subsequent call to integer_zerop will work. Hence we must
4874 do the type conversion here. At this point, the constant is either
4875 zero or one, and the conversion to a signed type can never overflow.
4876 We could get an overflow if this conversion is done anywhere else. */
4877 if (TYPE_UNSIGNED (type))
4878 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4880 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4881 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4883 temp = const_binop (BIT_AND_EXPR, temp,
4884 fold_convert (TREE_TYPE (c), mask), 0);
4885 /* If necessary, convert the type back to match the type of C. */
4886 if (TYPE_UNSIGNED (type))
4887 temp = fold_convert (type, temp);
4889 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4892 /* Find ways of folding logical expressions of LHS and RHS:
4893 Try to merge two comparisons to the same innermost item.
4894 Look for range tests like "ch >= '0' && ch <= '9'".
4895 Look for combinations of simple terms on machines with expensive branches
4896 and evaluate the RHS unconditionally.
4898 For example, if we have p->a == 2 && p->b == 4 and we can make an
4899 object large enough to span both A and B, we can do this with a comparison
4900 against the object ANDed with the a mask.
4902 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4903 operations to do this with one comparison.
4905 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4906 function and the one above.
4908 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4909 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4911 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4914 We return the simplified tree or 0 if no optimization is possible. */
4917 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4919 /* If this is the "or" of two comparisons, we can do something if
4920 the comparisons are NE_EXPR. If this is the "and", we can do something
4921 if the comparisons are EQ_EXPR. I.e.,
4922 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4924 WANTED_CODE is this operation code. For single bit fields, we can
4925 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4926 comparison for one-bit fields. */
/* Naming convention for the locals below: the first letter (l/r) says
   which of LHS/RHS the datum describes, the second letter which operand
   of that comparison.  E.g. lr_arg is operand 1 of LHS and rl_mask is
   the mask for operand 0 of RHS.  The ln/rn prefixes describe the wider
   "combined" fields chosen by get_best_mode for each side.  */
4928 enum tree_code wanted_code;
4929 enum tree_code lcode, rcode;
4930 tree ll_arg, lr_arg, rl_arg, rr_arg;
4931 tree ll_inner, lr_inner, rl_inner, rr_inner;
4932 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4933 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4934 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4935 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4936 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4937 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4938 enum machine_mode lnmode, rnmode;
4939 tree ll_mask, lr_mask, rl_mask, rr_mask;
4940 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4941 tree l_const, r_const;
4942 tree lntype, rntype, result;
4943 int first_bit, end_bit;
4945 tree orig_lhs = lhs, orig_rhs = rhs;
4946 enum tree_code orig_code = code;
4948 /* Start by getting the comparison codes. Fail if anything is volatile.
4949 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4950 it were surrounded with a NE_EXPR. */
4952 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4955 lcode = TREE_CODE (lhs);
4956 rcode = TREE_CODE (rhs);
4958 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4960 lhs = build2 (NE_EXPR, truth_type, lhs,
4961 build_int_cst (TREE_TYPE (lhs), 0));
4965 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4967 rhs = build2 (NE_EXPR, truth_type, rhs,
4968 build_int_cst (TREE_TYPE (rhs), 0));
4972 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4973 || TREE_CODE_CLASS (rcode) != tcc_comparison)
/* Pull out the four operands of the two comparisons.  */
4976 ll_arg = TREE_OPERAND (lhs, 0);
4977 lr_arg = TREE_OPERAND (lhs, 1);
4978 rl_arg = TREE_OPERAND (rhs, 0);
4979 rr_arg = TREE_OPERAND (rhs, 1);
4981 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4982 if (simple_operand_p (ll_arg)
4983 && simple_operand_p (lr_arg))
4986 if (operand_equal_p (ll_arg, rl_arg, 0)
4987 && operand_equal_p (lr_arg, rr_arg, 0))
4989 result = combine_comparisons (code, lcode, rcode,
4990 truth_type, ll_arg, lr_arg)
4994 else if (operand_equal_p (ll_arg, rr_arg, 0)
4995 && operand_equal_p (lr_arg, rl_arg, 0))
/* Same comparisons but with the operands of RHS swapped, so swap
   its comparison code before combining.  */
4997 result = combine_comparisons (code, lcode,
4998 swap_tree_comparison (rcode),
4999 truth_type, ll_arg, lr_arg);
/* Canonicalize to the non-short-circuit form of the operation;
   the merging below does not distinguish ANDIF from AND.  */
5005 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5006 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5008 /* If the RHS can be evaluated unconditionally and its operands are
5009 simple, it wins to evaluate the RHS unconditionally on machines
5010 with expensive branches. In this case, this isn't a comparison
5011 that can be merged. Avoid doing this if the RHS is a floating-point
5012 comparison since those can trap. */
5014 if (BRANCH_COST >= 2
5015 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5016 && simple_operand_p (rl_arg)
5017 && simple_operand_p (rr_arg))
5019 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5020 if (code == TRUTH_OR_EXPR
5021 && lcode == NE_EXPR && integer_zerop (lr_arg)
5022 && rcode == NE_EXPR && integer_zerop (rr_arg)
5023 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5024 return build2 (NE_EXPR, truth_type,
5025 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5027 build_int_cst (TREE_TYPE (ll_arg), 0));
5029 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5030 if (code == TRUTH_AND_EXPR
5031 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5032 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5033 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5034 return build2 (EQ_EXPR, truth_type,
5035 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5037 build_int_cst (TREE_TYPE (ll_arg), 0));
/* Target evaluates both operands unconditionally anyway; rebuild the
   expression with the canonicalized code if anything changed.  */
5039 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5041 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5042 return build2 (code, truth_type, lhs, rhs);
5047 /* See if the comparisons can be merged. Then get all the parameters for
5050 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5051 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decode each of the four operands as a (possibly masked) bit-field
   reference: inner object, bit size/position, mode, signedness, mask.  */
5055 ll_inner = decode_field_reference (ll_arg,
5056 &ll_bitsize, &ll_bitpos, &ll_mode,
5057 &ll_unsignedp, &volatilep, &ll_mask,
5059 lr_inner = decode_field_reference (lr_arg,
5060 &lr_bitsize, &lr_bitpos, &lr_mode,
5061 &lr_unsignedp, &volatilep, &lr_mask,
5063 rl_inner = decode_field_reference (rl_arg,
5064 &rl_bitsize, &rl_bitpos, &rl_mode,
5065 &rl_unsignedp, &volatilep, &rl_mask,
5067 rr_inner = decode_field_reference (rr_arg,
5068 &rr_bitsize, &rr_bitpos, &rr_mode,
5069 &rr_unsignedp, &volatilep, &rr_mask,
5072 /* It must be true that the inner operation on the lhs of each
5073 comparison must be the same if we are to be able to do anything.
5074 Then see if we have constants. If not, the same must be true for
5076 if (volatilep || ll_inner == 0 || rl_inner == 0
5077 || ! operand_equal_p (ll_inner, rl_inner, 0))
5080 if (TREE_CODE (lr_arg) == INTEGER_CST
5081 && TREE_CODE (rr_arg) == INTEGER_CST)
5082 l_const = lr_arg, r_const = rr_arg;
5083 else if (lr_inner == 0 || rr_inner == 0
5084 || ! operand_equal_p (lr_inner, rr_inner, 0))
5087 l_const = r_const = 0;
5089 /* If either comparison code is not correct for our logical operation,
5090 fail. However, we can convert a one-bit comparison against zero into
5091 the opposite comparison against that bit being set in the field. */
5093 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5094 if (lcode != wanted_code)
5096 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5098 /* Make the left operand unsigned, since we are only interested
5099 in the value of one bit. Otherwise we are doing the wrong
5108 /* This is analogous to the code for l_const above. */
5109 if (rcode != wanted_code)
5111 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5120 /* See if we can find a mode that contains both fields being compared on
5121 the left. If we can't, fail. Otherwise, update all constants and masks
5122 to be relative to a field of that size. */
5123 first_bit = MIN (ll_bitpos, rl_bitpos);
5124 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5125 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5126 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5128 if (lnmode == VOIDmode)
5131 lnbitsize = GET_MODE_BITSIZE (lnmode);
5132 lnbitpos = first_bit & ~ (lnbitsize - 1);
5133 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5134 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5136 if (BYTES_BIG_ENDIAN)
5138 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5139 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
/* Shift the masks into their position within the wider field.  */
5142 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5143 size_int (xll_bitpos), 0);
5144 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5145 size_int (xrl_bitpos), 0);
5149 l_const = fold_convert (lntype, l_const);
5150 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5151 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5152 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5153 fold_build1 (BIT_NOT_EXPR,
/* Constant has bits set outside the field's mask: the comparison
   has a known, constant result.  */
5157 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5159 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5164 r_const = fold_convert (lntype, r_const);
5165 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5166 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5167 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5168 fold_build1 (BIT_NOT_EXPR,
5172 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5174 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5178 /* If the right sides are not constant, do the same for it. Also,
5179 disallow this optimization if a size or signedness mismatch occurs
5180 between the left and right sides. */
5183 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5184 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5185 /* Make sure the two fields on the right
5186 correspond to the left without being swapped. */
5187 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5190 first_bit = MIN (lr_bitpos, rr_bitpos);
5191 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5192 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5193 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5195 if (rnmode == VOIDmode)
5198 rnbitsize = GET_MODE_BITSIZE (rnmode);
5199 rnbitpos = first_bit & ~ (rnbitsize - 1);
5200 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5201 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5203 if (BYTES_BIG_ENDIAN)
5205 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5206 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5209 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5210 size_int (xlr_bitpos), 0);
5211 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5212 size_int (xrr_bitpos), 0);
5214 /* Make a mask that corresponds to both fields being compared.
5215 Do this for both items being compared. If the operands are the
5216 same size and the bits being compared are in the same position
5217 then we can do this by masking both and comparing the masked
5219 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5220 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5221 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5223 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5224 ll_unsignedp || rl_unsignedp);
5225 if (! all_ones_mask_p (ll_mask, lnbitsize))
5226 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5228 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5229 lr_unsignedp || rr_unsignedp);
5230 if (! all_ones_mask_p (lr_mask, rnbitsize))
5231 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5233 return build2 (wanted_code, truth_type, lhs, rhs);
5236 /* There is still another way we can do something: If both pairs of
5237 fields being compared are adjacent, we may be able to make a wider
5238 field containing them both.
5240 Note that we still must mask the lhs/rhs expressions. Furthermore,
5241 the mask must be shifted to account for the shift done by
5242 make_bit_field_ref. */
5243 if ((ll_bitsize + ll_bitpos == rl_bitpos
5244 && lr_bitsize + lr_bitpos == rr_bitpos)
5245 || (ll_bitpos == rl_bitpos + rl_bitsize
5246 && lr_bitpos == rr_bitpos + rr_bitsize))
5250 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5251 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5252 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5253 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5255 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5256 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5257 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5258 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5260 /* Convert to the smaller type before masking out unwanted bits. */
5262 if (lntype != rntype)
5264 if (lnbitsize > rnbitsize)
5266 lhs = fold_convert (rntype, lhs);
5267 ll_mask = fold_convert (rntype, ll_mask);
5270 else if (lnbitsize < rnbitsize)
5272 rhs = fold_convert (lntype, rhs);
5273 lr_mask = fold_convert (lntype, lr_mask);
5278 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5279 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5281 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5282 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5284 return build2 (wanted_code, truth_type, lhs, rhs);
5290 /* Handle the case of comparisons with constants. If there is something in
5291 common between the masks, those bits of the constants must be the same.
5292 If not, the condition is always false. Test for this to avoid generating
5293 incorrect code below. */
5294 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5295 if (! integer_zerop (result)
5296 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5297 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5299 if (wanted_code == NE_EXPR)
5301 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5302 return constant_boolean_node (true, truth_type);
5306 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5307 return constant_boolean_node (false, truth_type);
5311 /* Construct the expression we will return. First get the component
5312 reference we will make. Unless the mask is all ones the width of
5313 that field, perform the mask operation. Then compare with the
5315 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5316 ll_unsignedp || rl_unsignedp);
5318 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5319 if (! all_ones_mask_p (ll_mask, lnbitsize))
5320 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5322 return build2 (wanted_code, truth_type, result,
5323 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5326 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
/* CODE is the comparison code, TYPE the result type, OP0 the MIN/MAX
   expression and OP1 the constant it is compared against.  Only EQ_EXPR
   and GT_EXPR are handled directly; the other comparison codes are
   reduced to those two via logical identities and recursion.  */
5330 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5333 enum tree_code op_code;
5334 tree comp_const = op1;
5336 int consts_equal, consts_lt;
5339 STRIP_SIGN_NOPS (arg0);
5341 op_code = TREE_CODE (arg0);
/* minmax_const is the constant operand of the MIN/MAX; comp_const the
   constant being compared against; inner the variable operand.  */
5342 minmax_const = TREE_OPERAND (arg0, 1);
5343 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5344 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5345 inner = TREE_OPERAND (arg0, 0);
5347 /* If something does not permit us to optimize, return the original tree. */
5348 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5349 || TREE_CODE (comp_const) != INTEGER_CST
5350 || TREE_CONSTANT_OVERFLOW (comp_const)
5351 || TREE_CODE (minmax_const) != INTEGER_CST
5352 || TREE_CONSTANT_OVERFLOW (minmax_const))
5355 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5356 and GT_EXPR, doing the rest with recursive calls using logical
5360 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* NE/LT/LE: fold as the inverted comparison, then invert the result.  */
5362 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5365 return invert_truthvalue (tem);
/* GE: X >= C is (X == C) || (X > C).  */
5371 fold_build2 (TRUTH_ORIF_EXPR, type,
5372 optimize_minmax_comparison
5373 (EQ_EXPR, type, arg0, comp_const),
5374 optimize_minmax_comparison
5375 (GT_EXPR, type, arg0, comp_const));
5378 if (op_code == MAX_EXPR && consts_equal)
5379 /* MAX (X, 0) == 0 -> X <= 0 */
5380 return fold_build2 (LE_EXPR, type, inner, comp_const);
5382 else if (op_code == MAX_EXPR && consts_lt)
5383 /* MAX (X, 0) == 5 -> X == 5 */
5384 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5386 else if (op_code == MAX_EXPR)
5387 /* MAX (X, 0) == -1 -> false */
5388 return omit_one_operand (type, integer_zero_node, inner);
5390 else if (consts_equal)
5391 /* MIN (X, 0) == 0 -> X >= 0 */
5392 return fold_build2 (GE_EXPR, type, inner, comp_const);
5395 /* MIN (X, 0) == 5 -> false */
5396 return omit_one_operand (type, integer_zero_node, inner);
5399 /* MIN (X, 0) == -1 -> X == -1 */
5400 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5403 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5404 /* MAX (X, 0) > 0 -> X > 0
5405 MAX (X, 0) > 5 -> X > 5 */
5406 return fold_build2 (GT_EXPR, type, inner, comp_const);
5408 else if (op_code == MAX_EXPR)
5409 /* MAX (X, 0) > -1 -> true */
5410 return omit_one_operand (type, integer_one_node, inner);
5412 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5413 /* MIN (X, 0) > 0 -> false
5414 MIN (X, 0) > 5 -> false */
5415 return omit_one_operand (type, integer_zero_node, inner);
5418 /* MIN (X, 0) > -1 -> X > -1 */
5419 return fold_build2 (GT_EXPR, type, inner, comp_const);
5426 /* T is an integer expression that is being multiplied, divided, or taken a
5427 modulus (CODE says which and what kind of divide or modulus) by a
5428 constant C. See if we can eliminate that operation by folding it with
5429 other operations already in T. WIDE_TYPE, if non-null, is a type that
5430 should be used for the computation if wider than our type.
5432 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5433 (X * 2) + (Y * 4). We must, however, be assured that either the original
5434 expression would not overflow or that overflow is undefined for the type
5435 in the language in question.
5437 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5438 the machine has a multiply-accumulate insn or that this is part of an
5439 addressing calculation.
5441 If we return a non-null expression, it is an equivalent form of the
5442 original computation, but need not be in the original type. */
/* Thin wrapper around extract_muldiv_1 that bounds the recursion depth;
   extract_muldiv_1 recurses back through this function.  */
5445 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5447 /* To avoid exponential search depth, refuse to allow recursion past
5448 three levels. Beyond that (1) it's highly unlikely that we'll find
5449 something interesting and (2) we've probably processed it before
5450 when we built the inner expression. */
5459 ret = extract_muldiv_1 (t, c, code, wide_type);
/* Worker for extract_muldiv: one step of the distribution.  T, C, CODE
   and WIDE_TYPE are as for extract_muldiv.  Dispatches on the tree code
   of T and recurses (via extract_muldiv) into its operands.  */
5466 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5468 tree type = TREE_TYPE (t);
5469 enum tree_code tcode = TREE_CODE (t);
/* Compute in WIDE_TYPE when it is strictly wider than T's type.  */
5470 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5471 > GET_MODE_SIZE (TYPE_MODE (type)))
5472 ? wide_type : type);
5474 int same_p = tcode == code;
5475 tree op0 = NULL_TREE, op1 = NULL_TREE;
5477 /* Don't deal with constants of zero here; they confuse the code below. */
5478 if (integer_zerop (c))
5481 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5482 op0 = TREE_OPERAND (t, 0);
5484 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5485 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5487 /* Note that we need not handle conditional operations here since fold
5488 already handles those cases. So just do arithmetic here. */
5492 /* For a constant, we can always simplify if we are a multiply
5493 or (for divide and modulus) if it is a multiple of our constant. */
5494 if (code == MULT_EXPR
5495 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5496 return const_binop (code, fold_convert (ctype, t),
5497 fold_convert (ctype, c), 0);
5500 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5501 /* If op0 is an expression ... */
5502 if ((COMPARISON_CLASS_P (op0)
5503 || UNARY_CLASS_P (op0)
5504 || BINARY_CLASS_P (op0)
5505 || EXPRESSION_CLASS_P (op0))
5506 /* ... and is unsigned, and its type is smaller than ctype,
5507 then we cannot pass through as widening. */
5508 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5509 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5510 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5511 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5512 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5513 /* ... or this is a truncation (t is narrower than op0),
5514 then we cannot pass through this narrowing. */
5515 || (GET_MODE_SIZE (TYPE_MODE (type))
5516 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5517 /* ... or signedness changes for division or modulus,
5518 then we cannot pass through this conversion. */
5519 || (code != MULT_EXPR
5520 && (TYPE_UNSIGNED (ctype)
5521 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5524 /* Pass the constant down and see if we can make a simplification. If
5525 we can, replace this expression with the inner simplification for
5526 possible later conversion to our or some other type. */
5527 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5528 && TREE_CODE (t2) == INTEGER_CST
5529 && ! TREE_CONSTANT_OVERFLOW (t2)
5530 && (0 != (t1 = extract_muldiv (op0, t2, code,
5532 ? ctype : NULL_TREE))))
5537 /* If widening the type changes it from signed to unsigned, then we
5538 must avoid building ABS_EXPR itself as unsigned. */
5539 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5541 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5542 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5544 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5545 return fold_convert (ctype, t1);
5551 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5552 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5555 case MIN_EXPR: case MAX_EXPR:
5556 /* If widening the type changes the signedness, then we can't perform
5557 this optimization as that changes the result. */
5558 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5561 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5562 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5563 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* Dividing by a negative constant reverses the ordering, so MIN
   becomes MAX and vice versa.  */
5565 if (tree_int_cst_sgn (c) < 0)
5566 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5568 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5569 fold_convert (ctype, t2));
5573 case LSHIFT_EXPR: case RSHIFT_EXPR:
5574 /* If the second operand is constant, this is a multiplication
5575 or floor division, by a power of two, so we can treat it that
5576 way unless the multiplier or divisor overflows. Signed
5577 left-shift overflow is implementation-defined rather than
5578 undefined in C90, so do not convert signed left shift into
5580 if (TREE_CODE (op1) == INTEGER_CST
5581 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5582 /* const_binop may not detect overflow correctly,
5583 so check for it explicitly here. */
5584 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5585 && TREE_INT_CST_HIGH (op1) == 0
5586 && 0 != (t1 = fold_convert (ctype,
5587 const_binop (LSHIFT_EXPR,
5590 && ! TREE_OVERFLOW (t1))
5591 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5592 ? MULT_EXPR : FLOOR_DIV_EXPR,
5593 ctype, fold_convert (ctype, op0), t1),
5594 c, code, wide_type);
5597 case PLUS_EXPR: case MINUS_EXPR:
5598 /* See if we can eliminate the operation on both sides. If we can, we
5599 can return a new PLUS or MINUS. If we can't, the only remaining
5600 cases where we can do anything are if the second operand is a
5602 t1 = extract_muldiv (op0, c, code, wide_type);
5603 t2 = extract_muldiv (op1, c, code, wide_type);
5604 if (t1 != 0 && t2 != 0
5605 && (code == MULT_EXPR
5606 /* If not multiplication, we can only do this if both operands
5607 are divisible by c. */
5608 || (multiple_of_p (ctype, op0, c)
5609 && multiple_of_p (ctype, op1, c))))
5610 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5611 fold_convert (ctype, t2));
5613 /* If this was a subtraction, negate OP1 and set it to be an addition.
5614 This simplifies the logic below. */
5615 if (tcode == MINUS_EXPR)
5616 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5618 if (TREE_CODE (op1) != INTEGER_CST)
5621 /* If either OP1 or C are negative, this optimization is not safe for
5622 some of the division and remainder types while for others we need
5623 to change the code. */
5624 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5626 if (code == CEIL_DIV_EXPR)
5627 code = FLOOR_DIV_EXPR;
5628 else if (code == FLOOR_DIV_EXPR)
5629 code = CEIL_DIV_EXPR;
5630 else if (code != MULT_EXPR
5631 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5635 /* If it's a multiply or a division/modulus operation of a multiple
5636 of our constant, do the operation and verify it doesn't overflow. */
5637 if (code == MULT_EXPR
5638 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5640 op1 = const_binop (code, fold_convert (ctype, op1),
5641 fold_convert (ctype, c), 0);
5642 /* We allow the constant to overflow with wrapping semantics. */
5644 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5650 /* If we have an unsigned type is not a sizetype, we cannot widen
5651 the operation since it will change the result if the original
5652 computation overflowed. */
5653 if (TYPE_UNSIGNED (ctype)
5654 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5658 /* If we were able to eliminate our operation from the first side,
5659 apply our operation to the second side and reform the PLUS. */
5660 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5661 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5663 /* The last case is if we are a multiply. In that case, we can
5664 apply the distributive law to commute the multiply and addition
5665 if the multiplication of the constants doesn't overflow. */
5666 if (code == MULT_EXPR)
5667 return fold_build2 (tcode, ctype,
5668 fold_build2 (code, ctype,
5669 fold_convert (ctype, op0),
5670 fold_convert (ctype, c)),
5676 /* We have a special case here if we are doing something like
5677 (C * 8) % 4 since we know that's zero. */
5678 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5679 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5680 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5681 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5682 return omit_one_operand (type, integer_zero_node, op0);
5684 /* ... fall through ... */
5686 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5687 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5688 /* If we can extract our operation from the LHS, do so and return a
5689 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5690 do something only if the second operand is a constant. */
5692 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5693 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5694 fold_convert (ctype, op1));
5695 else if (tcode == MULT_EXPR && code == MULT_EXPR
5696 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5697 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5698 fold_convert (ctype, t1));
5699 else if (TREE_CODE (op1) != INTEGER_CST)
5702 /* If these are the same operation types, we can associate them
5703 assuming no overflow. */
5705 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5706 fold_convert (ctype, c), 0))
5707 && ! TREE_OVERFLOW (t1))
5708 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5710 /* If these operations "cancel" each other, we have the main
5711 optimizations of this pass, which occur when either constant is a
5712 multiple of the other, in which case we replace this with either an
5713 operation or CODE or TCODE.
5715 If we have an unsigned type that is not a sizetype, we cannot do
5716 this since it will change the result if the original computation
5718 if ((! TYPE_UNSIGNED (ctype)
5719 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5721 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5722 || (tcode == MULT_EXPR
5723 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5724 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5726 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5727 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5728 fold_convert (ctype,
5729 const_binop (TRUNC_DIV_EXPR,
5731 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5732 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5733 fold_convert (ctype,
5734 const_binop (TRUNC_DIV_EXPR,
5746 /* Return a node which has the indicated constant VALUE (either 0 or
5747 1), and is of the indicated TYPE. */
5750 constant_boolean_node (int value, tree type)
/* Use the pre-built shared nodes for the two common result types to
   avoid allocating fresh INTEGER_CSTs.  */
5752 if (type == integer_type_node)
5753 return value ? integer_one_node : integer_zero_node;
5754 else if (type == boolean_type_node)
5755 return value ? boolean_true_node : boolean_false_node;
5757 return build_int_cst (type, value);
5761 /* Return true if expr looks like an ARRAY_REF and set base and
5762 offset to the appropriate trees. If there is no offset,
5763 offset is set to NULL_TREE. Base will be canonicalized to
5764 something you can get the element type from using
5765 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5766 in bytes to the base. */
5769 extract_array_ref (tree expr, tree *base, tree *offset)
5771 /* One canonical form is a PLUS_EXPR with the first
5772 argument being an ADDR_EXPR with a possible NOP_EXPR
5774 if (TREE_CODE (expr) == PLUS_EXPR)
5776 tree op0 = TREE_OPERAND (expr, 0);
5777 tree inner_base, dummy1;
5778 /* Strip NOP_EXPRs here because the C frontends and/or
5779 folders present us (int *)&x.a + 4B possibly. */
/* Recurse on the pointer operand; accumulate its offset (if any)
   with the constant added here.  */
5781 if (extract_array_ref (op0, &inner_base, &dummy1))
5784 if (dummy1 == NULL_TREE)
5785 *offset = TREE_OPERAND (expr, 1);
5787 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5788 dummy1, TREE_OPERAND (expr, 1));
5792 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5793 which we transform into an ADDR_EXPR with appropriate
5794 offset. For other arguments to the ADDR_EXPR we assume
5795 zero offset and as such do not care about the ADDR_EXPR
5796 type and strip possible nops from it. */
5797 else if (TREE_CODE (expr) == ADDR_EXPR)
5799 tree op0 = TREE_OPERAND (expr, 0);
5800 if (TREE_CODE (op0) == ARRAY_REF)
/* &a[i]: offset is i scaled by the element size, in bytes.  */
5802 tree idx = TREE_OPERAND (op0, 1);
5803 *base = TREE_OPERAND (op0, 0);
5804 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5805 array_ref_element_size (op0));
5809 /* Handle array-to-pointer decay as &a. */
5810 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5811 *base = TREE_OPERAND (expr, 0);
5814 *offset = NULL_TREE;
5818 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5819 else if (SSA_VAR_P (expr)
5820 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5823 *offset = NULL_TREE;
5831 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5832 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5833 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5834 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5835 COND is the first argument to CODE; otherwise (as in the example
5836 given here), it is the second argument. TYPE is the type of the
5837 original expression. Return NULL_TREE if no simplification is
5841 fold_binary_op_with_conditional_arg (enum tree_code code,
5842 tree type, tree op0, tree op1,
5843 tree cond, tree arg, int cond_first_p)
5845 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5846 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5847 tree test, true_value, false_value;
5848 tree lhs = NULL_TREE;
5849 tree rhs = NULL_TREE;
5851 /* This transformation is only worthwhile if we don't have to wrap
5852 arg in a SAVE_EXPR, and the operation can be simplified on at least
5853 one of the branches once its pushed inside the COND_EXPR. */
5854 if (!TREE_CONSTANT (arg))
5857 if (TREE_CODE (cond) == COND_EXPR)
5859 test = TREE_OPERAND (cond, 0);
5860 true_value = TREE_OPERAND (cond, 1);
5861 false_value = TREE_OPERAND (cond, 2);
5862 /* If this operand throws an expression, then it does not make
5863 sense to try to perform a logical or arithmetic operation
5865 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5867 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* COND is a comparison, not a COND_EXPR: treat it as the test and
   use constant true/false for the two arms.  */
5872 tree testtype = TREE_TYPE (cond);
5874 true_value = constant_boolean_node (true, testtype);
5875 false_value = constant_boolean_node (false, testtype);
5878 arg = fold_convert (arg_type, arg);
/* Push the operation into each arm, keeping operand order as given
   by COND_FIRST_P.  */
5881 true_value = fold_convert (cond_type, true_value);
5883 lhs = fold_build2 (code, type, true_value, arg);
5885 lhs = fold_build2 (code, type, arg, true_value);
5889 false_value = fold_convert (cond_type, false_value);
5891 rhs = fold_build2 (code, type, false_value, arg);
5893 rhs = fold_build2 (code, type, arg, false_value);
5896 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5897 return fold_convert (type, test);
5901 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5903 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5904 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5905 ADDEND is the same as X.
5907 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5908 and finite. The problematic cases are when X is zero, and its mode
5909 has signed zeros. In the case of rounding towards -infinity,
5910 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5911 modes, X + 0 is not the same as X because -0 + 0 is 0. */
/* Return nonzero when adding (or, if NEGATE, subtracting) ADDEND, a
   real +/-0.0 constant, to any X of TYPE is an identity.  See the
   block comment above for the signed-zero / rounding-mode caveats.  */
5914 fold_real_zero_addition_p (tree type, tree addend, int negate)
5916 if (!real_zerop (addend))
5919 /* Don't allow the fold with -fsignaling-nans. */
5920 if (HONOR_SNANS (TYPE_MODE (type)))
5923 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5924 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5927 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5928 if (TREE_CODE (addend) == REAL_CST
5929 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5932 /* The mode has signed zeros, and we have to honor their sign.
5933 In this situation, there is only one case we can return true for.
5934 X - 0 is the same as X unless rounding towards -infinity is
5936 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5939 /* Subroutine of fold() that checks comparisons of built-in math
5940 functions against real constants.
5942 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5943 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5944 is the type of the result and ARG0 and ARG1 are the operands of the
5945 comparison. ARG1 must be a TREE_REAL_CST.
5947 The function returns the constant folded tree if a simplification
5948 can be made, and NULL_TREE otherwise. */
/* Fold comparisons of built-in math calls (currently only sqrt, per
   BUILTIN_SQRT_P) against a real constant ARG1, e.g. turning
   sqrt(x) CODE c into x CODE c*c where that is valid for the active
   NaN/Inf honoring modes.  Returns the folded tree or NULL_TREE.
   NOTE(review): braces/else lines are elided in this extract.  */
5951 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5952 tree type, tree arg0, tree arg1)
5956 if (BUILTIN_SQRT_P (fcode))
/* ARG0 is the sqrt call; pull out its single argument and mode.  */
5958 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5959 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5961 c = TREE_REAL_CST (arg1);
5962 if (REAL_VALUE_NEGATIVE (c))
5964 /* sqrt(x) < y is always false, if y is negative. */
5965 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5966 return omit_one_operand (type, integer_zero_node, arg);
5968 /* sqrt(x) > y is always true, if y is negative and we
5969 don't care about NaNs, i.e. negative values of x. */
5970 if (code == NE_EXPR || !HONOR_NANS (mode))
5971 return omit_one_operand (type, integer_one_node, arg);
5973 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5974 return fold_build2 (GE_EXPR, type, arg,
5975 build_real (TREE_TYPE (arg), dconst0));
5977 else if (code == GT_EXPR || code == GE_EXPR)
/* Compute c2 = c*c in the target mode, checking for overflow.  */
5981 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5982 real_convert (&c2, mode, &c2);
5984 if (REAL_VALUE_ISINF (c2))
5986 /* sqrt(x) > y is x == +Inf, when y is very large. */
5987 if (HONOR_INFINITIES (mode))
5988 return fold_build2 (EQ_EXPR, type, arg,
5989 build_real (TREE_TYPE (arg), c2));
5991 /* sqrt(x) > y is always false, when y is very large
5992 and we don't care about infinities. */
5993 return omit_one_operand (type, integer_zero_node, arg);
5996 /* sqrt(x) > c is the same as x > c*c. */
5997 return fold_build2 (code, type, arg,
5998 build_real (TREE_TYPE (arg), c2));
6000 else if (code == LT_EXPR || code == LE_EXPR)
6004 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6005 real_convert (&c2, mode, &c2);
6007 if (REAL_VALUE_ISINF (c2))
6009 /* sqrt(x) < y is always true, when y is a very large
6010 value and we don't care about NaNs or Infinities. */
6011 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6012 return omit_one_operand (type, integer_one_node, arg);
6014 /* sqrt(x) < y is x != +Inf when y is very large and we
6015 don't care about NaNs. */
6016 if (! HONOR_NANS (mode))
6017 return fold_build2 (NE_EXPR, type, arg,
6018 build_real (TREE_TYPE (arg), c2));
6020 /* sqrt(x) < y is x >= 0 when y is very large and we
6021 don't care about Infinities. */
6022 if (! HONOR_INFINITIES (mode))
6023 return fold_build2 (GE_EXPR, type, arg,
6024 build_real (TREE_TYPE (arg), dconst0));
6026 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6027 if (lang_hooks.decls.global_bindings_p () != 0
6028 || CONTAINS_PLACEHOLDER_P (arg))
/* save_expr ensures ARG is evaluated only once across both tests.  */
6031 arg = save_expr (arg);
6032 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6033 fold_build2 (GE_EXPR, type, arg,
6034 build_real (TREE_TYPE (arg),
6036 fold_build2 (NE_EXPR, type, arg,
6037 build_real (TREE_TYPE (arg),
6041 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6042 if (! HONOR_NANS (mode))
6043 return fold_build2 (code, type, arg,
6044 build_real (TREE_TYPE (arg), c2));
6046 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6047 if (lang_hooks.decls.global_bindings_p () == 0
6048 && ! CONTAINS_PLACEHOLDER_P (arg))
6050 arg = save_expr (arg);
6051 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6052 fold_build2 (GE_EXPR, type, arg,
6053 build_real (TREE_TYPE (arg),
6055 fold_build2 (code, type, arg,
6056 build_real (TREE_TYPE (arg),
6065 /* Subroutine of fold() that optimizes comparisons against Infinities,
6066 either +Inf or -Inf.
6068 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6069 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6070 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6072 The function returns the constant folded tree if a simplification
6073 can be made, and NULL_TREE otherwise. */
/* Optimize a comparison of ARG0 against an infinity constant ARG1,
   rewriting it in terms of DBL_MAX-style extreme values where the
   active NaN/sNaN honoring modes allow.  Returns the folded tree or
   NULL_TREE.  NOTE(review): the switch-on-CODE labels (GT, LE, EQ,
   GE, LT, NE) are elided lines in this extract.  */
6076 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6078 enum machine_mode mode;
6079 REAL_VALUE_TYPE max;
6083 mode = TYPE_MODE (TREE_TYPE (arg0));
6085 /* For negative infinity swap the sense of the comparison. */
6086 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6088 code = swap_tree_comparison (code);
6093 /* x > +Inf is always false, if we ignore sNaNs. */
6094 if (HONOR_SNANS (mode))
6096 return omit_one_operand (type, integer_zero_node, arg0);
6099 /* x <= +Inf is always true, if we don't care about NaNs. */
6100 if (! HONOR_NANS (mode))
6101 return omit_one_operand (type, integer_one_node, arg0);
6103 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6104 if (lang_hooks.decls.global_bindings_p () == 0
6105 && ! CONTAINS_PLACEHOLDER_P (arg0))
6107 arg0 = save_expr (arg0);
6108 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6114 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6115 real_maxval (&max, neg, mode);
6116 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6117 arg0, build_real (TREE_TYPE (arg0), max));
6120 /* x < +Inf is always equal to x <= DBL_MAX. */
6121 real_maxval (&max, neg, mode);
6122 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6123 arg0, build_real (TREE_TYPE (arg0), max));
6126 /* x != +Inf is always equal to !(x > DBL_MAX). */
6127 real_maxval (&max, neg, mode);
6128 if (! HONOR_NANS (mode))
6129 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6130 arg0, build_real (TREE_TYPE (arg0), max));
6132 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6133 arg0, build_real (TREE_TYPE (arg0), max));
6134 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6143 /* Subroutine of fold() that optimizes comparisons of a division by
6144 a nonzero integer constant against an integer constant, i.e.
6147 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6148 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6149 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6151 The function returns the constant folded tree if a simplification
6152 can be made, and NULL_TREE otherwise. */
/* Fold X/C1 CODE C2 (ARG0 is the division, ARG1 is C2, both integer
   constants) into a range check LO <= X <= HI (or its complement),
   computing LO/HI with explicit double-word arithmetic so unsigned
   overflow is detected.  NOTE(review): switch labels, braces and
   some case bodies are elided lines in this extract.  */
6155 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6157 tree prod, tmp, hi, lo;
6158 tree arg00 = TREE_OPERAND (arg0, 0);
6159 tree arg01 = TREE_OPERAND (arg0, 1);
6160 unsigned HOST_WIDE_INT lpart;
6161 HOST_WIDE_INT hpart;
6162 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6166 /* We have to do this the hard way to detect unsigned overflow.
6167 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6168 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6169 TREE_INT_CST_HIGH (arg01),
6170 TREE_INT_CST_LOW (arg1),
6171 TREE_INT_CST_HIGH (arg1),
6172 &lpart, &hpart, unsigned_p);
6173 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6174 -1, overflow, false);
6175 neg_overflow = false;
/* Unsigned case: range is [prod, prod + (arg01 - 1)].  */
6179 tmp = int_const_binop (MINUS_EXPR, arg01,
6180 build_int_cst (TREE_TYPE (arg01), 1), 0);
6183 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6184 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6185 TREE_INT_CST_HIGH (prod),
6186 TREE_INT_CST_LOW (tmp),
6187 TREE_INT_CST_HIGH (tmp),
6188 &lpart, &hpart, unsigned_p);
6189 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6190 -1, overflow | TREE_OVERFLOW (prod),
6191 TREE_CONSTANT_OVERFLOW (prod));
/* Signed case with a positive divisor: bounds depend on sign of C2.  */
6193 else if (tree_int_cst_sgn (arg01) >= 0)
6195 tmp = int_const_binop (MINUS_EXPR, arg01,
6196 build_int_cst (TREE_TYPE (arg01), 1), 0);
6197 switch (tree_int_cst_sgn (arg1))
6200 neg_overflow = true;
6201 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6206 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6211 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6221 /* A negative divisor reverses the relational operators. */
6222 code = swap_tree_comparison (code);
6224 tmp = int_const_binop (PLUS_EXPR, arg01,
6225 build_int_cst (TREE_TYPE (arg01), 1), 0);
6226 switch (tree_int_cst_sgn (arg1))
6229 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6234 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6239 neg_overflow = true;
6240 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Dispatch on the (possibly swapped) comparison code; an overflowed
   bound means that side of the range is unbounded.  */
6252 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6253 return omit_one_operand (type, integer_zero_node, arg00);
6254 if (TREE_OVERFLOW (hi))
6255 return fold_build2 (GE_EXPR, type, arg00, lo);
6256 if (TREE_OVERFLOW (lo))
6257 return fold_build2 (LE_EXPR, type, arg00, hi);
6258 return build_range_check (type, arg00, 1, lo, hi);
6261 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6262 return omit_one_operand (type, integer_one_node, arg00);
6263 if (TREE_OVERFLOW (hi))
6264 return fold_build2 (LT_EXPR, type, arg00, lo);
6265 if (TREE_OVERFLOW (lo))
6266 return fold_build2 (GT_EXPR, type, arg00, hi);
6267 return build_range_check (type, arg00, 0, lo, hi);
6270 if (TREE_OVERFLOW (lo))
6272 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6273 return omit_one_operand (type, tmp, arg00);
6275 return fold_build2 (LT_EXPR, type, arg00, lo);
6278 if (TREE_OVERFLOW (hi))
6280 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6281 return omit_one_operand (type, tmp, arg00);
6283 return fold_build2 (LE_EXPR, type, arg00, hi);
6286 if (TREE_OVERFLOW (hi))
6288 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6289 return omit_one_operand (type, tmp, arg00);
6291 return fold_build2 (GT_EXPR, type, arg00, hi);
6294 if (TREE_OVERFLOW (lo))
6296 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6297 return omit_one_operand (type, tmp, arg00);
6299 return fold_build2 (GE_EXPR, type, arg00, lo);
6309 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6310 equality/inequality test, then return a simplified form of the test
6311 using a sign testing. Otherwise return NULL. TYPE is the desired
/* If (A & C) ==/!= 0 tests exactly the sign bit of A, rewrite it as
   a signed comparison against zero (A < 0 / A >= 0).  Returns the
   folded tree or NULL.  */
6315 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6318 /* If this is testing a single bit, we can optimize the test. */
6319 if ((code == NE_EXPR || code == EQ_EXPR)
6320 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6321 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6323 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6324 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6325 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6327 if (arg00 != NULL_TREE
6328 /* This is only a win if casting to a signed type is cheap,
6329 i.e. when arg00's type is not a partial mode. */
6330 && TYPE_PRECISION (TREE_TYPE (arg00))
6331 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6333 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6334 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6335 result_type, fold_convert (stype, arg00),
6336 build_int_cst (stype, 0));
6343 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6344 equality/inequality test, then return a simplified form of
6345 the test using shifts and logical operations. Otherwise return
6346 NULL. TYPE is the desired result type. */
/* If (A & C) ==/!= 0 with C a single bit, rewrite the test as
   ((A >> log2(C)) & 1), optionally XORed with 1 for EQ, after first
   trying the cheaper sign-bit form.  Returns the folded tree or NULL.
   NOTE(review): the #else arm of the LOAD_EXTEND_OP conditional is an
   elided line in this extract.  */
6349 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6352 /* If this is testing a single bit, we can optimize the test. */
6353 if ((code == NE_EXPR || code == EQ_EXPR)
6354 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6355 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6357 tree inner = TREE_OPERAND (arg0, 0);
6358 tree type = TREE_TYPE (arg0);
6359 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6360 enum machine_mode operand_mode = TYPE_MODE (type);
6362 tree signed_type, unsigned_type, intermediate_type;
6365 /* First, see if we can fold the single bit test into a sign-bit
6367 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6372 /* Otherwise we have (A & C) != 0 where C is a single bit,
6373 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6374 Similarly for (A & C) == 0. */
6376 /* If INNER is a right shift of a constant and it plus BITNUM does
6377 not overflow, adjust BITNUM and INNER. */
6378 if (TREE_CODE (inner) == RSHIFT_EXPR
6379 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6380 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6381 && bitnum < TYPE_PRECISION (type)
6382 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6383 bitnum - TYPE_PRECISION (type)))
6385 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6386 inner = TREE_OPERAND (inner, 0);
6389 /* If we are going to be able to omit the AND below, we must do our
6390 operations as unsigned. If we must use the AND, we have a choice.
6391 Normally unsigned is faster, but for some machines signed is. */
6392 #ifdef LOAD_EXTEND_OP
6393 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6394 && !flag_syntax_only) ? 0 : 1;
6399 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6400 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6401 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6402 inner = fold_convert (intermediate_type, inner);
/* Shift the tested bit down to position 0.  */
6405 inner = build2 (RSHIFT_EXPR, intermediate_type,
6406 inner, size_int (bitnum));
6408 one = build_int_cst (intermediate_type, 1);
/* For EQ the sense is inverted: XOR flips the extracted bit.  */
6410 if (code == EQ_EXPR)
6411 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6413 /* Put the AND last so it can combine with more things. */
6414 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6416 /* Make sure to return the proper type. */
6417 inner = fold_convert (result_type, inner);
6424 /* Check whether we are allowed to reorder operands arg0 and arg1,
6425 such that the evaluation of arg1 occurs before arg0. */
/* Return nonzero if ARG1 may be evaluated before ARG0: always true
   unless -fevaluation-order is in effect, in which case at least one
   operand must be constant or both must be free of side effects.  */
6428 reorder_operands_p (tree arg0, tree arg1)
6430 if (! flag_evaluation_order)
6432 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6434 return ! TREE_SIDE_EFFECTS (arg0)
6435 && ! TREE_SIDE_EFFECTS (arg1);
6438 /* Test whether it is preferable to swap two operands, ARG0 and
6439 ARG1, for example because ARG0 is an integer constant and ARG1
6440 isn't. If REORDER is true, only recommend swapping if we can
6441 evaluate the operands in reverse order. */
/* Return true when swapping ARG0 and ARG1 gives the canonical operand
   order: constants belong second (INTEGER_CST before REAL_CST before
   COMPLEX_CST before other TREE_CONSTANT nodes), and SSA names are
   ordered by version number.  NOTE(review): the interleaved return
   0/1 lines for each pair of tests are elided in this extract.  */
6444 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6446 STRIP_SIGN_NOPS (arg0);
6447 STRIP_SIGN_NOPS (arg1);
6449 if (TREE_CODE (arg1) == INTEGER_CST)
6451 if (TREE_CODE (arg0) == INTEGER_CST)
6454 if (TREE_CODE (arg1) == REAL_CST)
6456 if (TREE_CODE (arg0) == REAL_CST)
6459 if (TREE_CODE (arg1) == COMPLEX_CST)
6461 if (TREE_CODE (arg0) == COMPLEX_CST)
6464 if (TREE_CONSTANT (arg1))
6466 if (TREE_CONSTANT (arg0))
/* With REORDER, only recommend a swap when evaluation order allows it.  */
6472 if (reorder && flag_evaluation_order
6473 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6481 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6482 for commutative and comparison operators. Ensuring a canonical
6483 form allows the optimizers to find additional redundancies without
6484 having to explicitly check for both orderings. */
6485 if (TREE_CODE (arg0) == SSA_NAME
6486 && TREE_CODE (arg1) == SSA_NAME
6487 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6493 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6494 ARG0 is extended to a wider type. */
/* Fold ARG0 CODE ARG1 where ARG0 is a widening conversion: either
   redo the comparison in the narrower type, or decide the result
   outright when ARG1 is a constant outside the narrow type's range.
   NOTE(review): the switch on CODE near the end (EQ/NE/LT/LE/GT/GE
   labels) is elided in this extract; only its return statements are
   visible.  */
6497 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6499 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6501 tree shorter_type, outer_type;
6505 if (arg0_unw == arg0)
6507 shorter_type = TREE_TYPE (arg0_unw);
6509 #ifdef HAVE_canonicalize_funcptr_for_compare
6510 /* Disable this optimization if we're casting a function pointer
6511 type on targets that require function pointer canonicalization. */
6512 if (HAVE_canonicalize_funcptr_for_compare
6513 && TREE_CODE (shorter_type) == POINTER_TYPE
6514 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6518 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6521 arg1_unw = get_unwidened (arg1, shorter_type);
6523 /* If possible, express the comparison in the shorter mode. */
6524 if ((code == EQ_EXPR || code == NE_EXPR
6525 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6526 && (TREE_TYPE (arg1_unw) == shorter_type
6527 || (TREE_CODE (arg1_unw) == INTEGER_CST
6528 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6529 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6530 && int_fits_type_p (arg1_unw, shorter_type))))
6531 return fold_build2 (code, type, arg0_unw,
6532 fold_convert (shorter_type, arg1_unw));
6534 if (TREE_CODE (arg1_unw) != INTEGER_CST
6535 || TREE_CODE (shorter_type) != INTEGER_TYPE
6536 || !int_fits_type_p (arg1_unw, shorter_type))
6539 /* If we are comparing with the integer that does not fit into the range
6540 of the shorter type, the result is known. */
6541 outer_type = TREE_TYPE (arg1_unw);
6542 min = lower_bound_in_type (outer_type, shorter_type);
6543 max = upper_bound_in_type (outer_type, shorter_type);
/* ABOVE/BELOW record whether ARG1 lies above or below the narrow range.  */
6545 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6547 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6554 return omit_one_operand (type, integer_zero_node, arg0);
6559 return omit_one_operand (type, integer_one_node, arg0);
6565 return omit_one_operand (type, integer_one_node, arg0);
6567 return omit_one_operand (type, integer_zero_node, arg0);
6572 return omit_one_operand (type, integer_zero_node, arg0);
6574 return omit_one_operand (type, integer_one_node, arg0);
6583 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6584 ARG0 just the signedness is changed. */
/* Fold ARG0 CODE ARG1 where ARG0 is a conversion that only changes
   signedness (same precision): strip the conversion and compare in
   the inner type, converting ARG1 to match.  */
6587 fold_sign_changed_comparison (enum tree_code code, tree type,
6588 tree arg0, tree arg1)
6591 tree inner_type, outer_type;
6593 if (TREE_CODE (arg0) != NOP_EXPR
6594 && TREE_CODE (arg0) != CONVERT_EXPR)
6597 outer_type = TREE_TYPE (arg0);
6598 arg0_inner = TREE_OPERAND (arg0, 0);
6599 inner_type = TREE_TYPE (arg0_inner);
6601 #ifdef HAVE_canonicalize_funcptr_for_compare
6602 /* Disable this optimization if we're casting a function pointer
6603 type on targets that require function pointer canonicalization. */
6604 if (HAVE_canonicalize_funcptr_for_compare
6605 && TREE_CODE (inner_type) == POINTER_TYPE
6606 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6610 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6613 if (TREE_CODE (arg1) != INTEGER_CST
6614 && !((TREE_CODE (arg1) == NOP_EXPR
6615 || TREE_CODE (arg1) == CONVERT_EXPR)
6616 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type)
6619 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
/* Re-fit the constant into the inner type, preserving overflow flags.  */
6624 if (TREE_CODE (arg1) == INTEGER_CST)
6625 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6626 TREE_INT_CST_HIGH (arg1), 0,
6627 TREE_OVERFLOW (arg1),
6628 TREE_CONSTANT_OVERFLOW (arg1));
6630 arg1 = fold_convert (inner_type, arg1);
6632 return fold_build2 (code, type, arg0_inner, arg1);
6635 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6636 step of the array. Reconstructs s and delta in the case of s * delta
6637 being an integer constant (and thus already folded).
6638 ADDR is the address. MULT is the multiplicative expression.
6639 If the function succeeds, the new address expression is returned. Otherwise
6640 NULL_TREE is returned. */
/* Try to rewrite &a[idx] CODE s*delta as &a[idx CODE delta] when S
   matches the array element step (or DELTA is a multiple of it).
   Returns the new ADDR_EXPR or NULL_TREE.
   NOTE(review): several break/continue and assignment lines inside
   the canonicalization and search loops are elided in this extract.  */
6643 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6645 tree s, delta, step;
6646 tree ref = TREE_OPERAND (addr, 0), pref;
6650 /* Canonicalize op1 into a possibly non-constant delta
6651 and an INTEGER_CST s. */
6652 if (TREE_CODE (op1) == MULT_EXPR)
6654 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6659 if (TREE_CODE (arg0) == INTEGER_CST)
6664 else if (TREE_CODE (arg1) == INTEGER_CST)
6672 else if (TREE_CODE (op1) == INTEGER_CST)
6679 /* Simulate we are delta * 1. */
6681 s = integer_one_node;
/* Walk down the reference chain looking for a suitable ARRAY_REF.  */
6684 for (;; ref = TREE_OPERAND (ref, 0))
6686 if (TREE_CODE (ref) == ARRAY_REF)
6688 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6692 step = array_ref_element_size (ref);
6693 if (TREE_CODE (step) != INTEGER_CST)
6698 if (! tree_int_cst_equal (step, s))
6703 /* Try if delta is a multiple of step. */
6704 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6713 if (!handled_component_p (ref))
6717 /* We found the suitable array reference. So copy everything up to it,
6718 and replace the index. */
6720 pref = TREE_OPERAND (addr, 0);
6721 ret = copy_node (pref);
6726 pref = TREE_OPERAND (pref, 0);
6727 TREE_OPERAND (pos, 0) = copy_node (pref);
6728 pos = TREE_OPERAND (pos, 0);
/* Fold DELTA into the index in the index type ITYPE.  */
6731 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6732 fold_convert (itype,
6733 TREE_OPERAND (pos, 1)),
6734 fold_convert (itype, delta));
6736 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6740 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6741 means A >= Y && A != MAX, but in this case we know that
6742 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
/* Given BOUND = (A < X or X > A) and INEQ = (A + 1 > Y or Y < A + 1),
   fold INEQ into the non-sharp A >= Y, valid because BOUND shows
   A < X <= MAX so A + 1 cannot wrap.  Returns the folded tree or
   NULL_TREE (the failure returns are elided lines in this extract).  */
6745 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6747 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6749 if (TREE_CODE (bound) == LT_EXPR)
6750 a = TREE_OPERAND (bound, 0);
6751 else if (TREE_CODE (bound) == GT_EXPR)
6752 a = TREE_OPERAND (bound, 1);
6756 typea = TREE_TYPE (a);
6757 if (!INTEGRAL_TYPE_P (typea)
6758 && !POINTER_TYPE_P (typea))
6761 if (TREE_CODE (ineq) == LT_EXPR)
6763 a1 = TREE_OPERAND (ineq, 1);
6764 y = TREE_OPERAND (ineq, 0);
6766 else if (TREE_CODE (ineq) == GT_EXPR)
6768 a1 = TREE_OPERAND (ineq, 0);
6769 y = TREE_OPERAND (ineq, 1);
6774 if (TREE_TYPE (a1) != typea)
/* Require A1 to be exactly A + 1: their difference must fold to 1.  */
6777 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6778 if (!integer_onep (diff))
6781 return fold_build2 (GE_EXPR, type, a, y);
6784 /* Fold a sum or difference of at least one multiplication.
6785 Returns the folded tree or NULL if no simplification could be made. */
/* Fold a sum/difference where at least one operand is a MULT_EXPR by
   factoring out a common multiplicand: (A*C) +- (B*C) -> (A+-B)*C,
   also handling (A*C) +- A and common power-of-two factors.  Returns
   the folded tree or NULL.  NOTE(review): the else-arms pairing each
   MULT_EXPR test, and some factor bookkeeping lines, are elided in
   this extract.  */
6788 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6790 tree arg00, arg01, arg10, arg11;
6791 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6793 /* (A * C) +- (B * C) -> (A+-B) * C.
6794 (A * C) +- A -> A * (C+-1).
6795 We are most concerned about the case where C is a constant,
6796 but other combinations show up during loop reduction. Since
6797 it is not difficult, try all four possibilities. */
6799 if (TREE_CODE (arg0) == MULT_EXPR)
6801 arg00 = TREE_OPERAND (arg0, 0);
6802 arg01 = TREE_OPERAND (arg0, 1);
/* Non-MULT operand: treat it as itself * 1.  */
6807 arg01 = build_one_cst (type);
6809 if (TREE_CODE (arg1) == MULT_EXPR)
6811 arg10 = TREE_OPERAND (arg1, 0);
6812 arg11 = TREE_OPERAND (arg1, 1);
6817 arg11 = build_one_cst (type);
/* Look for a multiplicand common to both products.  */
6821 if (operand_equal_p (arg01, arg11, 0))
6822 same = arg01, alt0 = arg00, alt1 = arg10;
6823 else if (operand_equal_p (arg00, arg10, 0))
6824 same = arg00, alt0 = arg01, alt1 = arg11;
6825 else if (operand_equal_p (arg00, arg11, 0))
6826 same = arg00, alt0 = arg01, alt1 = arg10;
6827 else if (operand_equal_p (arg01, arg10, 0))
6828 same = arg01, alt0 = arg00, alt1 = arg11;
6830 /* No identical multiplicands; see if we can find a common
6831 power-of-two factor in non-power-of-two multiplies. This
6832 can help in multi-dimensional array access. */
6833 else if (host_integerp (arg01, 0)
6834 && host_integerp (arg11, 0))
6836 HOST_WIDE_INT int01, int11, tmp;
6839 int01 = TREE_INT_CST_LOW (arg01);
6840 int11 = TREE_INT_CST_LOW (arg11);
6842 /* Move min of absolute values to int11. */
6843 if ((int01 >= 0 ? int01 : -int01)
6844 < (int11 >= 0 ? int11 : -int11))
6846 tmp = int01, int01 = int11, int11 = tmp;
6847 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6854 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
6856 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6857 build_int_cst (TREE_TYPE (arg00),
6862 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
/* Emit (alt0 CODE alt1) * same, all converted to TYPE.  */
6867 return fold_build2 (MULT_EXPR, type,
6868 fold_build2 (code, type,
6869 fold_convert (type, alt0),
6870 fold_convert (type, alt1)),
6871 fold_convert (type, same));
6876 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6877 specified by EXPR into the buffer PTR of length LEN bytes.
6878 Return the number of bytes placed in the buffer, or zero
/* Serialize the INTEGER_CST EXPR into PTR (target byte order,
   honoring BYTES/WORDS_BIG_ENDIAN).  Returns bytes written, or zero
   if LEN is too small.  */
6882 native_encode_int (tree expr, unsigned char *ptr, int len)
6884 tree type = TREE_TYPE (expr);
6885 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6886 int byte, offset, word, words;
6887 unsigned char value;
6889 if (total_bytes > len)
6891 words = total_bytes / UNITS_PER_WORD;
6893 for (byte = 0; byte < total_bytes; byte++)
6895 int bitpos = byte * BITS_PER_UNIT;
/* Pull the byte from the low or high HOST_WIDE_INT half.  */
6896 if (bitpos < HOST_BITS_PER_WIDE_INT)
6897 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6899 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6900 >> (bitpos - HOST_BITS_PER_WIDE_INT));
/* Map host byte index to target buffer offset per endianness.  */
6902 if (total_bytes > UNITS_PER_WORD)
6904 word = byte / UNITS_PER_WORD;
6905 if (WORDS_BIG_ENDIAN)
6906 word = (words - 1) - word;
6907 offset = word * UNITS_PER_WORD;
6908 if (BYTES_BIG_ENDIAN)
6909 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6911 offset += byte % UNITS_PER_WORD;
6914 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6915 ptr[offset] = value;
6921 /* Subroutine of native_encode_expr. Encode the REAL_CST
6922 specified by EXPR into the buffer PTR of length LEN bytes.
6923 Return the number of bytes placed in the buffer, or zero
/* Serialize the REAL_CST EXPR into PTR via real_to_target (32-bit
   long chunks, honoring FLOAT_WORDS/BYTES_BIG_ENDIAN).  Returns bytes
   written, or zero if LEN is too small.  */
6927 native_encode_real (tree expr, unsigned char *ptr, int len)
6929 tree type = TREE_TYPE (expr);
6930 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6931 int byte, offset, word, words;
6932 unsigned char value;
6934 /* There are always 32 bits in each long, no matter the size of
6935 the hosts long. We handle floating point representations with
6939 if (total_bytes > len)
6941 words = total_bytes / UNITS_PER_WORD;
6943 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6945 for (byte = 0; byte < total_bytes; byte++)
6947 int bitpos = byte * BITS_PER_UNIT;
6948 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
/* Map host byte index to target buffer offset per endianness.  */
6950 if (total_bytes > UNITS_PER_WORD)
6952 word = byte / UNITS_PER_WORD;
6953 if (FLOAT_WORDS_BIG_ENDIAN)
6954 word = (words - 1) - word;
6955 offset = word * UNITS_PER_WORD;
6956 if (BYTES_BIG_ENDIAN)
6957 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6959 offset += byte % UNITS_PER_WORD;
6962 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6963 ptr[offset] = value;
6968 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6969 specified by EXPR into the buffer PTR of length LEN bytes.
6970 Return the number of bytes placed in the buffer, or zero
/* Serialize the COMPLEX_CST EXPR: real part first, then imaginary
   part immediately after.  Returns total bytes written (the
   zero-on-failure checks after each encode are elided lines).  */
6974 native_encode_complex (tree expr, unsigned char *ptr, int len)
6979 part = TREE_REALPART (expr);
6980 rsize = native_encode_expr (part, ptr, len);
6983 part = TREE_IMAGPART (expr);
6984 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6987 return rsize + isize;
6991 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6992 specified by EXPR into the buffer PTR of length LEN bytes.
6993 Return the number of bytes placed in the buffer, or zero
/* Serialize the VECTOR_CST EXPR element by element; trailing elements
   missing from the TREE_VECTOR_CST_ELTS chain are encoded as zero
   bytes.  Returns total bytes written, or zero on failure.  */
6997 native_encode_vector (tree expr, unsigned char *ptr, int len)
6999 int i, size, offset, count;
7000 tree itype, elem, elements;
7003 elements = TREE_VECTOR_CST_ELTS (expr);
7004 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7005 itype = TREE_TYPE (TREE_TYPE (expr));
7006 size = GET_MODE_SIZE (TYPE_MODE (itype));
7007 for (i = 0; i < count; i++)
7011 elem = TREE_VALUE (elements);
7012 elements = TREE_CHAIN (elements);
7019 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
/* Chain exhausted: pad the remaining element slot with zeros.  */
7024 if (offset + size > len)
7026 memset (ptr+offset, 0, size);
7034 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7035 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7036 buffer PTR of length LEN bytes. Return the number of bytes
7037 placed in the buffer, or zero upon failure. */
/* Dispatch on TREE_CODE (EXPR) to the matching native_encode_*
   routine.  Returns bytes written, or zero for unsupported codes
   (the case labels and default are elided lines in this extract).  */
7040 native_encode_expr (tree expr, unsigned char *ptr, int len)
7042 switch (TREE_CODE (expr))
7045 return native_encode_int (expr, ptr, len);
7048 return native_encode_real (expr, ptr, len);
7051 return native_encode_complex (expr, ptr, len);
7054 return native_encode_vector (expr, ptr, len);
7062 /* Subroutine of native_interpret_expr. Interpret the contents of
7063 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7064 If the buffer cannot be interpreted, return NULL_TREE. */
/* Inverse of native_encode_int: rebuild an INTEGER_CST of TYPE from
   the target-ordered bytes in PTR.  Returns the constant, or
   NULL_TREE if LEN is too small or TYPE exceeds two HOST_WIDE_INTs.  */
7067 native_interpret_int (tree type, unsigned char *ptr, int len)
7069 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7070 int byte, offset, word, words;
7071 unsigned char value;
7072 unsigned int HOST_WIDE_INT lo = 0;
7073 HOST_WIDE_INT hi = 0;
7075 if (total_bytes > len)
7077 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7079 words = total_bytes / UNITS_PER_WORD;
7081 for (byte = 0; byte < total_bytes; byte++)
7083 int bitpos = byte * BITS_PER_UNIT;
/* Same endianness mapping as native_encode_int, in reverse.  */
7084 if (total_bytes > UNITS_PER_WORD)
7086 word = byte / UNITS_PER_WORD;
7087 if (WORDS_BIG_ENDIAN)
7088 word = (words - 1) - word;
7089 offset = word * UNITS_PER_WORD;
7090 if (BYTES_BIG_ENDIAN)
7091 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7093 offset += byte % UNITS_PER_WORD;
7096 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7097 value = ptr[offset];
/* Accumulate into the low or high HOST_WIDE_INT half.  */
7099 if (bitpos < HOST_BITS_PER_WIDE_INT)
7100 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7102 hi |= (unsigned HOST_WIDE_INT) value
7103 << (bitpos - HOST_BITS_PER_WIDE_INT);
7106 return build_int_cst_wide_type (type, lo, hi);
7110 /* Subroutine of native_interpret_expr. Interpret the contents of
7111 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7112 If the buffer cannot be interpreted, return NULL_TREE. */
/* Inverse of native_encode_real: rebuild a REAL_CST of TYPE from the
   target-ordered bytes in PTR via real_from_target.  Returns the
   constant, or NULL_TREE if LEN is too small or the mode exceeds
   24 bytes (the tmp[] buffer limit).  */
7115 native_interpret_real (tree type, unsigned char *ptr, int len)
7117 enum machine_mode mode = TYPE_MODE (type);
7118 int total_bytes = GET_MODE_SIZE (mode);
7119 int byte, offset, word, words;
7120 unsigned char value;
7121 /* There are always 32 bits in each long, no matter the size of
7122 the hosts long. We handle floating point representations with
7127 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7128 if (total_bytes > len || total_bytes > 24)
7130 words = total_bytes / UNITS_PER_WORD;
7132 memset (tmp, 0, sizeof (tmp));
7133 for (byte = 0; byte < total_bytes; byte++)
7135 int bitpos = byte * BITS_PER_UNIT;
/* Same endianness mapping as native_encode_real, in reverse.  */
7136 if (total_bytes > UNITS_PER_WORD)
7138 word = byte / UNITS_PER_WORD;
7139 if (FLOAT_WORDS_BIG_ENDIAN)
7140 word = (words - 1) - word;
7141 offset = word * UNITS_PER_WORD;
7142 if (BYTES_BIG_ENDIAN)
7143 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7145 offset += byte % UNITS_PER_WORD;
7148 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7149 value = ptr[offset];
7151 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7154 real_from_target (&r, tmp, mode);
7155 return build_real (type, r);
7159 /* Subroutine of native_interpret_expr. Interpret the contents of
7160 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7161 If the buffer cannot be interpreted, return NULL_TREE. */
7164 native_interpret_complex (tree type, unsigned char *ptr, int len)
7166 tree etype, rpart, ipart;
/* ETYPE is the component (element) type; SIZE its byte width.  */
7169 etype = TREE_TYPE (type);
7170 size = GET_MODE_SIZE (TYPE_MODE (etype));
/* The real part lives at offset 0, the imaginary part immediately
   after it; each is decoded recursively as a scalar constant.  */
7173 rpart = native_interpret_expr (etype, ptr, size);
7176 ipart = native_interpret_expr (etype, ptr+size, size);
7179 return build_complex (type, rpart, ipart);
7183 /* Subroutine of native_interpret_expr. Interpret the contents of
7184 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7185 If the buffer cannot be interpreted, return NULL_TREE. */
7188 native_interpret_vector (tree type, unsigned char *ptr, int len)
7190 tree etype, elem, elements;
7193 etype = TREE_TYPE (type);
7194 size = GET_MODE_SIZE (TYPE_MODE (etype));
7195 count = TYPE_VECTOR_SUBPARTS (type);
/* Make sure the buffer actually holds COUNT elements of SIZE bytes.  */
7196 if (size * count > len)
7199 elements = NULL_TREE;
/* Walk the elements from last to first so that consing onto the
   front of ELEMENTS yields the list in forward element order.  */
7200 for (i = count - 1; i >= 0; i--)
7202 elem = native_interpret_expr (etype, ptr+(i*size), size);
7205 elements = tree_cons (NULL_TREE, elem, elements);
7207 return build_vector (type, elements);
7211 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7212 the buffer PTR of length LEN as a constant of type TYPE. For
7213 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7214 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7215 return NULL_TREE. */
7218 native_interpret_expr (tree type, unsigned char *ptr, int len)
/* Dispatch on the tree code of TYPE to the matching decoder.  The
   case labels are elided in this extract; each return below belongs
   to the corresponding type-code group (integer, real, complex,
   vector).  */
7220 switch (TREE_CODE (type))
7225 return native_interpret_int (type, ptr, len);
7228 return native_interpret_real (type, ptr, len);
7231 return native_interpret_complex (type, ptr, len);
7234 return native_interpret_vector (type, ptr, len);
7242 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7243 TYPE at compile-time. If we're unable to perform the conversion
7244 return NULL_TREE. */
7247 fold_view_convert_expr (tree type, tree expr)
7249 /* We support up to 512-bit values (for V8DFmode). */
7250 unsigned char buffer[64];
7253 /* Check that the host and target are sane. */
7254 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
/* Round-trip through the target byte image: encode EXPR into BUFFER,
   then reinterpret those bytes as a constant of TYPE.  */
7257 len = native_encode_expr (expr, buffer, sizeof (buffer));
7261 return native_interpret_expr (type, buffer, len);
7265 /* Fold a unary expression of code CODE and type TYPE with operand
7266 OP0. Return the folded expression if folding is successful.
7267 Otherwise, return NULL_TREE. */
7270 fold_unary (enum tree_code code, tree type, tree op0)
7274 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Only genuine single-operand expression codes are accepted.  */
7276 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7277 && TREE_CODE_LENGTH (code) == 1);
7282 if (code == NOP_EXPR || code == CONVERT_EXPR
7283 || code == FLOAT_EXPR || code == ABS_EXPR)
7285 /* Don't use STRIP_NOPS, because signedness of argument type
7287 STRIP_SIGN_NOPS (arg0);
7291 /* Strip any conversions that don't change the mode. This
7292 is safe for every expression, except for a comparison
7293 expression because its signedness is derived from its
7296 Note that this is done as an internal manipulation within
7297 the constant folder, in order to find the simplest
7298 representation of the arguments so that their form can be
7299 studied. In any cases, the appropriate type conversions
7300 should be put back in the tree that will get out of the
/* Distribute a unary operation over COMPOUND_EXPR / COND_EXPR
   operands so the operation applies to the value-producing part.  */
7306 if (TREE_CODE_CLASS (code) == tcc_unary)
7308 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7309 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7310 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7311 else if (TREE_CODE (arg0) == COND_EXPR)
7313 tree arg01 = TREE_OPERAND (arg0, 1);
7314 tree arg02 = TREE_OPERAND (arg0, 2);
7315 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7316 arg01 = fold_build1 (code, type, arg01);
7317 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7318 arg02 = fold_build1 (code, type, arg02);
7319 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7322 /* If this was a conversion, and all we did was to move into
7323 inside the COND_EXPR, bring it back out. But leave it if
7324 it is a conversion from integer to integer and the
7325 result precision is no wider than a word since such a
7326 conversion is cheap and may be optimized away by combine,
7327 while it couldn't if it were outside the COND_EXPR. Then return
7328 so we don't get into an infinite recursion loop taking the
7329 conversion out and then back in. */
7331 if ((code == NOP_EXPR || code == CONVERT_EXPR
7332 || code == NON_LVALUE_EXPR)
7333 && TREE_CODE (tem) == COND_EXPR
7334 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7335 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7336 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7337 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7338 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7339 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7340 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7342 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7343 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7344 || flag_syntax_only))
7345 tem = build1 (code, type,
7347 TREE_TYPE (TREE_OPERAND
7348 (TREE_OPERAND (tem, 1), 0)),
7349 TREE_OPERAND (tem, 0),
7350 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7351 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
/* A comparison operand can sometimes be reused directly by
   retyping it (boolean result) instead of wrapping it.  */
7354 else if (COMPARISON_CLASS_P (arg0))
7356 if (TREE_CODE (type) == BOOLEAN_TYPE)
7358 arg0 = copy_node (arg0);
7359 TREE_TYPE (arg0) = type;
7362 else if (TREE_CODE (type) != INTEGER_TYPE)
7363 return fold_build3 (COND_EXPR, type, arg0,
7364 fold_build1 (code, type,
7366 fold_build1 (code, type,
7367 integer_zero_node));
/* NOTE(review): several case labels preceding this one (presumably
   NOP_EXPR/CONVERT_EXPR) are elided in this extract; the conversion
   folding below runs for those codes as well as FIX_TRUNC_EXPR.  */
7376 case FIX_TRUNC_EXPR:
/* A conversion to the operand's own type is a no-op.  */
7377 if (TREE_TYPE (op0) == type)
7380 /* If we have (type) (a CMP b) and type is an integral type, return
7381 new expression involving the new type. */
7382 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7383 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7384 TREE_OPERAND (op0, 1));
7386 /* Handle cases of two conversions in a row. */
7387 if (TREE_CODE (op0) == NOP_EXPR
7388 || TREE_CODE (op0) == CONVERT_EXPR)
/* Classify the inner (inside), middle (inter) and outer (final)
   types of the double conversion by kind, precision and sign.  */
7390 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7391 tree inter_type = TREE_TYPE (op0);
7392 int inside_int = INTEGRAL_TYPE_P (inside_type);
7393 int inside_ptr = POINTER_TYPE_P (inside_type);
7394 int inside_float = FLOAT_TYPE_P (inside_type);
7395 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7396 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7397 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7398 int inter_int = INTEGRAL_TYPE_P (inter_type);
7399 int inter_ptr = POINTER_TYPE_P (inter_type);
7400 int inter_float = FLOAT_TYPE_P (inter_type);
7401 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7402 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7403 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7404 int final_int = INTEGRAL_TYPE_P (type);
7405 int final_ptr = POINTER_TYPE_P (type);
7406 int final_float = FLOAT_TYPE_P (type);
7407 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7408 unsigned int final_prec = TYPE_PRECISION (type);
7409 int final_unsignedp = TYPE_UNSIGNED (type);
7411 /* In addition to the cases of two conversions in a row
7412 handled below, if we are converting something to its own
7413 type via an object of identical or wider precision, neither
7414 conversion is needed. */
7415 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7416 && (((inter_int || inter_ptr) && final_int)
7417 || (inter_float && final_float))
7418 && inter_prec >= final_prec)
7419 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7421 /* Likewise, if the intermediate and final types are either both
7422 float or both integer, we don't need the middle conversion if
7423 it is wider than the final type and doesn't change the signedness
7424 (for integers). Avoid this if the final type is a pointer
7425 since then we sometimes need the inner conversion. Likewise if
7426 the outer has a precision not equal to the size of its mode. */
7427 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7428 || (inter_float && inside_float)
7429 || (inter_vec && inside_vec))
7430 && inter_prec >= inside_prec
7431 && (inter_float || inter_vec
7432 || inter_unsignedp == inside_unsignedp)
7433 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7434 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7436 && (! final_vec || inter_prec == inside_prec))
7437 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7439 /* If we have a sign-extension of a zero-extended value, we can
7440 replace that by a single zero-extension. */
7441 if (inside_int && inter_int && final_int
7442 && inside_prec < inter_prec && inter_prec < final_prec
7443 && inside_unsignedp && !inter_unsignedp)
7444 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7446 /* Two conversions in a row are not needed unless:
7447 - some conversion is floating-point (overstrict for now), or
7448 - some conversion is a vector (overstrict for now), or
7449 - the intermediate type is narrower than both initial and
7451 - the intermediate type and innermost type differ in signedness,
7452 and the outermost type is wider than the intermediate, or
7453 - the initial type is a pointer type and the precisions of the
7454 intermediate and final types differ, or
7455 - the final type is a pointer type and the precisions of the
7456 initial and intermediate types differ.
7457 - the final type is a pointer type and the initial type not
7458 - the initial type is a pointer to an array and the final type
7460 if (! inside_float && ! inter_float && ! final_float
7461 && ! inside_vec && ! inter_vec && ! final_vec
7462 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7463 && ! (inside_int && inter_int
7464 && inter_unsignedp != inside_unsignedp
7465 && inter_prec < final_prec)
7466 && ((inter_unsignedp && inter_prec > inside_prec)
7467 == (final_unsignedp && final_prec > inter_prec))
7468 && ! (inside_ptr && inter_prec != final_prec)
7469 && ! (final_ptr && inside_prec != inter_prec)
7470 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7471 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7472 && final_ptr == inside_ptr
7474 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7475 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7476 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7479 /* Handle (T *)&A.B.C for A being of type T and B and C
7480 living at offset zero. This occurs frequently in
7481 C++ upcasting and then accessing the base. */
7482 if (TREE_CODE (op0) == ADDR_EXPR
7483 && POINTER_TYPE_P (type)
7484 && handled_component_p (TREE_OPERAND (op0, 0)))
7486 HOST_WIDE_INT bitsize, bitpos;
7488 enum machine_mode mode;
7489 int unsignedp, volatilep;
7490 tree base = TREE_OPERAND (op0, 0);
7491 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7492 &mode, &unsignedp, &volatilep, false);
7493 /* If the reference was to a (constant) zero offset, we can use
7494 the address of the base if it has the same base type
7495 as the result type. */
7496 if (! offset && bitpos == 0
7497 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7498 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7499 return fold_convert (type, build_fold_addr_expr (base));
/* Hoist a conversion out of a constant assignment: perform the
   assignment first, then yield the converted constant.  Bitfield
   stores are excluded because their stored value may be truncated.  */
7502 if ((TREE_CODE (op0) == MODIFY_EXPR
7503 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
7504 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
7505 /* Detect assigning a bitfield. */
7506 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
7508 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
7510 /* Don't leave an assignment inside a conversion
7511 unless assigning a bitfield. */
7512 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
7513 /* First do the assignment, then return converted constant. */
7514 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7515 TREE_NO_WARNING (tem) = 1;
7516 TREE_USED (tem) = 1;
7520 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7521 constants (if x has signed type, the sign bit cannot be set
7522 in c). This folds extension into the BIT_AND_EXPR. */
7523 if (INTEGRAL_TYPE_P (type)
7524 && TREE_CODE (type) != BOOLEAN_TYPE
7525 && TREE_CODE (op0) == BIT_AND_EXPR
7526 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
7529 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7532 if (TYPE_UNSIGNED (TREE_TYPE (and))
7533 || (TYPE_PRECISION (type)
7534 <= TYPE_PRECISION (TREE_TYPE (and))))
7536 else if (TYPE_PRECISION (TREE_TYPE (and1))
7537 <= HOST_BITS_PER_WIDE_INT
7538 && host_integerp (and1, 1))
7540 unsigned HOST_WIDE_INT cst;
7542 cst = tree_low_cst (and1, 1);
/* Keep only the bits at or above the sign-bit position of AND1's
   type; the transform is safe when none of them are set.  */
7543 cst &= (HOST_WIDE_INT) -1
7544 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7545 change = (cst == 0);
7546 #ifdef LOAD_EXTEND_OP
7548 && !flag_syntax_only
7549 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7552 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7553 and0 = fold_convert (uns, and0);
7554 and1 = fold_convert (uns, and1);
/* Re-fit the mask constant to TYPE, carrying overflow flags over.  */
7560 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7561 TREE_INT_CST_HIGH (and1), 0,
7562 TREE_OVERFLOW (and1),
7563 TREE_CONSTANT_OVERFLOW (and1));
7564 return fold_build2 (BIT_AND_EXPR, type,
7565 fold_convert (type, and0), tem);
7569 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7570 T2 being pointers to types of the same size. */
7571 if (POINTER_TYPE_P (type)
7572 && BINARY_CLASS_P (arg0)
7573 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7574 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7576 tree arg00 = TREE_OPERAND (arg0, 0);
7578 tree t1 = TREE_TYPE (arg00);
7579 tree tt0 = TREE_TYPE (t0);
7580 tree tt1 = TREE_TYPE (t1);
7581 tree s0 = TYPE_SIZE (tt0);
7582 tree s1 = TYPE_SIZE (tt1);
/* The pointed-to sizes must be equal compile-time constants.  */
7584 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7585 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7586 TREE_OPERAND (arg0, 1));
7589 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7590 of the same precision, and X is a integer type not narrower than
7591 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7592 if (INTEGRAL_TYPE_P (type)
7593 && TREE_CODE (op0) == BIT_NOT_EXPR
7594 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7595 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7596 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7597 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7599 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7600 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7601 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7602 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
/* Last resort for conversions: fold a constant operand directly.  */
7605 tem = fold_convert_const (code, type, arg0);
7606 return tem ? tem : NULL_TREE;
7608 case VIEW_CONVERT_EXPR:
/* Nested view-conversions collapse to a single one of the outer type.  */
7609 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7610 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7611 return fold_view_convert_expr (type, op0);
/* NEGATE_EXPR handling (case label elided in this extract).  */
7614 tem = fold_negate_expr (arg0);
7616 return fold_convert (type, tem);
/* ABS_EXPR handling (case label elided in this extract).  */
7620 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7621 return fold_abs_const (arg0, type);
7622 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7623 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7624 /* Convert fabs((double)float) into (double)fabsf(float). */
7625 else if (TREE_CODE (arg0) == NOP_EXPR
7626 && TREE_CODE (type) == REAL_TYPE)
7628 tree targ0 = strip_float_extensions (arg0);
7630 return fold_convert (type, fold_build1 (ABS_EXPR,
7634 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7635 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7638 /* Strip sign ops from argument. */
7639 if (TREE_CODE (type) == REAL_TYPE)
7641 tem = fold_strip_sign_ops (arg0);
7643 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
/* CONJ_EXPR handling (case label elided): conjugating a real value
   is the identity; otherwise negate the imaginary component.  */
7648 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7649 return fold_convert (type, arg0);
7650 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7652 tree itype = TREE_TYPE (type);
7653 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7654 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7655 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7657 if (TREE_CODE (arg0) == COMPLEX_CST)
7659 tree itype = TREE_TYPE (type);
7660 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7661 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7662 return build_complex (type, rpart, negate_expr (ipart));
7664 if (TREE_CODE (arg0) == CONJ_EXPR)
7665 return fold_convert (type, TREE_OPERAND (arg0, 0));
/* BIT_NOT_EXPR handling (case label elided in this extract).  */
7669 if (TREE_CODE (arg0) == INTEGER_CST)
7670 return fold_not_const (arg0, type);
7671 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7672 return TREE_OPERAND (arg0, 0);
7673 /* Convert ~ (-A) to A - 1. */
7674 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7675 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7676 build_int_cst (type, 1));
7677 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7678 else if (INTEGRAL_TYPE_P (type)
7679 && ((TREE_CODE (arg0) == MINUS_EXPR
7680 && integer_onep (TREE_OPERAND (arg0, 1)))
7681 || (TREE_CODE (arg0) == PLUS_EXPR
7682 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7683 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7684 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7685 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7686 && (tem = fold_unary (BIT_NOT_EXPR, type,
7688 TREE_OPERAND (arg0, 0)))))
7689 return fold_build2 (BIT_XOR_EXPR, type, tem,
7690 fold_convert (type, TREE_OPERAND (arg0, 1)));
7691 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7692 && (tem = fold_unary (BIT_NOT_EXPR, type,
7694 TREE_OPERAND (arg0, 1)))))
7695 return fold_build2 (BIT_XOR_EXPR, type,
7696 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7700 case TRUTH_NOT_EXPR:
7701 /* The argument to invert_truthvalue must have Boolean type. */
7702 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7703 arg0 = fold_convert (boolean_type_node, arg0);
7705 /* Note that the operand of this must be an int
7706 and its values must be 0 or 1.
7707 ("true" is a fixed value perhaps depending on the language,
7708 but we don't handle values other than 1 correctly yet.) */
7709 tem = fold_truth_not_expr (arg0);
7712 return fold_convert (type, tem);
/* REALPART_EXPR handling (case label elided in this extract).  */
7715 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7716 return fold_convert (type, arg0);
7717 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7718 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7719 TREE_OPERAND (arg0, 1));
7720 if (TREE_CODE (arg0) == COMPLEX_CST)
7721 return fold_convert (type, TREE_REALPART (arg0));
/* Distribute realpart over complex addition/subtraction.  */
7722 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7724 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7725 tem = fold_build2 (TREE_CODE (arg0), itype,
7726 fold_build1 (REALPART_EXPR, itype,
7727 TREE_OPERAND (arg0, 0)),
7728 fold_build1 (REALPART_EXPR, itype,
7729 TREE_OPERAND (arg0, 1)));
7730 return fold_convert (type, tem);
7732 if (TREE_CODE (arg0) == CONJ_EXPR)
7734 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7735 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7736 return fold_convert (type, tem);
/* creal(cexpi(x)) folds to cos(x).  */
7738 if (TREE_CODE (arg0) == CALL_EXPR)
7740 tree fn = get_callee_fndecl (arg0);
7741 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7742 switch (DECL_FUNCTION_CODE (fn))
7744 CASE_FLT_FN (BUILT_IN_CEXPI):
7745 fn = mathfn_built_in (type, BUILT_IN_COS);
7746 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
/* IMAGPART_EXPR handling (case label elided): the imaginary part of
   a non-complex value is zero.  */
7754 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7755 return fold_convert (type, integer_zero_node);
7756 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7757 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7758 TREE_OPERAND (arg0, 0));
7759 if (TREE_CODE (arg0) == COMPLEX_CST)
7760 return fold_convert (type, TREE_IMAGPART (arg0));
7761 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7763 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7764 tem = fold_build2 (TREE_CODE (arg0), itype,
7765 fold_build1 (IMAGPART_EXPR, itype,
7766 TREE_OPERAND (arg0, 0)),
7767 fold_build1 (IMAGPART_EXPR, itype,
7768 TREE_OPERAND (arg0, 1)));
7769 return fold_convert (type, tem);
7771 if (TREE_CODE (arg0) == CONJ_EXPR)
7773 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7774 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7775 return fold_convert (type, negate_expr (tem));
/* cimag(cexpi(x)) folds to sin(x).  */
7777 if (TREE_CODE (arg0) == CALL_EXPR)
7779 tree fn = get_callee_fndecl (arg0);
7780 if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
7781 switch (DECL_FUNCTION_CODE (fn))
7783 CASE_FLT_FN (BUILT_IN_CEXPI):
7784 fn = mathfn_built_in (type, BUILT_IN_SIN);
7785 return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));
7794 } /* switch (code) */
7797 /* Fold a binary expression of code CODE and type TYPE with operands
7798 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7799 Return the folded expression if folding is successful. Otherwise,
7800 return NULL_TREE. */
7803 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7805 enum tree_code compl_code;
/* COMPL_CODE is the complementary operation: MAX for MIN and
   vice versa.  Codes other than MIN/MAX are not handled here.  */
7807 if (code == MIN_EXPR)
7808 compl_code = MAX_EXPR;
7809 else if (code == MAX_EXPR)
7810 compl_code = MIN_EXPR;
/* Each identity below returns one operand while using
   omit_one_operand to preserve side effects of the discarded one.
   The comments are written for MIN/MAX; the MAX/MIN duals hold by
   symmetry through COMPL_CODE.  */
7814 /* MIN (MAX (a, b), b) == b. */
7815 if (TREE_CODE (op0) == compl_code
7816 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7817 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0))
7819 /* MIN (MAX (b, a), b) == b. */
7820 if (TREE_CODE (op0) == compl_code
7821 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7822 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7823 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7825 /* MIN (a, MAX (a, b)) == a. */
7826 if (TREE_CODE (op1) == compl_code
7827 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7828 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7829 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7831 /* MIN (a, MAX (b, a)) == a. */
7832 if (TREE_CODE (op1) == compl_code
7833 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7834 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7835 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7840 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
7841 by changing CODE to reduce the magnitude of constants involved in
7842 ARG0 of the comparison.
7843 Returns a canonicalized comparison tree if a simplification was
7844 possible, otherwise returns NULL_TREE. */
7847 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
7848 tree arg0, tree arg1)
7850 enum tree_code code0 = TREE_CODE (arg0);
7851 tree t, cst0 = NULL_TREE;
7855 /* Match A +- CST code arg1 and CST code arg1. */
7856 if (!(((code0 == MINUS_EXPR
7857 || code0 == PLUS_EXPR)
7858 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7859 || code0 == INTEGER_CST))
7862 /* Identify the constant in arg0 and its sign. */
7863 if (code0 == INTEGER_CST)
7866 cst0 = TREE_OPERAND (arg0, 1);
7867 sgn0 = tree_int_cst_sgn (cst0);
7869 /* Overflowed constants and zero will cause problems. */
7870 if (integer_zerop (cst0)
7871 || TREE_OVERFLOW (cst0))
7874 /* See if we can reduce the magnitude of the constant in
7875 arg0 by changing the comparison code. */
7876 if (code0 == INTEGER_CST)
/* Sole-constant case: convert between strict and non-strict
   comparisons while moving CST one step toward zero.  */
7878 /* CST <= arg1 -> CST-1 < arg1. */
7879 if (code == LE_EXPR && sgn0 == 1)
7881 /* -CST < arg1 -> -CST-1 <= arg1. */
7882 else if (code == LT_EXPR && sgn0 == -1)
7884 /* CST > arg1 -> CST-1 >= arg1. */
7885 else if (code == GT_EXPR && sgn0 == 1)
7887 /* -CST >= arg1 -> -CST-1 > arg1. */
7888 else if (code == GE_EXPR && sgn0 == -1)
7892 /* arg1 code' CST' might be more canonical. */
/* A +- CST case: the admissible code depends on whether CST is
   being added or subtracted and on its sign.  */
7897 /* A - CST < arg1 -> A - CST-1 <= arg1. */
7899 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7901 /* A + CST > arg1 -> A + CST-1 >= arg1. */
7902 else if (code == GT_EXPR
7903 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7905 /* A + CST <= arg1 -> A + CST-1 < arg1. */
7906 else if (code == LE_EXPR
7907 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
7909 /* A - CST >= arg1 -> A - CST-1 > arg1. */
7910 else if (code == GE_EXPR
7911 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
7917 /* Now build the constant reduced in magnitude. */
7918 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
7919 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
/* For the A +- CST form, rebuild the arithmetic with the reduced
   constant; the sole-constant form uses T directly.  */
7920 if (code0 != INTEGER_CST)
7921 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
7923 /* If swapping might yield to a more canonical form, do so. */
7925 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
7927 return fold_build2 (code, type, t, arg1);
7930 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7931 overflow further. Try to decrease the magnitude of constants involved
7932 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7933 and put sole constants at the second argument position.
7934 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7937 maybe_canonicalize_comparison (enum tree_code code, tree type,
7938 tree arg0, tree arg1)
7942 /* In principle pointers also have undefined overflow behavior,
7943 but that causes problems elsewhere. */
/* Bail out whenever overflow is defined (wrapping or trapping
   arithmetic, unsigned types) since the transform relies on
   undefined signed overflow.  */
7944 if ((flag_wrapv || flag_trapv)
7945 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
7946 || POINTER_TYPE_P (TREE_TYPE (arg0))))
7949 /* Try canonicalization by simplifying arg0. */
7950 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1)
7954 /* Try canonicalization by simplifying arg1 using the swapped
/* Retry with the operands exchanged under the swapped comparison
   code, which is equivalent to the original comparison.  */
7956 code = swap_tree_comparison (code);
7957 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7960 /* Subroutine of fold_binary. This routine performs all of the
7961 transformations that are common to the equality/inequality
7962 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7963 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7964 fold_binary should call fold_binary. Fold a comparison with
7965 tree code CODE and type TYPE with operands OP0 and OP1. Return
7966 the folded comparison or NULL_TREE. */
7969 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7971 tree arg0, arg1, tem;
7976 STRIP_SIGN_NOPS (arg0);
7977 STRIP_SIGN_NOPS (arg1);
7979 tem = fold_relational_const (code, type, arg0, arg1);
7980 if (tem != NULL_TREE)
7983 /* If one arg is a real or integer constant, put it last. */
7984 if (tree_swap_operands_p (arg0, arg1, true))
7985 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7987 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7988 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7989 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7990 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7991 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7992 && !(flag_wrapv || flag_trapv))
7993 && (TREE_CODE (arg1) == INTEGER_CST
7994 && !TREE_OVERFLOW (arg1)))
7996 tree const1 = TREE_OPERAND (arg0, 1);
7998 tree variable = TREE_OPERAND (arg0, 0);
8001 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8003 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8004 TREE_TYPE (arg1), const2, const1);
8005 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8006 && (TREE_CODE (lhs) != INTEGER_CST
8007 || !TREE_OVERFLOW (lhs)))
8008 return fold_build2 (code, type, variable, lhs);
8011 /* For comparisons of pointers we can decompose it to a compile time
8012 comparison of the base objects and the offsets into the object.
8013 This requires at least one operand being an ADDR_EXPR to do more
8014 than the operand_equal_p test below. */
8015 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8016 && (TREE_CODE (arg0) == ADDR_EXPR
8017 || TREE_CODE (arg1) == ADDR_EXPR))
8019 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8020 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8021 enum machine_mode mode;
8022 int volatilep, unsignedp;
8023 bool indirect_base0 = false;
8025 /* Get base and offset for the access. Strip ADDR_EXPR for
8026 get_inner_reference, but put it back by stripping INDIRECT_REF
8027 off the base object if possible. */
8029 if (TREE_CODE (arg0) == ADDR_EXPR)
8031 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8032 &bitsize, &bitpos0, &offset0, &mode,
8033 &unsignedp, &volatilep, false);
8034 if (TREE_CODE (base0) == INDIRECT_REF)
8035 base0 = TREE_OPERAND (base0, 0);
8037 indirect_base0 = true;
8041 if (TREE_CODE (arg1) == ADDR_EXPR)
8043 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8044 &bitsize, &bitpos1, &offset1, &mode,
8045 &unsignedp, &volatilep, false);
8046 /* We have to make sure to have an indirect/non-indirect base1
8047 just the same as we did for base0. */
8048 if (TREE_CODE (base1) == INDIRECT_REF
8050 base1 = TREE_OPERAND (base1, 0);
8051 else if (!indirect_base0)
8054 else if (indirect_base0)
8057 /* If we have equivalent bases we might be able to simplify. */
8059 && operand_equal_p (base0, base1, 0))
8061 /* We can fold this expression to a constant if the non-constant
8062 offset parts are equal. */
8063 if (offset0 == offset1
8064 || (offset0 && offset1
8065 && operand_equal_p (offset0, offset1, 0)))
8070 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8072 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8074 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8076 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8078 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8080 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8084 /* We can simplify the comparison to a comparison of the variable
8085 offset parts if the constant offset parts are equal.
8086 Be careful to use signed size type here because otherwise we
8087 mess with array offsets in the wrong way. This is possible
8088 because pointer arithmetic is restricted to retain within an
8089 object and overflow on pointer differences is undefined as of
8090 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8091 else if (bitpos0 == bitpos1)
8093 tree signed_size_type_node;
8094 signed_size_type_node = signed_type_for (size_type_node);
8096 /* By converting to signed size type we cover middle-end pointer
8097 arithmetic which operates on unsigned pointer types of size
8098 type size and ARRAY_REF offsets which are properly sign or
8099 zero extended from their type in case it is narrower than
8101 if (offset0 == NULL_TREE)
8102 offset0 = build_int_cst (signed_size_type_node, 0);
8104 offset0 = fold_convert (signed_size_type_node, offset0);
8105 if (offset1 == NULL_TREE)
8106 offset1 = build_int_cst (signed_size_type_node, 0);
8108 offset1 = fold_convert (signed_size_type_node, offset1);
8110 return fold_build2 (code, type, offset0, offset1);
8115 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
8116 same object, then we can fold this to a comparison of the two offsets in
8117 signed size type. This is possible because pointer arithmetic is
8118 restricted to retain within an object and overflow on pointer differences
8119 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8120 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8121 && !flag_wrapv && !flag_trapv)
8123 tree base0, offset0, base1, offset1;
8125 if (extract_array_ref (arg0, &base0, &offset0)
8126 && extract_array_ref (arg1, &base1, &offset1)
8127 && operand_equal_p (base0, base1, 0))
8129 tree signed_size_type_node;
8130 signed_size_type_node = signed_type_for (size_type_node);
8132 /* By converting to signed size type we cover middle-end pointer
8133 arithmetic which operates on unsigned pointer types of size
8134 type size and ARRAY_REF offsets which are properly sign or
8135 zero extended from their type in case it is narrower than
8137 if (offset0 == NULL_TREE)
8138 offset0 = build_int_cst (signed_size_type_node, 0);
8140 offset0 = fold_convert (signed_size_type_node, offset0);
8141 if (offset1 == NULL_TREE)
8142 offset1 = build_int_cst (signed_size_type_node, 0);
8144 offset1 = fold_convert (signed_size_type_node, offset1);
8146 return fold_build2 (code, type, offset0, offset1);
8150 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8151 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8152 the resulting offset is smaller in absolute value than the
8154 if (!(flag_wrapv || flag_trapv)
8155 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8156 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8157 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8158 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8159 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8160 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8161 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8163 tree const1 = TREE_OPERAND (arg0, 1);
8164 tree const2 = TREE_OPERAND (arg1, 1);
8165 tree variable1 = TREE_OPERAND (arg0, 0);
8166 tree variable2 = TREE_OPERAND (arg1, 0);
8169 /* Put the constant on the side where it doesn't overflow and is
8170 of lower absolute value than before. */
8171 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8172 ? MINUS_EXPR : PLUS_EXPR,
8174 if (!TREE_OVERFLOW (cst)
8175 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8176 return fold_build2 (code, type,
8178 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8181 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8182 ? MINUS_EXPR : PLUS_EXPR,
8184 if (!TREE_OVERFLOW (cst)
8185 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8186 return fold_build2 (code, type,
8187 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8192 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8193 signed arithmetic case. That form is created by the compiler
8194 often enough for folding it to be of value. One example is in
8195 computing loop trip counts after Operator Strength Reduction. */
8196 if (!(flag_wrapv || flag_trapv)
8197 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8198 && TREE_CODE (arg0) == MULT_EXPR
8199 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8200 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8201 && integer_zerop (arg1))
8203 tree const1 = TREE_OPERAND (arg0, 1);
8204 tree const2 = arg1; /* zero */
8205 tree variable1 = TREE_OPERAND (arg0, 0);
8206 enum tree_code cmp_code = code;
8208 gcc_assert (!integer_zerop (const1));
8210 /* If const1 is negative we swap the sense of the comparison. */
8211 if (tree_int_cst_sgn (const1) < 0)
8212 cmp_code = swap_tree_comparison (cmp_code);
8214 return fold_build2 (cmp_code, type, variable1, const2);
8217 tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
8221 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8223 tree targ0 = strip_float_extensions (arg0);
8224 tree targ1 = strip_float_extensions (arg1);
8225 tree newtype = TREE_TYPE (targ0);
8227 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8228 newtype = TREE_TYPE (targ1);
8230 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8231 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8232 return fold_build2 (code, type, fold_convert (newtype, targ0),
8233 fold_convert (newtype, targ1));
8235 /* (-a) CMP (-b) -> b CMP a */
8236 if (TREE_CODE (arg0) == NEGATE_EXPR
8237 && TREE_CODE (arg1) == NEGATE_EXPR)
8238 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8239 TREE_OPERAND (arg0, 0));
8241 if (TREE_CODE (arg1) == REAL_CST)
8243 REAL_VALUE_TYPE cst;
8244 cst = TREE_REAL_CST (arg1);
8246 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8247 if (TREE_CODE (arg0) == NEGATE_EXPR)
8248 return fold_build2 (swap_tree_comparison (code), type,
8249 TREE_OPERAND (arg0, 0),
8250 build_real (TREE_TYPE (arg1),
8251 REAL_VALUE_NEGATE (cst)));
8253 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8254 /* a CMP (-0) -> a CMP 0 */
8255 if (REAL_VALUE_MINUS_ZERO (cst))
8256 return fold_build2 (code, type, arg0,
8257 build_real (TREE_TYPE (arg1), dconst0));
8259 /* x != NaN is always true, other ops are always false. */
8260 if (REAL_VALUE_ISNAN (cst)
8261 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8263 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8264 return omit_one_operand (type, tem, arg0);
8267 /* Fold comparisons against infinity. */
8268 if (REAL_VALUE_ISINF (cst))
8270 tem = fold_inf_compare (code, type, arg0, arg1);
8271 if (tem != NULL_TREE)
8276 /* If this is a comparison of a real constant with a PLUS_EXPR
8277 or a MINUS_EXPR of a real constant, we can convert it into a
8278 comparison with a revised real constant as long as no overflow
8279 occurs when unsafe_math_optimizations are enabled. */
8280 if (flag_unsafe_math_optimizations
8281 && TREE_CODE (arg1) == REAL_CST
8282 && (TREE_CODE (arg0) == PLUS_EXPR
8283 || TREE_CODE (arg0) == MINUS_EXPR)
8284 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8285 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8286 ? MINUS_EXPR : PLUS_EXPR,
8287 arg1, TREE_OPERAND (arg0, 1), 0))
8288 && ! TREE_CONSTANT_OVERFLOW (tem))
8289 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8291 /* Likewise, we can simplify a comparison of a real constant with
8292 a MINUS_EXPR whose first operand is also a real constant, i.e.
8293 (c1 - x) < c2 becomes x > c1-c2. */
8294 if (flag_unsafe_math_optimizations
8295 && TREE_CODE (arg1) == REAL_CST
8296 && TREE_CODE (arg0) == MINUS_EXPR
8297 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8298 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8300 && ! TREE_CONSTANT_OVERFLOW (tem))
8301 return fold_build2 (swap_tree_comparison (code), type,
8302 TREE_OPERAND (arg0, 1), tem);
8304 /* Fold comparisons against built-in math functions. */
8305 if (TREE_CODE (arg1) == REAL_CST
8306 && flag_unsafe_math_optimizations
8307 && ! flag_errno_math)
8309 enum built_in_function fcode = builtin_mathfn_code (arg0);
8311 if (fcode != END_BUILTINS)
8313 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8314 if (tem != NULL_TREE)
8320 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8321 if (TREE_CONSTANT (arg1)
8322 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8323 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8324 /* This optimization is invalid for ordered comparisons
8325 if CONST+INCR overflows or if foo+incr might overflow.
8326 This optimization is invalid for floating point due to rounding.
8327 For pointer types we assume overflow doesn't happen. */
8328 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8329 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8330 && (code == EQ_EXPR || code == NE_EXPR))))
8332 tree varop, newconst;
8334 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8336 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
8337 arg1, TREE_OPERAND (arg0, 1));
8338 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8339 TREE_OPERAND (arg0, 0),
8340 TREE_OPERAND (arg0, 1));
8344 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
8345 arg1, TREE_OPERAND (arg0, 1));
8346 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8347 TREE_OPERAND (arg0, 0),
8348 TREE_OPERAND (arg0, 1));
8352 /* If VAROP is a reference to a bitfield, we must mask
8353 the constant by the width of the field. */
8354 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8355 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8356 && host_integerp (DECL_SIZE (TREE_OPERAND
8357 (TREE_OPERAND (varop, 0), 1)), 1))
8359 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8360 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8361 tree folded_compare, shift;
8363 /* First check whether the comparison would come out
8364 always the same. If we don't do that we would
8365 change the meaning with the masking. */
8366 folded_compare = fold_build2 (code, type,
8367 TREE_OPERAND (varop, 0), arg1);
8368 if (TREE_CODE (folded_compare) == INTEGER_CST)
8369 return omit_one_operand (type, folded_compare, varop);
8371 shift = build_int_cst (NULL_TREE,
8372 TYPE_PRECISION (TREE_TYPE (varop)) - size);
8373 shift = fold_convert (TREE_TYPE (varop), shift);
8374 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8376 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8380 return fold_build2 (code, type, varop, newconst);
8383 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8384 && (TREE_CODE (arg0) == NOP_EXPR
8385 || TREE_CODE (arg0) == CONVERT_EXPR))
8387 /* If we are widening one operand of an integer comparison,
8388 see if the other operand is similarly being widened. Perhaps we
8389 can do the comparison in the narrower type. */
8390 tem = fold_widened_comparison (code, type, arg0, arg1);
8394 /* Or if we are changing signedness. */
8395 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8400 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8401 constant, we can simplify it. */
8402 if (TREE_CODE (arg1) == INTEGER_CST
8403 && (TREE_CODE (arg0) == MIN_EXPR
8404 || TREE_CODE (arg0) == MAX_EXPR)
8405 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8407 tem = optimize_minmax_comparison (code, type, op0, op1);
8412 /* Simplify comparison of something with itself. (For IEEE
8413 floating-point, we can only do some of these simplifications.) */
8414 if (operand_equal_p (arg0, arg1, 0))
8419 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8420 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8421 return constant_boolean_node (1, type);
8426 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8427 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8428 return constant_boolean_node (1, type);
8429 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8432 /* For NE, we can only do this simplification if integer
8433 or we don't honor IEEE floating point NaNs. */
8434 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8435 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8437 /* ... fall through ... */
8440 return constant_boolean_node (0, type);
8446 /* If we are comparing an expression that just has comparisons
8447 of two integer values, arithmetic expressions of those comparisons,
8448 and constants, we can simplify it. There are only three cases
8449 to check: the two values can either be equal, the first can be
8450 greater, or the second can be greater. Fold the expression for
8451 those three values. Since each value must be 0 or 1, we have
8452 eight possibilities, each of which corresponds to the constant 0
8453 or 1 or one of the six possible comparisons.
8455 This handles common cases like (a > b) == 0 but also handles
8456 expressions like ((x > y) - (y > x)) > 0, which supposedly
8457 occur in macroized code. */
8459 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8461 tree cval1 = 0, cval2 = 0;
8464 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8465 /* Don't handle degenerate cases here; they should already
8466 have been handled anyway. */
8467 && cval1 != 0 && cval2 != 0
8468 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8469 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8470 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8471 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8472 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8473 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8474 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8476 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8477 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8479 /* We can't just pass T to eval_subst in case cval1 or cval2
8480 was the same as ARG1. */
8483 = fold_build2 (code, type,
8484 eval_subst (arg0, cval1, maxval,
8488 = fold_build2 (code, type,
8489 eval_subst (arg0, cval1, maxval,
8493 = fold_build2 (code, type,
8494 eval_subst (arg0, cval1, minval,
8498 /* All three of these results should be 0 or 1. Confirm they are.
8499 Then use those values to select the proper code to use. */
8501 if (TREE_CODE (high_result) == INTEGER_CST
8502 && TREE_CODE (equal_result) == INTEGER_CST
8503 && TREE_CODE (low_result) == INTEGER_CST)
8505 /* Make a 3-bit mask with the high-order bit being the
8506 value for `>', the next for '=', and the low for '<'. */
8507 switch ((integer_onep (high_result) * 4)
8508 + (integer_onep (equal_result) * 2)
8509 + integer_onep (low_result))
8513 return omit_one_operand (type, integer_zero_node, arg0);
8534 return omit_one_operand (type, integer_one_node, arg0);
8538 return save_expr (build2 (code, type, cval1, cval2));
8539 return fold_build2 (code, type, cval1, cval2);
8544 /* Fold a comparison of the address of COMPONENT_REFs with the same
8545 type and component to a comparison of the address of the base
8546 object. In short, &x->a OP &y->a to x OP y and
8547 &x->a OP &y.a to x OP &y */
8548 if (TREE_CODE (arg0) == ADDR_EXPR
8549 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8550 && TREE_CODE (arg1) == ADDR_EXPR
8551 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8553 tree cref0 = TREE_OPERAND (arg0, 0);
8554 tree cref1 = TREE_OPERAND (arg1, 0);
8555 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8557 tree op0 = TREE_OPERAND (cref0, 0);
8558 tree op1 = TREE_OPERAND (cref1, 0);
8559 return fold_build2 (code, type,
8560 build_fold_addr_expr (op0),
8561 build_fold_addr_expr (op1));
8565 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8566 into a single range test. */
8567 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8568 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8569 && TREE_CODE (arg1) == INTEGER_CST
8570 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8571 && !integer_zerop (TREE_OPERAND (arg0, 1))
8572 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8573 && !TREE_OVERFLOW (arg1))
8575 tem = fold_div_compare (code, type, arg0, arg1);
8576 if (tem != NULL_TREE)
8580 /* Fold ~X op ~Y as Y op X. */
8581 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8582 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8583 return fold_build2 (code, type,
8584 TREE_OPERAND (arg1, 0),
8585 TREE_OPERAND (arg0, 0));
8587 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
8588 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8589 && TREE_CODE (arg1) == INTEGER_CST)
8590 return fold_build2 (swap_tree_comparison (code), type,
8591 TREE_OPERAND (arg0, 0),
8592 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));
8598 /* Subroutine of fold_binary. Optimize complex multiplications of the
8599 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8600 argument EXPR represents the expression "z" of type TYPE. */
8603 fold_mult_zconjz (tree type, tree expr)
8605 tree itype = TREE_TYPE (type);
8606 tree rpart, ipart, tem;
8608 if (TREE_CODE (expr) == COMPLEX_EXPR)
8610 rpart = TREE_OPERAND (expr, 0);
8611 ipart = TREE_OPERAND (expr, 1);
8613 else if (TREE_CODE (expr) == COMPLEX_CST)
8615 rpart = TREE_REALPART (expr);
8616 ipart = TREE_IMAGPART (expr);
8620 expr = save_expr (expr);
8621 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8622 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8625 rpart = save_expr (rpart);
8626 ipart = save_expr (ipart);
8627 tem = fold_build2 (PLUS_EXPR, itype,
8628 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8629 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8630 return fold_build2 (COMPLEX_EXPR, type, tem,
8631 fold_convert (itype, integer_zero_node));
8635 /* Fold a binary expression of code CODE and type TYPE with operands
8636 OP0 and OP1. Return the folded expression if folding is
8637 successful. Otherwise, return NULL_TREE. */
8640 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8642 enum tree_code_class kind = TREE_CODE_CLASS (code);
8643 tree arg0, arg1, tem;
8644 tree t1 = NULL_TREE;
8646 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8647 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8648 && TREE_CODE_LENGTH (code) == 2
8650 && op1 != NULL_TREE);
8655 /* Strip any conversions that don't change the mode. This is
8656 safe for every expression, except for a comparison expression
8657 because its signedness is derived from its operands. So, in
8658 the latter case, only strip conversions that don't change the
8661 Note that this is done as an internal manipulation within the
8662 constant folder, in order to find the simplest representation
8663 of the arguments so that their form can be studied. In any
8664 cases, the appropriate type conversions should be put back in
8665 the tree that will get out of the constant folder. */
8667 if (kind == tcc_comparison)
8669 STRIP_SIGN_NOPS (arg0);
8670 STRIP_SIGN_NOPS (arg1);
8678 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8679 constant but we can't do arithmetic on them. */
8680 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8681 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8682 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8683 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8685 if (kind == tcc_binary)
8686 tem = const_binop (code, arg0, arg1, 0);
8687 else if (kind == tcc_comparison)
8688 tem = fold_relational_const (code, type, arg0, arg1);
8692 if (tem != NULL_TREE)
8694 if (TREE_TYPE (tem) != type)
8695 tem = fold_convert (type, tem);
8700 /* If this is a commutative operation, and ARG0 is a constant, move it
8701 to ARG1 to reduce the number of tests below. */
8702 if (commutative_tree_code (code)
8703 && tree_swap_operands_p (arg0, arg1, true))
8704 return fold_build2 (code, type, op1, op0);
8706 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8708 First check for cases where an arithmetic operation is applied to a
8709 compound, conditional, or comparison operation. Push the arithmetic
8710 operation inside the compound or conditional to see if any folding
8711 can then be done. Convert comparison to conditional for this purpose.
8712 This also optimizes non-constant cases that used to be done in
8715 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8716 one of the operands is a comparison and the other is a comparison, a
8717 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8718 code below would make the expression more complex. Change it to a
8719 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8720 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8722 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8723 || code == EQ_EXPR || code == NE_EXPR)
8724 && ((truth_value_p (TREE_CODE (arg0))
8725 && (truth_value_p (TREE_CODE (arg1))
8726 || (TREE_CODE (arg1) == BIT_AND_EXPR
8727 && integer_onep (TREE_OPERAND (arg1, 1)))))
8728 || (truth_value_p (TREE_CODE (arg1))
8729 && (truth_value_p (TREE_CODE (arg0))
8730 || (TREE_CODE (arg0) == BIT_AND_EXPR
8731 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8733 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8734 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8737 fold_convert (boolean_type_node, arg0),
8738 fold_convert (boolean_type_node, arg1));
8740 if (code == EQ_EXPR)
8741 tem = invert_truthvalue (tem);
8743 return fold_convert (type, tem);
8746 if (TREE_CODE_CLASS (code) == tcc_binary
8747 || TREE_CODE_CLASS (code) == tcc_comparison)
8749 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8750 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8751 fold_build2 (code, type,
8752 TREE_OPERAND (arg0, 1), op1));
8753 if (TREE_CODE (arg1) == COMPOUND_EXPR
8754 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8755 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8756 fold_build2 (code, type,
8757 op0, TREE_OPERAND (arg1, 1)));
8759 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8761 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8763 /*cond_first_p=*/1);
8764 if (tem != NULL_TREE)
8768 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8770 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8772 /*cond_first_p=*/0);
8773 if (tem != NULL_TREE)
8781 /* A + (-B) -> A - B */
8782 if (TREE_CODE (arg1) == NEGATE_EXPR)
8783 return fold_build2 (MINUS_EXPR, type,
8784 fold_convert (type, arg0),
8785 fold_convert (type, TREE_OPERAND (arg1, 0)));
8786 /* (-A) + B -> B - A */
8787 if (TREE_CODE (arg0) == NEGATE_EXPR
8788 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8789 return fold_build2 (MINUS_EXPR, type,
8790 fold_convert (type, arg1),
8791 fold_convert (type, TREE_OPERAND (arg0, 0)));
8792 /* Convert ~A + 1 to -A. */
8793 if (INTEGRAL_TYPE_P (type)
8794 && TREE_CODE (arg0) == BIT_NOT_EXPR
8795 && integer_onep (arg1))
8796 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8798 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8800 if ((TREE_CODE (arg0) == MULT_EXPR
8801 || TREE_CODE (arg1) == MULT_EXPR)
8802 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8804 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8809 if (! FLOAT_TYPE_P (type))
8811 if (integer_zerop (arg1))
8812 return non_lvalue (fold_convert (type, arg0));
8815 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8816 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8817 && !TYPE_TRAP_SIGNED (type))
8819 t1 = build_int_cst_type (type, -1);
8820 return omit_one_operand (type, t1, arg1);
8824 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8825 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8826 && !TYPE_TRAP_SIGNED (type))
8828 t1 = build_int_cst_type (type, -1);
8829 return omit_one_operand (type, t1, arg0);
8832 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8833 with a constant, and the two constants have no bits in common,
8834 we should treat this as a BIT_IOR_EXPR since this may produce more
8836 if (TREE_CODE (arg0) == BIT_AND_EXPR
8837 && TREE_CODE (arg1) == BIT_AND_EXPR
8838 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8839 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8840 && integer_zerop (const_binop (BIT_AND_EXPR,
8841 TREE_OPERAND (arg0, 1),
8842 TREE_OPERAND (arg1, 1), 0)))
8844 code = BIT_IOR_EXPR;
8848 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8849 (plus (plus (mult) (mult)) (foo)) so that we can
8850 take advantage of the factoring cases below. */
8851 if (((TREE_CODE (arg0) == PLUS_EXPR
8852 || TREE_CODE (arg0) == MINUS_EXPR)
8853 && TREE_CODE (arg1) == MULT_EXPR)
8854 || ((TREE_CODE (arg1) == PLUS_EXPR
8855 || TREE_CODE (arg1) == MINUS_EXPR)
8856 && TREE_CODE (arg0) == MULT_EXPR))
8858 tree parg0, parg1, parg, marg;
8859 enum tree_code pcode;
8861 if (TREE_CODE (arg1) == MULT_EXPR)
8862 parg = arg0, marg = arg1;
8864 parg = arg1, marg = arg0;
8865 pcode = TREE_CODE (parg);
8866 parg0 = TREE_OPERAND (parg, 0);
8867 parg1 = TREE_OPERAND (parg, 1);
8871 if (TREE_CODE (parg0) == MULT_EXPR
8872 && TREE_CODE (parg1) != MULT_EXPR)
8873 return fold_build2 (pcode, type,
8874 fold_build2 (PLUS_EXPR, type,
8875 fold_convert (type, parg0),
8876 fold_convert (type, marg)),
8877 fold_convert (type, parg1));
8878 if (TREE_CODE (parg0) != MULT_EXPR
8879 && TREE_CODE (parg1) == MULT_EXPR)
8880 return fold_build2 (PLUS_EXPR, type,
8881 fold_convert (type, parg0),
8882 fold_build2 (pcode, type,
8883 fold_convert (type, marg),
8888 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8889 of the array. Loop optimizer sometimes produce this type of
8891 if (TREE_CODE (arg0) == ADDR_EXPR)
8893 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8895 return fold_convert (type, tem);
8897 else if (TREE_CODE (arg1) == ADDR_EXPR)
8899 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8901 return fold_convert (type, tem);
8906 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8907 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8908 return non_lvalue (fold_convert (type, arg0));
8910 /* Likewise if the operands are reversed. */
8911 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8912 return non_lvalue (fold_convert (type, arg1));
8914 /* Convert X + -C into X - C. */
8915 if (TREE_CODE (arg1) == REAL_CST
8916 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8918 tem = fold_negate_const (arg1, type);
8919 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8920 return fold_build2 (MINUS_EXPR, type,
8921 fold_convert (type, arg0),
8922 fold_convert (type, tem));
8925 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
8926 to __complex__ ( x, y ). This is not the same for SNaNs or
8927 if signed zeros are involved. */
8928 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8929 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8930 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
8932 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
8933 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
8934 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
8935 bool arg0rz = false, arg0iz = false;
8936 if ((arg0r && (arg0rz = real_zerop (arg0r)))
8937 || (arg0i && (arg0iz = real_zerop (arg0i))))
8939 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
8940 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
8941 if (arg0rz && arg1i && real_zerop (arg1i))
8943 tree rp = arg1r ? arg1r
8944 : build1 (REALPART_EXPR, rtype, arg1);
8945 tree ip = arg0i ? arg0i
8946 : build1 (IMAGPART_EXPR, rtype, arg0);
8947 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8949 else if (arg0iz && arg1r && real_zerop (arg1r))
8951 tree rp = arg0r ? arg0r
8952 : build1 (REALPART_EXPR, rtype, arg0);
8953 tree ip = arg1i ? arg1i
8954 : build1 (IMAGPART_EXPR, rtype, arg1);
8955 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8960 if (flag_unsafe_math_optimizations
8961 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8962 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8963 && (tem = distribute_real_division (code, type, arg0, arg1)))
8966 /* Convert x+x into x*2.0. */
8967 if (operand_equal_p (arg0, arg1, 0)
8968 && SCALAR_FLOAT_TYPE_P (type))
8969 return fold_build2 (MULT_EXPR, type, arg0,
8970 build_real (type, dconst2));
8972 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8973 if (flag_unsafe_math_optimizations
8974 && TREE_CODE (arg1) == PLUS_EXPR
8975 && TREE_CODE (arg0) != MULT_EXPR)
8977 tree tree10 = TREE_OPERAND (arg1, 0);
8978 tree tree11 = TREE_OPERAND (arg1, 1);
8979 if (TREE_CODE (tree11) == MULT_EXPR
8980 && TREE_CODE (tree10) == MULT_EXPR)
8983 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8984 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8987 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8988 if (flag_unsafe_math_optimizations
8989 && TREE_CODE (arg0) == PLUS_EXPR
8990 && TREE_CODE (arg1) != MULT_EXPR)
8992 tree tree00 = TREE_OPERAND (arg0, 0);
8993 tree tree01 = TREE_OPERAND (arg0, 1);
8994 if (TREE_CODE (tree01) == MULT_EXPR
8995 && TREE_CODE (tree00) == MULT_EXPR)
8998 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8999 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9005 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9006 is a rotate of A by C1 bits. */
9007 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9008 is a rotate of A by B bits. */
9010 enum tree_code code0, code1;
9011 code0 = TREE_CODE (arg0);
9012 code1 = TREE_CODE (arg1);
9013 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9014 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9015 && operand_equal_p (TREE_OPERAND (arg0, 0),
9016 TREE_OPERAND (arg1, 0), 0)
9017 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9019 tree tree01, tree11;
9020 enum tree_code code01, code11;
9022 tree01 = TREE_OPERAND (arg0, 1);
9023 tree11 = TREE_OPERAND (arg1, 1);
9024 STRIP_NOPS (tree01);
9025 STRIP_NOPS (tree11);
9026 code01 = TREE_CODE (tree01);
9027 code11 = TREE_CODE (tree11);
9028 if (code01 == INTEGER_CST
9029 && code11 == INTEGER_CST
9030 && TREE_INT_CST_HIGH (tree01) == 0
9031 && TREE_INT_CST_HIGH (tree11) == 0
9032 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9033 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9034 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9035 code0 == LSHIFT_EXPR ? tree01 : tree11);
9036 else if (code11 == MINUS_EXPR)
9038 tree tree110, tree111;
9039 tree110 = TREE_OPERAND (tree11, 0);
9040 tree111 = TREE_OPERAND (tree11, 1);
9041 STRIP_NOPS (tree110);
9042 STRIP_NOPS (tree111);
9043 if (TREE_CODE (tree110) == INTEGER_CST
9044 && 0 == compare_tree_int (tree110,
9046 (TREE_TYPE (TREE_OPERAND
9048 && operand_equal_p (tree01, tree111, 0))
9049 return build2 ((code0 == LSHIFT_EXPR
9052 type, TREE_OPERAND (arg0, 0), tree01);
9054 else if (code01 == MINUS_EXPR)
9056 tree tree010, tree011;
9057 tree010 = TREE_OPERAND (tree01, 0);
9058 tree011 = TREE_OPERAND (tree01, 1);
9059 STRIP_NOPS (tree010);
9060 STRIP_NOPS (tree011);
9061 if (TREE_CODE (tree010) == INTEGER_CST
9062 && 0 == compare_tree_int (tree010,
9064 (TREE_TYPE (TREE_OPERAND
9066 && operand_equal_p (tree11, tree011, 0))
9067 return build2 ((code0 != LSHIFT_EXPR
9070 type, TREE_OPERAND (arg0, 0), tree11);
9076 /* In most languages, can't associate operations on floats through
9077 parentheses. Rather than remember where the parentheses were, we
9078 don't associate floats at all, unless the user has specified
9079 -funsafe-math-optimizations. */
9081 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9083 tree var0, con0, lit0, minus_lit0;
9084 tree var1, con1, lit1, minus_lit1;
9086 /* Split both trees into variables, constants, and literals. Then
9087 associate each group together, the constants with literals,
9088 then the result with variables. This increases the chances of
9089 literals being recombined later and of generating relocatable
9090 expressions for the sum of a constant and literal. */
9091 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9092 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9093 code == MINUS_EXPR);
9095 /* Only do something if we found more than two objects. Otherwise,
9096 nothing has changed and we risk infinite recursion. */
9097 if (2 < ((var0 != 0) + (var1 != 0)
9098 + (con0 != 0) + (con1 != 0)
9099 + (lit0 != 0) + (lit1 != 0)
9100 + (minus_lit0 != 0) + (minus_lit1 != 0)))
9102 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9103 if (code == MINUS_EXPR)
9106 var0 = associate_trees (var0, var1, code, type);
9107 con0 = associate_trees (con0, con1, code, type);
9108 lit0 = associate_trees (lit0, lit1, code, type);
9109 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9111 /* Preserve the MINUS_EXPR if the negative part of the literal is
9112 greater than the positive part. Otherwise, the multiplicative
9113 folding code (i.e extract_muldiv) may be fooled in case
9114 unsigned constants are subtracted, like in the following
9115 example: ((X*2 + 4) - 8U)/2. */
9116 if (minus_lit0 && lit0)
9118 if (TREE_CODE (lit0) == INTEGER_CST
9119 && TREE_CODE (minus_lit0) == INTEGER_CST
9120 && tree_int_cst_lt (lit0, minus_lit0))
9122 minus_lit0 = associate_trees (minus_lit0, lit0,
9128 lit0 = associate_trees (lit0, minus_lit0,
9136 return fold_convert (type,
9137 associate_trees (var0, minus_lit0,
9141 con0 = associate_trees (con0, minus_lit0,
9143 return fold_convert (type,
9144 associate_trees (var0, con0,
9149 con0 = associate_trees (con0, lit0, code, type);
9150 return fold_convert (type, associate_trees (var0, con0,
9158 /* A - (-B) -> A + B */
9159 if (TREE_CODE (arg1) == NEGATE_EXPR)
9160 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9161 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9162 if (TREE_CODE (arg0) == NEGATE_EXPR
9163 && (FLOAT_TYPE_P (type)
9164 || INTEGRAL_TYPE_P (type))
9165 && negate_expr_p (arg1)
9166 && reorder_operands_p (arg0, arg1))
9167 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9168 TREE_OPERAND (arg0, 0));
9169 /* Convert -A - 1 to ~A. */
9170 if (INTEGRAL_TYPE_P (type)
9171 && TREE_CODE (arg0) == NEGATE_EXPR
9172 && integer_onep (arg1)
9173 && !TYPE_TRAP_SIGNED (type))
9174 return fold_build1 (BIT_NOT_EXPR, type,
9175 fold_convert (type, TREE_OPERAND (arg0, 0)));
9177 /* Convert -1 - A to ~A. */
9178 if (INTEGRAL_TYPE_P (type)
9179 && integer_all_onesp (arg0))
9180 return fold_build1 (BIT_NOT_EXPR, type, op1);
9182 if (! FLOAT_TYPE_P (type))
9184 if (integer_zerop (arg0))
9185 return negate_expr (fold_convert (type, arg1));
9186 if (integer_zerop (arg1))
9187 return non_lvalue (fold_convert (type, arg0));
9189 /* Fold A - (A & B) into ~B & A. */
9190 if (!TREE_SIDE_EFFECTS (arg0)
9191 && TREE_CODE (arg1) == BIT_AND_EXPR)
9193 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9194 return fold_build2 (BIT_AND_EXPR, type,
9195 fold_build1 (BIT_NOT_EXPR, type,
9196 TREE_OPERAND (arg1, 0)),
9198 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9199 return fold_build2 (BIT_AND_EXPR, type,
9200 fold_build1 (BIT_NOT_EXPR, type,
9201 TREE_OPERAND (arg1, 1)),
9205 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9206 any power of 2 minus 1. */
9207 if (TREE_CODE (arg0) == BIT_AND_EXPR
9208 && TREE_CODE (arg1) == BIT_AND_EXPR
9209 && operand_equal_p (TREE_OPERAND (arg0, 0),
9210 TREE_OPERAND (arg1, 0), 0))
9212 tree mask0 = TREE_OPERAND (arg0, 1);
9213 tree mask1 = TREE_OPERAND (arg1, 1);
9214 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9216 if (operand_equal_p (tem, mask1, 0))
9218 tem = fold_build2 (BIT_XOR_EXPR, type,
9219 TREE_OPERAND (arg0, 0), mask1);
9220 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9225 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9226 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9227 return non_lvalue (fold_convert (type, arg0));
9229 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9230 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9231 (-ARG1 + ARG0) reduces to -ARG1. */
9232 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9233 return negate_expr (fold_convert (type, arg1));
9235 /* Fold &x - &x. This can happen from &x.foo - &x.
9236 This is unsafe for certain floats even in non-IEEE formats.
9237 In IEEE, it is unsafe because it does wrong for NaNs.
9238 Also note that operand_equal_p is always false if an operand
9241 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9242 && operand_equal_p (arg0, arg1, 0))
9243 return fold_convert (type, integer_zero_node);
9245 /* A - B -> A + (-B) if B is easily negatable. */
9246 if (negate_expr_p (arg1)
9247 && ((FLOAT_TYPE_P (type)
9248 /* Avoid this transformation if B is a positive REAL_CST. */
9249 && (TREE_CODE (arg1) != REAL_CST
9250 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9251 || INTEGRAL_TYPE_P (type)))
9252 return fold_build2 (PLUS_EXPR, type,
9253 fold_convert (type, arg0),
9254 fold_convert (type, negate_expr (arg1)));
9256 /* Try folding difference of addresses. */
9260 if ((TREE_CODE (arg0) == ADDR_EXPR
9261 || TREE_CODE (arg1) == ADDR_EXPR)
9262 && ptr_difference_const (arg0, arg1, &diff))
9263 return build_int_cst_type (type, diff);
9266 /* Fold &a[i] - &a[j] to i-j. */
9267 if (TREE_CODE (arg0) == ADDR_EXPR
9268 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9269 && TREE_CODE (arg1) == ADDR_EXPR
9270 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9272 tree aref0 = TREE_OPERAND (arg0, 0);
9273 tree aref1 = TREE_OPERAND (arg1, 0);
9274 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9275 TREE_OPERAND (aref1, 0), 0))
9277 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9278 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9279 tree esz = array_ref_element_size (aref0);
9280 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9281 return fold_build2 (MULT_EXPR, type, diff,
9282 fold_convert (type, esz));
9287 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9288 of the array. Loop optimizer sometimes produce this type of
9290 if (TREE_CODE (arg0) == ADDR_EXPR)
9292 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9294 return fold_convert (type, tem);
9297 if (flag_unsafe_math_optimizations
9298 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9299 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9300 && (tem = distribute_real_division (code, type, arg0, arg1)))
9303 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9305 if ((TREE_CODE (arg0) == MULT_EXPR
9306 || TREE_CODE (arg1) == MULT_EXPR)
9307 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9309 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9317 /* (-A) * (-B) -> A * B */
9318 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9319 return fold_build2 (MULT_EXPR, type,
9320 fold_convert (type, TREE_OPERAND (arg0, 0)),
9321 fold_convert (type, negate_expr (arg1)));
9322 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9323 return fold_build2 (MULT_EXPR, type,
9324 fold_convert (type, negate_expr (arg0)),
9325 fold_convert (type, TREE_OPERAND (arg1, 0)));
9327 if (! FLOAT_TYPE_P (type))
9329 if (integer_zerop (arg1))
9330 return omit_one_operand (type, arg1, arg0);
9331 if (integer_onep (arg1))
9332 return non_lvalue (fold_convert (type, arg0));
9333 /* Transform x * -1 into -x. */
9334 if (integer_all_onesp (arg1))
9335 return fold_convert (type, negate_expr (arg0));
9336 /* Transform x * -C into -x * C if x is easily negatable. */
9337 if (TREE_CODE (arg1) == INTEGER_CST
9338 && tree_int_cst_sgn (arg1) == -1
9339 && negate_expr_p (arg0)
9340 && (tem = negate_expr (arg1)) != arg1
9341 && !TREE_OVERFLOW (tem))
9342 return fold_build2 (MULT_EXPR, type,
9343 negate_expr (arg0), tem);
9345 /* (a * (1 << b)) is (a << b) */
9346 if (TREE_CODE (arg1) == LSHIFT_EXPR
9347 && integer_onep (TREE_OPERAND (arg1, 0)))
9348 return fold_build2 (LSHIFT_EXPR, type, arg0,
9349 TREE_OPERAND (arg1, 1));
9350 if (TREE_CODE (arg0) == LSHIFT_EXPR
9351 && integer_onep (TREE_OPERAND (arg0, 0)))
9352 return fold_build2 (LSHIFT_EXPR, type, arg1,
9353 TREE_OPERAND (arg0, 1));
9355 if (TREE_CODE (arg1) == INTEGER_CST
9356 && 0 != (tem = extract_muldiv (op0,
9357 fold_convert (type, arg1),
9359 return fold_convert (type, tem);
9361 /* Optimize z * conj(z) for integer complex numbers. */
9362 if (TREE_CODE (arg0) == CONJ_EXPR
9363 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9364 return fold_mult_zconjz (type, arg1);
9365 if (TREE_CODE (arg1) == CONJ_EXPR
9366 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9367 return fold_mult_zconjz (type, arg0);
9371 /* Maybe fold x * 0 to 0. The expressions aren't the same
9372 when x is NaN, since x * 0 is also NaN. Nor are they the
9373 same in modes with signed zeros, since multiplying a
9374 negative value by 0 gives -0, not +0. */
9375 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9376 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9377 && real_zerop (arg1))
9378 return omit_one_operand (type, arg1, arg0);
9379 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9380 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9381 && real_onep (arg1))
9382 return non_lvalue (fold_convert (type, arg0));
9384 /* Transform x * -1.0 into -x. */
9385 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9386 && real_minus_onep (arg1))
9387 return fold_convert (type, negate_expr (arg0));
9389 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9390 if (flag_unsafe_math_optimizations
9391 && TREE_CODE (arg0) == RDIV_EXPR
9392 && TREE_CODE (arg1) == REAL_CST
9393 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9395 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9398 return fold_build2 (RDIV_EXPR, type, tem,
9399 TREE_OPERAND (arg0, 1));
9402 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9403 if (operand_equal_p (arg0, arg1, 0))
9405 tree tem = fold_strip_sign_ops (arg0);
9406 if (tem != NULL_TREE)
9408 tem = fold_convert (type, tem);
9409 return fold_build2 (MULT_EXPR, type, tem, tem);
9413 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9414 This is not the same for NaNs or if signed zeros are
9416 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9417 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9418 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9419 && TREE_CODE (arg1) == COMPLEX_CST
9420 && real_zerop (TREE_REALPART (arg1)))
9422 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9423 if (real_onep (TREE_IMAGPART (arg1)))
9424 return fold_build2 (COMPLEX_EXPR, type,
9425 negate_expr (fold_build1 (IMAGPART_EXPR,
9427 fold_build1 (REALPART_EXPR, rtype, arg0));
9428 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9429 return fold_build2 (COMPLEX_EXPR, type,
9430 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9431 negate_expr (fold_build1 (REALPART_EXPR,
9435 /* Optimize z * conj(z) for floating point complex numbers.
9436 Guarded by flag_unsafe_math_optimizations as non-finite
9437 imaginary components don't produce scalar results. */
9438 if (flag_unsafe_math_optimizations
9439 && TREE_CODE (arg0) == CONJ_EXPR
9440 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9441 return fold_mult_zconjz (type, arg1);
9442 if (flag_unsafe_math_optimizations
9443 && TREE_CODE (arg1) == CONJ_EXPR
9444 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9445 return fold_mult_zconjz (type, arg0);
9447 if (flag_unsafe_math_optimizations)
9449 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9450 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9452 /* Optimizations of root(...)*root(...). */
9453 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9455 tree rootfn, arg, arglist;
9456 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9457 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9459 /* Optimize sqrt(x)*sqrt(x) as x. */
9460 if (BUILTIN_SQRT_P (fcode0)
9461 && operand_equal_p (arg00, arg10, 0)
9462 && ! HONOR_SNANS (TYPE_MODE (type)))
9465 /* Optimize root(x)*root(y) as root(x*y). */
9466 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9467 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9468 arglist = build_tree_list (NULL_TREE, arg);
9469 return build_function_call_expr (rootfn, arglist);
9472 /* Optimize expN(x)*expN(y) as expN(x+y). */
9473 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9475 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9476 tree arg = fold_build2 (PLUS_EXPR, type,
9477 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9478 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9479 tree arglist = build_tree_list (NULL_TREE, arg);
9480 return build_function_call_expr (expfn, arglist);
9483 /* Optimizations of pow(...)*pow(...). */
9484 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9485 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9486 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9488 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9489 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9491 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9492 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9495 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9496 if (operand_equal_p (arg01, arg11, 0))
9498 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9499 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9500 tree arglist = tree_cons (NULL_TREE, arg,
9501 build_tree_list (NULL_TREE,
9503 return build_function_call_expr (powfn, arglist);
9506 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9507 if (operand_equal_p (arg00, arg10, 0))
9509 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9510 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9511 tree arglist = tree_cons (NULL_TREE, arg00,
9512 build_tree_list (NULL_TREE,
9514 return build_function_call_expr (powfn, arglist);
9518 /* Optimize tan(x)*cos(x) as sin(x). */
9519 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9520 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9521 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9522 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9523 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9524 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9525 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9526 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9528 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9530 if (sinfn != NULL_TREE)
9531 return build_function_call_expr (sinfn,
9532 TREE_OPERAND (arg0, 1));
9535 /* Optimize x*pow(x,c) as pow(x,c+1). */
9536 if (fcode1 == BUILT_IN_POW
9537 || fcode1 == BUILT_IN_POWF
9538 || fcode1 == BUILT_IN_POWL)
9540 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9541 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9543 if (TREE_CODE (arg11) == REAL_CST
9544 && ! TREE_CONSTANT_OVERFLOW (arg11)
9545 && operand_equal_p (arg0, arg10, 0))
9547 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9551 c = TREE_REAL_CST (arg11);
9552 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9553 arg = build_real (type, c);
9554 arglist = build_tree_list (NULL_TREE, arg);
9555 arglist = tree_cons (NULL_TREE, arg0, arglist);
9556 return build_function_call_expr (powfn, arglist);
9560 /* Optimize pow(x,c)*x as pow(x,c+1). */
9561 if (fcode0 == BUILT_IN_POW
9562 || fcode0 == BUILT_IN_POWF
9563 || fcode0 == BUILT_IN_POWL)
9565 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9566 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9568 if (TREE_CODE (arg01) == REAL_CST
9569 && ! TREE_CONSTANT_OVERFLOW (arg01)
9570 && operand_equal_p (arg1, arg00, 0))
9572 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9576 c = TREE_REAL_CST (arg01);
9577 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9578 arg = build_real (type, c);
9579 arglist = build_tree_list (NULL_TREE, arg);
9580 arglist = tree_cons (NULL_TREE, arg1, arglist);
9581 return build_function_call_expr (powfn, arglist);
9585 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9587 && operand_equal_p (arg0, arg1, 0))
9589 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9593 tree arg = build_real (type, dconst2);
9594 tree arglist = build_tree_list (NULL_TREE, arg);
9595 arglist = tree_cons (NULL_TREE, arg0, arglist);
9596 return build_function_call_expr (powfn, arglist);
9605 if (integer_all_onesp (arg1))
9606 return omit_one_operand (type, arg1, arg0);
9607 if (integer_zerop (arg1))
9608 return non_lvalue (fold_convert (type, arg0));
9609 if (operand_equal_p (arg0, arg1, 0))
9610 return non_lvalue (fold_convert (type, arg0));
9613 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9614 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9616 t1 = build_int_cst_type (type, -1);
9617 return omit_one_operand (type, t1, arg1);
9621 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9622 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9624 t1 = build_int_cst_type (type, -1);
9625 return omit_one_operand (type, t1, arg0);
9628 /* Canonicalize (X & C1) | C2. */
9629 if (TREE_CODE (arg0) == BIT_AND_EXPR
9630 && TREE_CODE (arg1) == INTEGER_CST
9631 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9633 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9634 int width = TYPE_PRECISION (type);
9635 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9636 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9637 hi2 = TREE_INT_CST_HIGH (arg1);
9638 lo2 = TREE_INT_CST_LOW (arg1);
9640 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9641 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9642 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9644 if (width > HOST_BITS_PER_WIDE_INT)
9646 mhi = (unsigned HOST_WIDE_INT) -1
9647 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9653 mlo = (unsigned HOST_WIDE_INT) -1
9654 >> (HOST_BITS_PER_WIDE_INT - width);
9657 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9658 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9659 return fold_build2 (BIT_IOR_EXPR, type,
9660 TREE_OPERAND (arg0, 0), arg1);
9662 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9665 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9666 return fold_build2 (BIT_IOR_EXPR, type,
9667 fold_build2 (BIT_AND_EXPR, type,
9668 TREE_OPERAND (arg0, 0),
9669 build_int_cst_wide (type,
9675 /* (X & Y) | Y is (X, Y). */
9676 if (TREE_CODE (arg0) == BIT_AND_EXPR
9677 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9678 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9679 /* (X & Y) | X is (Y, X). */
9680 if (TREE_CODE (arg0) == BIT_AND_EXPR
9681 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9682 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9683 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9684 /* X | (X & Y) is (Y, X). */
9685 if (TREE_CODE (arg1) == BIT_AND_EXPR
9686 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9687 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9688 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9689 /* X | (Y & X) is (Y, X). */
9690 if (TREE_CODE (arg1) == BIT_AND_EXPR
9691 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9692 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9693 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9695 t1 = distribute_bit_expr (code, type, arg0, arg1);
9696 if (t1 != NULL_TREE)
9699 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9701 This results in more efficient code for machines without a NAND
9702 instruction. Combine will canonicalize to the first form
9703 which will allow use of NAND instructions provided by the
9704 backend if they exist. */
9705 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9706 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9708 return fold_build1 (BIT_NOT_EXPR, type,
9709 build2 (BIT_AND_EXPR, type,
9710 TREE_OPERAND (arg0, 0),
9711 TREE_OPERAND (arg1, 0)));
9714 /* See if this can be simplified into a rotate first. If that
9715 is unsuccessful continue in the association code. */
9719 if (integer_zerop (arg1))
9720 return non_lvalue (fold_convert (type, arg0));
9721 if (integer_all_onesp (arg1))
9722 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9723 if (operand_equal_p (arg0, arg1, 0))
9724 return omit_one_operand (type, integer_zero_node, arg0);
9727 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9728 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9730 t1 = build_int_cst_type (type, -1);
9731 return omit_one_operand (type, t1, arg1);
9735 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9736 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9738 t1 = build_int_cst_type (type, -1);
9739 return omit_one_operand (type, t1, arg0);
9742 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9743 with a constant, and the two constants have no bits in common,
9744 we should treat this as a BIT_IOR_EXPR since this may produce more
9746 if (TREE_CODE (arg0) == BIT_AND_EXPR
9747 && TREE_CODE (arg1) == BIT_AND_EXPR
9748 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9749 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9750 && integer_zerop (const_binop (BIT_AND_EXPR,
9751 TREE_OPERAND (arg0, 1),
9752 TREE_OPERAND (arg1, 1), 0)))
9754 code = BIT_IOR_EXPR;
9758 /* (X | Y) ^ X -> Y & ~ X*/
9759 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9760 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9762 tree t2 = TREE_OPERAND (arg0, 1);
9763 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9765 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9766 fold_convert (type, t1));
9770 /* (Y | X) ^ X -> Y & ~ X*/
9771 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9772 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9774 tree t2 = TREE_OPERAND (arg0, 0);
9775 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9777 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9778 fold_convert (type, t1));
9782 /* X ^ (X | Y) -> Y & ~ X*/
9783 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9784 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9786 tree t2 = TREE_OPERAND (arg1, 1);
9787 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9789 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9790 fold_convert (type, t1));
9794 /* X ^ (Y | X) -> Y & ~ X*/
9795 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9796 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9798 tree t2 = TREE_OPERAND (arg1, 0);
9799 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9801 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9802 fold_convert (type, t1));
9806 /* Convert ~X ^ ~Y to X ^ Y. */
9807 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9808 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9809 return fold_build2 (code, type,
9810 fold_convert (type, TREE_OPERAND (arg0, 0)),
9811 fold_convert (type, TREE_OPERAND (arg1, 0)));
9813 /* Convert ~X ^ C to X ^ ~C. */
9814 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9815 && TREE_CODE (arg1) == INTEGER_CST)
9816 return fold_build2 (code, type,
9817 fold_convert (type, TREE_OPERAND (arg0, 0)),
9818 fold_build1 (BIT_NOT_EXPR, type, arg1));
9820 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9821 if (TREE_CODE (arg0) == BIT_AND_EXPR
9822 && integer_onep (TREE_OPERAND (arg0, 1))
9823 && integer_onep (arg1))
9824 return fold_build2 (EQ_EXPR, type, arg0,
9825 build_int_cst (TREE_TYPE (arg0), 0));
9827 /* Fold (X & Y) ^ Y as ~X & Y. */
9828 if (TREE_CODE (arg0) == BIT_AND_EXPR
9829 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9831 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9832 return fold_build2 (BIT_AND_EXPR, type,
9833 fold_build1 (BIT_NOT_EXPR, type, tem),
9834 fold_convert (type, arg1));
9836 /* Fold (X & Y) ^ X as ~Y & X. */
9837 if (TREE_CODE (arg0) == BIT_AND_EXPR
9838 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9839 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9841 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9842 return fold_build2 (BIT_AND_EXPR, type,
9843 fold_build1 (BIT_NOT_EXPR, type, tem),
9844 fold_convert (type, arg1));
9846 /* Fold X ^ (X & Y) as X & ~Y. */
9847 if (TREE_CODE (arg1) == BIT_AND_EXPR
9848 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9850 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9851 return fold_build2 (BIT_AND_EXPR, type,
9852 fold_convert (type, arg0),
9853 fold_build1 (BIT_NOT_EXPR, type, tem));
9855 /* Fold X ^ (Y & X) as ~Y & X. */
9856 if (TREE_CODE (arg1) == BIT_AND_EXPR
9857 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9858 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9860 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9861 return fold_build2 (BIT_AND_EXPR, type,
9862 fold_build1 (BIT_NOT_EXPR, type, tem),
9863 fold_convert (type, arg0));
9866 /* See if this can be simplified into a rotate first. If that
9867 is unsuccessful continue in the association code. */
9871 if (integer_all_onesp (arg1))
9872 return non_lvalue (fold_convert (type, arg0));
9873 if (integer_zerop (arg1))
9874 return omit_one_operand (type, arg1, arg0);
9875 if (operand_equal_p (arg0, arg1, 0))
9876 return non_lvalue (fold_convert (type, arg0));
9878 /* ~X & X is always zero. */
9879 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9880 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9881 return omit_one_operand (type, integer_zero_node, arg1);
9883 /* X & ~X is always zero. */
9884 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9885 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9886 return omit_one_operand (type, integer_zero_node, arg0);
9888 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9889 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9890 && TREE_CODE (arg1) == INTEGER_CST
9891 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9892 return fold_build2 (BIT_IOR_EXPR, type,
9893 fold_build2 (BIT_AND_EXPR, type,
9894 TREE_OPERAND (arg0, 0), arg1),
9895 fold_build2 (BIT_AND_EXPR, type,
9896 TREE_OPERAND (arg0, 1), arg1));
9898 /* (X | Y) & Y is (X, Y). */
9899 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9900 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9901 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9902 /* (X | Y) & X is (Y, X). */
9903 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9904 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9905 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9906 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9907 /* X & (X | Y) is (Y, X). */
9908 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9909 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9910 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9911 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9912 /* X & (Y | X) is (Y, X). */
9913 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9914 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9915 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9916 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9918 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9919 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9920 && integer_onep (TREE_OPERAND (arg0, 1))
9921 && integer_onep (arg1))
9923 tem = TREE_OPERAND (arg0, 0);
9924 return fold_build2 (EQ_EXPR, type,
9925 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9926 build_int_cst (TREE_TYPE (tem), 1)),
9927 build_int_cst (TREE_TYPE (tem), 0));
9929 /* Fold ~X & 1 as (X & 1) == 0. */
9930 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9931 && integer_onep (arg1))
9933 tem = TREE_OPERAND (arg0, 0);
9934 return fold_build2 (EQ_EXPR, type,
9935 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9936 build_int_cst (TREE_TYPE (tem), 1)),
9937 build_int_cst (TREE_TYPE (tem), 0));
9940 /* Fold (X ^ Y) & Y as ~X & Y. */
9941 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9942 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9944 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9945 return fold_build2 (BIT_AND_EXPR, type,
9946 fold_build1 (BIT_NOT_EXPR, type, tem),
9947 fold_convert (type, arg1));
9949 /* Fold (X ^ Y) & X as ~Y & X. */
9950 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9951 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9952 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9954 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9955 return fold_build2 (BIT_AND_EXPR, type,
9956 fold_build1 (BIT_NOT_EXPR, type, tem),
9957 fold_convert (type, arg1));
9959 /* Fold X & (X ^ Y) as X & ~Y. */
9960 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9961 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9963 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9964 return fold_build2 (BIT_AND_EXPR, type,
9965 fold_convert (type, arg0),
9966 fold_build1 (BIT_NOT_EXPR, type, tem));
9968 /* Fold X & (Y ^ X) as ~Y & X. */
9969 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9970 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9971 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9973 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9974 return fold_build2 (BIT_AND_EXPR, type,
9975 fold_build1 (BIT_NOT_EXPR, type, tem),
9976 fold_convert (type, arg0));
9979 t1 = distribute_bit_expr (code, type, arg0, arg1);
9980 if (t1 != NULL_TREE)
9982 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9983 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9984 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9987 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9989 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9990 && (~TREE_INT_CST_LOW (arg1)
9991 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9992 return fold_convert (type, TREE_OPERAND (arg0, 0));
9995 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9997 This results in more efficient code for machines without a NOR
9998 instruction. Combine will canonicalize to the first form
9999 which will allow use of NOR instructions provided by the
10000 backend if they exist. */
10001 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10002 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10004 return fold_build1 (BIT_NOT_EXPR, type,
10005 build2 (BIT_IOR_EXPR, type,
10006 TREE_OPERAND (arg0, 0),
10007 TREE_OPERAND (arg1, 0)));
10013 /* Don't touch a floating-point divide by zero unless the mode
10014 of the constant can represent infinity. */
10015 if (TREE_CODE (arg1) == REAL_CST
10016 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10017 && real_zerop (arg1))
10020 /* Optimize A / A to 1.0 if we don't care about
10021 NaNs or Infinities. Skip the transformation
10022 for non-real operands. */
10023 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10024 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10025 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10026 && operand_equal_p (arg0, arg1, 0))
10028 tree r = build_real (TREE_TYPE (arg0), dconst1);
10030 return omit_two_operands (type, r, arg0, arg1);
10033 /* The complex version of the above A / A optimization. */
10034 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10035 && operand_equal_p (arg0, arg1, 0))
10037 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10038 if (! HONOR_NANS (TYPE_MODE (elem_type))
10039 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10041 tree r = build_real (elem_type, dconst1);
10042 /* omit_two_operands will call fold_convert for us. */
10043 return omit_two_operands (type, r, arg0, arg1);
10047 /* (-A) / (-B) -> A / B */
10048 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10049 return fold_build2 (RDIV_EXPR, type,
10050 TREE_OPERAND (arg0, 0),
10051 negate_expr (arg1));
10052 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10053 return fold_build2 (RDIV_EXPR, type,
10054 negate_expr (arg0),
10055 TREE_OPERAND (arg1, 0));
10057 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10058 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10059 && real_onep (arg1))
10060 return non_lvalue (fold_convert (type, arg0));
10062 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10063 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10064 && real_minus_onep (arg1))
10065 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10067 /* If ARG1 is a constant, we can convert this to a multiply by the
10068 reciprocal. This does not have the same rounding properties,
10069 so only do this if -funsafe-math-optimizations. We can actually
10070 always safely do it if ARG1 is a power of two, but it's hard to
10071 tell if it is or not in a portable manner. */
10072 if (TREE_CODE (arg1) == REAL_CST)
10074 if (flag_unsafe_math_optimizations
10075 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10077 return fold_build2 (MULT_EXPR, type, arg0, tem);
10078 /* Find the reciprocal if optimizing and the result is exact. */
10082 r = TREE_REAL_CST (arg1);
10083 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10085 tem = build_real (type, r);
10086 return fold_build2 (MULT_EXPR, type,
10087 fold_convert (type, arg0), tem);
10091 /* Convert A/B/C to A/(B*C). */
10092 if (flag_unsafe_math_optimizations
10093 && TREE_CODE (arg0) == RDIV_EXPR)
10094 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10095 fold_build2 (MULT_EXPR, type,
10096 TREE_OPERAND (arg0, 1), arg1));
10098 /* Convert A/(B/C) to (A/B)*C. */
10099 if (flag_unsafe_math_optimizations
10100 && TREE_CODE (arg1) == RDIV_EXPR)
10101 return fold_build2 (MULT_EXPR, type,
10102 fold_build2 (RDIV_EXPR, type, arg0,
10103 TREE_OPERAND (arg1, 0)),
10104 TREE_OPERAND (arg1, 1));
10106 /* Convert C1/(X*C2) into (C1/C2)/X. */
10107 if (flag_unsafe_math_optimizations
10108 && TREE_CODE (arg1) == MULT_EXPR
10109 && TREE_CODE (arg0) == REAL_CST
10110 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10112 tree tem = const_binop (RDIV_EXPR, arg0,
10113 TREE_OPERAND (arg1, 1), 0);
10115 return fold_build2 (RDIV_EXPR, type, tem,
10116 TREE_OPERAND (arg1, 0));
10119 if (flag_unsafe_math_optimizations)
10121 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10122 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10124 /* Optimize sin(x)/cos(x) as tan(x). */
10125 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10126 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10127 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10128 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10129 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10131 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10133 if (tanfn != NULL_TREE)
10134 return build_function_call_expr (tanfn,
10135 TREE_OPERAND (arg0, 1));
10138 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10139 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10140 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10141 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10142 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10143 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10145 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10147 if (tanfn != NULL_TREE)
10149 tree tmp = TREE_OPERAND (arg0, 1);
10150 tmp = build_function_call_expr (tanfn, tmp);
10151 return fold_build2 (RDIV_EXPR, type,
10152 build_real (type, dconst1), tmp);
10156 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10157 NaNs or Infinities. */
10158 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10159 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10160 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10162 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10163 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10165 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10166 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10167 && operand_equal_p (arg00, arg01, 0))
10169 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10171 if (cosfn != NULL_TREE)
10172 return build_function_call_expr (cosfn,
10173 TREE_OPERAND (arg0, 1));
10177 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10178 NaNs or Infinities. */
10179 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10180 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10181 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10183 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10184 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10186 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10187 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10188 && operand_equal_p (arg00, arg01, 0))
10190 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10192 if (cosfn != NULL_TREE)
10194 tree tmp = TREE_OPERAND (arg0, 1);
10195 tmp = build_function_call_expr (cosfn, tmp);
10196 return fold_build2 (RDIV_EXPR, type,
10197 build_real (type, dconst1),
10203 /* Optimize pow(x,c)/x as pow(x,c-1). */
10204 if (fcode0 == BUILT_IN_POW
10205 || fcode0 == BUILT_IN_POWF
10206 || fcode0 == BUILT_IN_POWL)
10208 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10209 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
10210 if (TREE_CODE (arg01) == REAL_CST
10211 && ! TREE_CONSTANT_OVERFLOW (arg01)
10212 && operand_equal_p (arg1, arg00, 0))
10214 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10218 c = TREE_REAL_CST (arg01);
10219 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10220 arg = build_real (type, c);
10221 arglist = build_tree_list (NULL_TREE, arg);
10222 arglist = tree_cons (NULL_TREE, arg1, arglist);
10223 return build_function_call_expr (powfn, arglist);
10227 /* Optimize x/expN(y) into x*expN(-y). */
10228 if (BUILTIN_EXPONENT_P (fcode1))
10230 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10231 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10232 tree arglist = build_tree_list (NULL_TREE,
10233 fold_convert (type, arg));
10234 arg1 = build_function_call_expr (expfn, arglist);
10235 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10238 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10239 if (fcode1 == BUILT_IN_POW
10240 || fcode1 == BUILT_IN_POWF
10241 || fcode1 == BUILT_IN_POWL)
10243 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10244 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10245 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10246 tree neg11 = fold_convert (type, negate_expr (arg11));
10247 tree arglist = tree_cons(NULL_TREE, arg10,
10248 build_tree_list (NULL_TREE, neg11));
10249 arg1 = build_function_call_expr (powfn, arglist);
10250 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10255 case TRUNC_DIV_EXPR:
10256 case FLOOR_DIV_EXPR:
10257 /* Simplify A / (B << N) where A and B are positive and B is
10258 a power of 2, to A >> (N + log2(B)). */
10259 if (TREE_CODE (arg1) == LSHIFT_EXPR
10260 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10262 tree sval = TREE_OPERAND (arg1, 0);
10263 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10265 tree sh_cnt = TREE_OPERAND (arg1, 1);
10266 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10268 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10269 sh_cnt, build_int_cst (NULL_TREE, pow2));
10270 return fold_build2 (RSHIFT_EXPR, type,
10271 fold_convert (type, arg0), sh_cnt);
10276 case ROUND_DIV_EXPR:
10277 case CEIL_DIV_EXPR:
10278 case EXACT_DIV_EXPR:
10279 if (integer_onep (arg1))
10280 return non_lvalue (fold_convert (type, arg0));
10281 if (integer_zerop (arg1))
10283 /* X / -1 is -X. */
10284 if (!TYPE_UNSIGNED (type)
10285 && TREE_CODE (arg1) == INTEGER_CST
10286 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10287 && TREE_INT_CST_HIGH (arg1) == -1)
10288 return fold_convert (type, negate_expr (arg0));
10290 /* Convert -A / -B to A / B when the type is signed and overflow is
10292 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10293 && TREE_CODE (arg0) == NEGATE_EXPR
10294 && negate_expr_p (arg1))
10295 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10296 negate_expr (arg1));
10297 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10298 && TREE_CODE (arg1) == NEGATE_EXPR
10299 && negate_expr_p (arg0))
10300 return fold_build2 (code, type, negate_expr (arg0),
10301 TREE_OPERAND (arg1, 0));
10303 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10304 operation, EXACT_DIV_EXPR.
10306 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10307 At one time others generated faster code, it's not clear if they do
10308 after the last round to changes to the DIV code in expmed.c. */
10309 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10310 && multiple_of_p (type, arg0, arg1))
10311 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10313 if (TREE_CODE (arg1) == INTEGER_CST
10314 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10315 return fold_convert (type, tem);
10319 case CEIL_MOD_EXPR:
10320 case FLOOR_MOD_EXPR:
10321 case ROUND_MOD_EXPR:
10322 case TRUNC_MOD_EXPR:
10323 /* X % 1 is always zero, but be sure to preserve any side
10325 if (integer_onep (arg1))
10326 return omit_one_operand (type, integer_zero_node, arg0);
10328 /* X % 0, return X % 0 unchanged so that we can get the
10329 proper warnings and errors. */
10330 if (integer_zerop (arg1))
10333 /* 0 % X is always zero, but be sure to preserve any side
10334 effects in X. Place this after checking for X == 0. */
10335 if (integer_zerop (arg0))
10336 return omit_one_operand (type, integer_zero_node, arg1);
10338 /* X % -1 is zero. */
10339 if (!TYPE_UNSIGNED (type)
10340 && TREE_CODE (arg1) == INTEGER_CST
10341 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10342 && TREE_INT_CST_HIGH (arg1) == -1)
10343 return omit_one_operand (type, integer_zero_node, arg0);
10345 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10346 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10347 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10348 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10351 /* Also optimize A % (C << N) where C is a power of 2,
10352 to A & ((C << N) - 1). */
10353 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10354 c = TREE_OPERAND (arg1, 0);
10356 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10358 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10359 build_int_cst (TREE_TYPE (arg1), 1));
10360 return fold_build2 (BIT_AND_EXPR, type,
10361 fold_convert (type, arg0),
10362 fold_convert (type, mask));
10366 /* X % -C is the same as X % C. */
10367 if (code == TRUNC_MOD_EXPR
10368 && !TYPE_UNSIGNED (type)
10369 && TREE_CODE (arg1) == INTEGER_CST
10370 && !TREE_CONSTANT_OVERFLOW (arg1)
10371 && TREE_INT_CST_HIGH (arg1) < 0
10373 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10374 && !sign_bit_p (arg1, arg1))
10375 return fold_build2 (code, type, fold_convert (type, arg0),
10376 fold_convert (type, negate_expr (arg1)));
10378 /* X % -Y is the same as X % Y. */
10379 if (code == TRUNC_MOD_EXPR
10380 && !TYPE_UNSIGNED (type)
10381 && TREE_CODE (arg1) == NEGATE_EXPR
10383 return fold_build2 (code, type, fold_convert (type, arg0),
10384 fold_convert (type, TREE_OPERAND (arg1, 0)));
10386 if (TREE_CODE (arg1) == INTEGER_CST
10387 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10388 return fold_convert (type, tem);
10394 if (integer_all_onesp (arg0))
10395 return omit_one_operand (type, arg0, arg1);
10399 /* Optimize -1 >> x for arithmetic right shifts. */
10400 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10401 return omit_one_operand (type, arg0, arg1);
10402 /* ... fall through ... */
10406 if (integer_zerop (arg1))
10407 return non_lvalue (fold_convert (type, arg0));
10408 if (integer_zerop (arg0))
10409 return omit_one_operand (type, arg0, arg1);
10411 /* Since negative shift count is not well-defined,
10412 don't try to compute it in the compiler. */
10413 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10416 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10417 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10418 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10419 && host_integerp (TREE_OPERAND (arg0, 1), false)
10420 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10422 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10423 + TREE_INT_CST_LOW (arg1));
10425 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10426 being well defined. */
10427 if (low >= TYPE_PRECISION (type))
10429 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10430 low = low % TYPE_PRECISION (type);
10431 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10432 return build_int_cst (type, 0);
10434 low = TYPE_PRECISION (type) - 1;
10437 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10438 build_int_cst (type, low));
10441 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10442 into x & ((unsigned)-1 >> c) for unsigned types. */
10443 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10444 || (TYPE_UNSIGNED (type)
10445 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10446 && host_integerp (arg1, false)
10447 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10448 && host_integerp (TREE_OPERAND (arg0, 1), false)
10449 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10451 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10452 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10458 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10460 lshift = build_int_cst (type, -1);
10461 lshift = int_const_binop (code, lshift, arg1, 0);
10463 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10467 /* Rewrite an LROTATE_EXPR by a constant into an
10468 RROTATE_EXPR by a new constant. */
10469 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10471 tree tem = build_int_cst (TREE_TYPE (arg1),
10472 GET_MODE_BITSIZE (TYPE_MODE (type)));
10473 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10474 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10477 /* If we have a rotate of a bit operation with the rotate count and
10478 the second operand of the bit operation both constant,
10479 permute the two operations. */
10480 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10481 && (TREE_CODE (arg0) == BIT_AND_EXPR
10482 || TREE_CODE (arg0) == BIT_IOR_EXPR
10483 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10484 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10485 return fold_build2 (TREE_CODE (arg0), type,
10486 fold_build2 (code, type,
10487 TREE_OPERAND (arg0, 0), arg1),
10488 fold_build2 (code, type,
10489 TREE_OPERAND (arg0, 1), arg1));
10491 /* Two consecutive rotates adding up to the width of the mode can
10493 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10494 && TREE_CODE (arg0) == RROTATE_EXPR
10495 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10496 && TREE_INT_CST_HIGH (arg1) == 0
10497 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10498 && ((TREE_INT_CST_LOW (arg1)
10499 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10500 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10501 return TREE_OPERAND (arg0, 0);
10506 if (operand_equal_p (arg0, arg1, 0))
10507 return omit_one_operand (type, arg0, arg1);
10508 if (INTEGRAL_TYPE_P (type)
10509 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10510 return omit_one_operand (type, arg1, arg0);
10511 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10517 if (operand_equal_p (arg0, arg1, 0))
10518 return omit_one_operand (type, arg0, arg1);
10519 if (INTEGRAL_TYPE_P (type)
10520 && TYPE_MAX_VALUE (type)
10521 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10522 return omit_one_operand (type, arg1, arg0);
10523 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10528 case TRUTH_ANDIF_EXPR:
10529 /* Note that the operands of this must be ints
10530 and their values must be 0 or 1.
10531 ("true" is a fixed value perhaps depending on the language.) */
10532 /* If first arg is constant zero, return it. */
10533 if (integer_zerop (arg0))
10534 return fold_convert (type, arg0);
10535 case TRUTH_AND_EXPR:
10536 /* If either arg is constant true, drop it. */
10537 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10538 return non_lvalue (fold_convert (type, arg1));
10539 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10540 /* Preserve sequence points. */
10541 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10542 return non_lvalue (fold_convert (type, arg0));
10543 /* If second arg is constant zero, result is zero, but first arg
10544 must be evaluated. */
10545 if (integer_zerop (arg1))
10546 return omit_one_operand (type, arg1, arg0);
10547 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10548 case will be handled here. */
10549 if (integer_zerop (arg0))
10550 return omit_one_operand (type, arg0, arg1);
10552 /* !X && X is always false. */
10553 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10554 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10555 return omit_one_operand (type, integer_zero_node, arg1);
10556 /* X && !X is always false. */
10557 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10558 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10559 return omit_one_operand (type, integer_zero_node, arg0);
10561 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10562 means A >= Y && A != MAX, but in this case we know that
10565 if (!TREE_SIDE_EFFECTS (arg0)
10566 && !TREE_SIDE_EFFECTS (arg1))
10568 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10569 if (tem && !operand_equal_p (tem, arg0, 0))
10570 return fold_build2 (code, type, tem, arg1);
10572 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10573 if (tem && !operand_equal_p (tem, arg1, 0))
10574 return fold_build2 (code, type, arg0, tem);
10578 /* We only do these simplifications if we are optimizing. */
10582 /* Check for things like (A || B) && (A || C). We can convert this
10583 to A || (B && C). Note that either operator can be any of the four
10584 truth and/or operations and the transformation will still be
10585 valid. Also note that we only care about order for the
10586 ANDIF and ORIF operators. If B contains side effects, this
10587 might change the truth-value of A. */
10588 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10589 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10590 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10591 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10592 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10593 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10595 tree a00 = TREE_OPERAND (arg0, 0);
10596 tree a01 = TREE_OPERAND (arg0, 1);
10597 tree a10 = TREE_OPERAND (arg1, 0);
10598 tree a11 = TREE_OPERAND (arg1, 1);
10599 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10600 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10601 && (code == TRUTH_AND_EXPR
10602 || code == TRUTH_OR_EXPR));
10604 if (operand_equal_p (a00, a10, 0))
10605 return fold_build2 (TREE_CODE (arg0), type, a00,
10606 fold_build2 (code, type, a01, a11));
10607 else if (commutative && operand_equal_p (a00, a11, 0))
10608 return fold_build2 (TREE_CODE (arg0), type, a00,
10609 fold_build2 (code, type, a01, a10));
10610 else if (commutative && operand_equal_p (a01, a10, 0))
10611 return fold_build2 (TREE_CODE (arg0), type, a01,
10612 fold_build2 (code, type, a00, a11));
10614 /* This case if tricky because we must either have commutative
10615 operators or else A10 must not have side-effects. */
10617 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10618 && operand_equal_p (a01, a11, 0))
10619 return fold_build2 (TREE_CODE (arg0), type,
10620 fold_build2 (code, type, a00, a10),
10624 /* See if we can build a range comparison. */
10625 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10628 /* Check for the possibility of merging component references. If our
10629 lhs is another similar operation, try to merge its rhs with our
10630 rhs. Then try to merge our lhs and rhs. */
10631 if (TREE_CODE (arg0) == code
10632 && 0 != (tem = fold_truthop (code, type,
10633 TREE_OPERAND (arg0, 1), arg1)))
10634 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10636 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10641 case TRUTH_ORIF_EXPR:
10642 /* Note that the operands of this must be ints
10643 and their values must be 0 or true.
10644 ("true" is a fixed value perhaps depending on the language.) */
10645 /* If first arg is constant true, return it. */
10646 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10647 return fold_convert (type, arg0);
10648 case TRUTH_OR_EXPR:
10649 /* If either arg is constant zero, drop it. */
10650 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10651 return non_lvalue (fold_convert (type, arg1));
10652 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10653 /* Preserve sequence points. */
10654 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10655 return non_lvalue (fold_convert (type, arg0));
10656 /* If second arg is constant true, result is true, but we must
10657 evaluate first arg. */
10658 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10659 return omit_one_operand (type, arg1, arg0);
10660 /* Likewise for first arg, but note this only occurs here for
10662 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10663 return omit_one_operand (type, arg0, arg1);
10665 /* !X || X is always true. */
10666 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10667 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10668 return omit_one_operand (type, integer_one_node, arg1);
10669 /* X || !X is always true. */
10670 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10671 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10672 return omit_one_operand (type, integer_one_node, arg0);
10676 case TRUTH_XOR_EXPR:
10677 /* If the second arg is constant zero, drop it. */
10678 if (integer_zerop (arg1))
10679 return non_lvalue (fold_convert (type, arg0));
10680 /* If the second arg is constant true, this is a logical inversion. */
10681 if (integer_onep (arg1))
10683 /* Only call invert_truthvalue if operand is a truth value. */
10684 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10685 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10687 tem = invert_truthvalue (arg0);
10688 return non_lvalue (fold_convert (type, tem));
10690 /* Identical arguments cancel to zero. */
10691 if (operand_equal_p (arg0, arg1, 0))
10692 return omit_one_operand (type, integer_zero_node, arg0);
10694 /* !X ^ X is always true. */
10695 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10696 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10697 return omit_one_operand (type, integer_one_node, arg1);
10699 /* X ^ !X is always true. */
10700 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10701 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10702 return omit_one_operand (type, integer_one_node, arg0);
10708 tem = fold_comparison (code, type, op0, op1);
10709 if (tem != NULL_TREE)
10712 /* bool_var != 0 becomes bool_var. */
10713 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10714 && code == NE_EXPR)
10715 return non_lvalue (fold_convert (type, arg0));
10717 /* bool_var == 1 becomes bool_var. */
10718 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10719 && code == EQ_EXPR)
10720 return non_lvalue (fold_convert (type, arg0));
10722 /* bool_var != 1 becomes !bool_var. */
10723 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10724 && code == NE_EXPR)
10725 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10727 /* bool_var == 0 becomes !bool_var. */
10728 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10729 && code == EQ_EXPR)
10730 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10732 /* If this is an equality comparison of the address of a non-weak
10733 object against zero, then we know the result. */
10734 if (TREE_CODE (arg0) == ADDR_EXPR
10735 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10736 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10737 && integer_zerop (arg1))
10738 return constant_boolean_node (code != EQ_EXPR, type);
10740 /* If this is an equality comparison of the address of two non-weak,
10741 unaliased symbols neither of which are extern (since we do not
10742 have access to attributes for externs), then we know the result. */
10743 if (TREE_CODE (arg0) == ADDR_EXPR
10744 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10745 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10746 && ! lookup_attribute ("alias",
10747 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10748 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10749 && TREE_CODE (arg1) == ADDR_EXPR
10750 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10751 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10752 && ! lookup_attribute ("alias",
10753 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10754 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10756 /* We know that we're looking at the address of two
10757 non-weak, unaliased, static _DECL nodes.
10759 It is both wasteful and incorrect to call operand_equal_p
10760 to compare the two ADDR_EXPR nodes. It is wasteful in that
10761 all we need to do is test pointer equality for the arguments
10762 to the two ADDR_EXPR nodes. It is incorrect to use
10763 operand_equal_p as that function is NOT equivalent to a
10764 C equality test. It can in fact return false for two
10765 objects which would test as equal using the C equality
10767 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10768 return constant_boolean_node (equal
10769 ? code == EQ_EXPR : code != EQ_EXPR,
10773 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10774 a MINUS_EXPR of a constant, we can convert it into a comparison with
10775 a revised constant as long as no overflow occurs. */
10776 if (TREE_CODE (arg1) == INTEGER_CST
10777 && (TREE_CODE (arg0) == PLUS_EXPR
10778 || TREE_CODE (arg0) == MINUS_EXPR)
10779 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10780 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10781 ? MINUS_EXPR : PLUS_EXPR,
10782 fold_convert (TREE_TYPE (arg0), arg1),
10783 TREE_OPERAND (arg0, 1), 0))
10784 && ! TREE_CONSTANT_OVERFLOW (tem))
10785 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10787 /* Similarly for a NEGATE_EXPR. */
10788 if (TREE_CODE (arg0) == NEGATE_EXPR
10789 && TREE_CODE (arg1) == INTEGER_CST
10790 && 0 != (tem = negate_expr (arg1))
10791 && TREE_CODE (tem) == INTEGER_CST
10792 && ! TREE_CONSTANT_OVERFLOW (tem))
10793 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10795 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
10796 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10797 && TREE_CODE (arg1) == INTEGER_CST
10798 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10799 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10800 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
10801 fold_convert (TREE_TYPE (arg0), arg1),
10802 TREE_OPERAND (arg0, 1)));
10804 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10805 for !=. Don't do this for ordered comparisons due to overflow. */
10806 if (TREE_CODE (arg0) == MINUS_EXPR
10807 && integer_zerop (arg1))
10808 return fold_build2 (code, type,
10809 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10811 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10812 if (TREE_CODE (arg0) == ABS_EXPR
10813 && (integer_zerop (arg1) || real_zerop (arg1)))
10814 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10816 /* If this is an EQ or NE comparison with zero and ARG0 is
10817 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10818 two operations, but the latter can be done in one less insn
10819 on machines that have only two-operand insns or on which a
10820 constant cannot be the first operand. */
10821 if (TREE_CODE (arg0) == BIT_AND_EXPR
10822 && integer_zerop (arg1))
10824 tree arg00 = TREE_OPERAND (arg0, 0);
10825 tree arg01 = TREE_OPERAND (arg0, 1);
10826 if (TREE_CODE (arg00) == LSHIFT_EXPR
10827 && integer_onep (TREE_OPERAND (arg00, 0)))
10829 fold_build2 (code, type,
10830 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10831 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10832 arg01, TREE_OPERAND (arg00, 1)),
10833 fold_convert (TREE_TYPE (arg0),
10834 integer_one_node)),
10836 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10837 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10839 fold_build2 (code, type,
10840 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10841 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10842 arg00, TREE_OPERAND (arg01, 1)),
10843 fold_convert (TREE_TYPE (arg0),
10844 integer_one_node)),
10848 /* If this is an NE or EQ comparison of zero against the result of a
10849 signed MOD operation whose second operand is a power of 2, make
10850 the MOD operation unsigned since it is simpler and equivalent. */
10851 if (integer_zerop (arg1)
10852 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10853 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10854 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10855 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10856 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10857 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10859 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10860 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10861 fold_convert (newtype,
10862 TREE_OPERAND (arg0, 0)),
10863 fold_convert (newtype,
10864 TREE_OPERAND (arg0, 1)));
10866 return fold_build2 (code, type, newmod,
10867 fold_convert (newtype, arg1));
10870 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10871 C1 is a valid shift constant, and C2 is a power of two, i.e.
10873 if (TREE_CODE (arg0) == BIT_AND_EXPR
10874 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10875 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10877 && integer_pow2p (TREE_OPERAND (arg0, 1))
10878 && integer_zerop (arg1))
10880 tree itype = TREE_TYPE (arg0);
10881 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10882 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10884 /* Check for a valid shift count. */
10885 if (TREE_INT_CST_HIGH (arg001) == 0
10886 && TREE_INT_CST_LOW (arg001) < prec)
10888 tree arg01 = TREE_OPERAND (arg0, 1);
10889 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10890 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10891 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10892 can be rewritten as (X & (C2 << C1)) != 0. */
10893 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10895 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10896 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10897 return fold_build2 (code, type, tem, arg1);
10899 /* Otherwise, for signed (arithmetic) shifts,
10900 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10901 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10902 else if (!TYPE_UNSIGNED (itype))
10903 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10904 arg000, build_int_cst (itype, 0));
10905 /* Otherwise, of unsigned (logical) shifts,
10906 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10907 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10909 return omit_one_operand (type,
10910 code == EQ_EXPR ? integer_one_node
10911 : integer_zero_node,
10916 /* If this is an NE comparison of zero with an AND of one, remove the
10917 comparison since the AND will give the correct value. */
10918 if (code == NE_EXPR
10919 && integer_zerop (arg1)
10920 && TREE_CODE (arg0) == BIT_AND_EXPR
10921 && integer_onep (TREE_OPERAND (arg0, 1)))
10922 return fold_convert (type, arg0);
10924 /* If we have (A & C) == C where C is a power of 2, convert this into
10925 (A & C) != 0. Similarly for NE_EXPR. */
10926 if (TREE_CODE (arg0) == BIT_AND_EXPR
10927 && integer_pow2p (TREE_OPERAND (arg0, 1))
10928 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10929 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10930 arg0, fold_convert (TREE_TYPE (arg0),
10931 integer_zero_node));
10933 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10934 bit, then fold the expression into A < 0 or A >= 0. */
10935 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10939 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10940 Similarly for NE_EXPR. */
10941 if (TREE_CODE (arg0) == BIT_AND_EXPR
10942 && TREE_CODE (arg1) == INTEGER_CST
10943 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10945 tree notc = fold_build1 (BIT_NOT_EXPR,
10946 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10947 TREE_OPERAND (arg0, 1));
10948 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10950 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10951 if (integer_nonzerop (dandnotc))
10952 return omit_one_operand (type, rslt, arg0);
10955 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10956 Similarly for NE_EXPR. */
10957 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10958 && TREE_CODE (arg1) == INTEGER_CST
10959 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10961 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10962 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10963 TREE_OPERAND (arg0, 1), notd);
10964 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10965 if (integer_nonzerop (candnotd))
10966 return omit_one_operand (type, rslt, arg0);
10969 /* If this is a comparison of a field, we may be able to simplify it. */
10970 if ((TREE_CODE (arg0) == COMPONENT_REF
10971 || TREE_CODE (arg0) == BIT_FIELD_REF)
10972 /* Handle the constant case even without -O
10973 to make sure the warnings are given. */
10974 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10976 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10981 /* Optimize comparisons of strlen vs zero to a compare of the
10982 first character of the string vs zero. To wit,
10983 strlen(ptr) == 0 => *ptr == 0
10984 strlen(ptr) != 0 => *ptr != 0
10985 Other cases should reduce to one of these two (or a constant)
10986 due to the return value of strlen being unsigned. */
10987 if (TREE_CODE (arg0) == CALL_EXPR
10988 && integer_zerop (arg1))
10990 tree fndecl = get_callee_fndecl (arg0);
10994 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10995 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10996 && (arglist = TREE_OPERAND (arg0, 1))
10997 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10998 && ! TREE_CHAIN (arglist))
11000 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
11001 return fold_build2 (code, type, iref,
11002 build_int_cst (TREE_TYPE (iref), 0));
11006 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11007 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11008 if (TREE_CODE (arg0) == RSHIFT_EXPR
11009 && integer_zerop (arg1)
11010 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11012 tree arg00 = TREE_OPERAND (arg0, 0);
11013 tree arg01 = TREE_OPERAND (arg0, 1);
11014 tree itype = TREE_TYPE (arg00);
11015 if (TREE_INT_CST_HIGH (arg01) == 0
11016 && TREE_INT_CST_LOW (arg01)
11017 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11019 if (TYPE_UNSIGNED (itype))
11021 itype = lang_hooks.types.signed_type (itype);
11022 arg00 = fold_convert (itype, arg00);
11024 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11025 type, arg00, build_int_cst (itype, 0));
11029 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11030 if (integer_zerop (arg1)
11031 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11032 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11033 TREE_OPERAND (arg0, 1));
11035 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11036 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11037 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11038 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11039 build_int_cst (TREE_TYPE (arg1), 0));
11040 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11041 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11042 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11043 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11044 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11045 build_int_cst (TREE_TYPE (arg1), 0));
11047 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11048 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11049 && TREE_CODE (arg1) == INTEGER_CST
11050 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11051 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11052 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11053 TREE_OPERAND (arg0, 1), arg1));
11055 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11056 (X & C) == 0 when C is a single bit. */
11057 if (TREE_CODE (arg0) == BIT_AND_EXPR
11058 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11059 && integer_zerop (arg1)
11060 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11062 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11063 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11064 TREE_OPERAND (arg0, 1));
11065 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11069 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11070 constant C is a power of two, i.e. a single bit. */
11071 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11072 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11073 && integer_zerop (arg1)
11074 && integer_pow2p (TREE_OPERAND (arg0, 1))
11075 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11076 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11078 tree arg00 = TREE_OPERAND (arg0, 0);
11079 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11080 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11083 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11084 when C is a power of two, i.e. a single bit. */
11085 if (TREE_CODE (arg0) == BIT_AND_EXPR
11086 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11087 && integer_zerop (arg1)
11088 && integer_pow2p (TREE_OPERAND (arg0, 1))
11089 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11090 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11092 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11093 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11094 arg000, TREE_OPERAND (arg0, 1));
11095 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11096 tem, build_int_cst (TREE_TYPE (tem), 0));
11099 if (integer_zerop (arg1)
11100 && tree_expr_nonzero_p (arg0))
11102 tree res = constant_boolean_node (code==NE_EXPR, type);
11103 return omit_one_operand (type, res, arg0);
11106 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11107 if (TREE_CODE (arg0) == NEGATE_EXPR
11108 && TREE_CODE (arg1) == NEGATE_EXPR)
11109 return fold_build2 (code, type,
11110 TREE_OPERAND (arg0, 0),
11111 TREE_OPERAND (arg1, 0));
11113 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11114 if (TREE_CODE (arg0) == BIT_AND_EXPR
11115 && TREE_CODE (arg1) == BIT_AND_EXPR)
11117 tree arg00 = TREE_OPERAND (arg0, 0);
11118 tree arg01 = TREE_OPERAND (arg0, 1);
11119 tree arg10 = TREE_OPERAND (arg1, 0);
11120 tree arg11 = TREE_OPERAND (arg1, 1);
11121 tree itype = TREE_TYPE (arg0);
11123 if (operand_equal_p (arg01, arg11, 0))
11124 return fold_build2 (code, type,
11125 fold_build2 (BIT_AND_EXPR, itype,
11126 fold_build2 (BIT_XOR_EXPR, itype,
11129 build_int_cst (itype, 0));
11131 if (operand_equal_p (arg01, arg10, 0))
11132 return fold_build2 (code, type,
11133 fold_build2 (BIT_AND_EXPR, itype,
11134 fold_build2 (BIT_XOR_EXPR, itype,
11137 build_int_cst (itype, 0));
11139 if (operand_equal_p (arg00, arg11, 0))
11140 return fold_build2 (code, type,
11141 fold_build2 (BIT_AND_EXPR, itype,
11142 fold_build2 (BIT_XOR_EXPR, itype,
11145 build_int_cst (itype, 0));
11147 if (operand_equal_p (arg00, arg10, 0))
11148 return fold_build2 (code, type,
11149 fold_build2 (BIT_AND_EXPR, itype,
11150 fold_build2 (BIT_XOR_EXPR, itype,
11153 build_int_cst (itype, 0));
11156 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11157 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11159 tree arg00 = TREE_OPERAND (arg0, 0);
11160 tree arg01 = TREE_OPERAND (arg0, 1);
11161 tree arg10 = TREE_OPERAND (arg1, 0);
11162 tree arg11 = TREE_OPERAND (arg1, 1);
11163 tree itype = TREE_TYPE (arg0);
11165 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11166 operand_equal_p guarantees no side-effects so we don't need
11167 to use omit_one_operand on Z. */
11168 if (operand_equal_p (arg01, arg11, 0))
11169 return fold_build2 (code, type, arg00, arg10);
11170 if (operand_equal_p (arg01, arg10, 0))
11171 return fold_build2 (code, type, arg00, arg11);
11172 if (operand_equal_p (arg00, arg11, 0))
11173 return fold_build2 (code, type, arg01, arg10);
11174 if (operand_equal_p (arg00, arg10, 0))
11175 return fold_build2 (code, type, arg01, arg11);
11177 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11178 if (TREE_CODE (arg01) == INTEGER_CST
11179 && TREE_CODE (arg11) == INTEGER_CST)
11180 return fold_build2 (code, type,
11181 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11182 fold_build2 (BIT_XOR_EXPR, itype,
11192 tem = fold_comparison (code, type, op0, op1);
11193 if (tem != NULL_TREE)
11196 /* Transform comparisons of the form X +- C CMP X. */
11197 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11198 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11199 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11200 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11201 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11202 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
11203 && !(flag_wrapv || flag_trapv))))
11205 tree arg01 = TREE_OPERAND (arg0, 1);
11206 enum tree_code code0 = TREE_CODE (arg0);
11209 if (TREE_CODE (arg01) == REAL_CST)
11210 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11212 is_positive = tree_int_cst_sgn (arg01);
11214 /* (X - c) > X becomes false. */
11215 if (code == GT_EXPR
11216 && ((code0 == MINUS_EXPR && is_positive >= 0)
11217 || (code0 == PLUS_EXPR && is_positive <= 0)))
11218 return constant_boolean_node (0, type);
11220 /* Likewise (X + c) < X becomes false. */
11221 if (code == LT_EXPR
11222 && ((code0 == PLUS_EXPR && is_positive >= 0)
11223 || (code0 == MINUS_EXPR && is_positive <= 0)))
11224 return constant_boolean_node (0, type);
11226 /* Convert (X - c) <= X to true. */
11227 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11229 && ((code0 == MINUS_EXPR && is_positive >= 0)
11230 || (code0 == PLUS_EXPR && is_positive <= 0)))
11231 return constant_boolean_node (1, type);
11233 /* Convert (X + c) >= X to true. */
11234 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11236 && ((code0 == PLUS_EXPR && is_positive >= 0)
11237 || (code0 == MINUS_EXPR && is_positive <= 0)))
11238 return constant_boolean_node (1, type);
11240 if (TREE_CODE (arg01) == INTEGER_CST)
11242 /* Convert X + c > X and X - c < X to true for integers. */
11243 if (code == GT_EXPR
11244 && ((code0 == PLUS_EXPR && is_positive > 0)
11245 || (code0 == MINUS_EXPR && is_positive < 0)))
11246 return constant_boolean_node (1, type);
11248 if (code == LT_EXPR
11249 && ((code0 == MINUS_EXPR && is_positive > 0)
11250 || (code0 == PLUS_EXPR && is_positive < 0)))
11251 return constant_boolean_node (1, type);
11253 /* Convert X + c <= X and X - c >= X to false for integers. */
11254 if (code == LE_EXPR
11255 && ((code0 == PLUS_EXPR && is_positive > 0)
11256 || (code0 == MINUS_EXPR && is_positive < 0)))
11257 return constant_boolean_node (0, type);
11259 if (code == GE_EXPR
11260 && ((code0 == MINUS_EXPR && is_positive > 0)
11261 || (code0 == PLUS_EXPR && is_positive < 0)))
11262 return constant_boolean_node (0, type);
11266 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11267 This transformation affects the cases which are handled in later
11268 optimizations involving comparisons with non-negative constants. */
11269 if (TREE_CODE (arg1) == INTEGER_CST
11270 && TREE_CODE (arg0) != INTEGER_CST
11271 && tree_int_cst_sgn (arg1) > 0)
11273 if (code == GE_EXPR)
11275 arg1 = const_binop (MINUS_EXPR, arg1,
11276 build_int_cst (TREE_TYPE (arg1), 1), 0);
11277 return fold_build2 (GT_EXPR, type, arg0,
11278 fold_convert (TREE_TYPE (arg0), arg1));
11280 if (code == LT_EXPR)
11282 arg1 = const_binop (MINUS_EXPR, arg1,
11283 build_int_cst (TREE_TYPE (arg1), 1), 0);
11284 return fold_build2 (LE_EXPR, type, arg0,
11285 fold_convert (TREE_TYPE (arg0), arg1));
11289 /* Comparisons with the highest or lowest possible integer of
11290 the specified precision will have known values. */
11292 tree arg1_type = TREE_TYPE (arg1);
11293 unsigned int width = TYPE_PRECISION (arg1_type);
11295 if (TREE_CODE (arg1) == INTEGER_CST
11296 && ! TREE_CONSTANT_OVERFLOW (arg1)
11297 && width <= 2 * HOST_BITS_PER_WIDE_INT
11298 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11300 HOST_WIDE_INT signed_max_hi;
11301 unsigned HOST_WIDE_INT signed_max_lo;
11302 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11304 if (width <= HOST_BITS_PER_WIDE_INT)
11306 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11311 if (TYPE_UNSIGNED (arg1_type))
11313 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11319 max_lo = signed_max_lo;
11320 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11326 width -= HOST_BITS_PER_WIDE_INT;
11327 signed_max_lo = -1;
11328 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11333 if (TYPE_UNSIGNED (arg1_type))
11335 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11340 max_hi = signed_max_hi;
11341 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11345 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11346 && TREE_INT_CST_LOW (arg1) == max_lo)
11350 return omit_one_operand (type, integer_zero_node, arg0);
11353 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11356 return omit_one_operand (type, integer_one_node, arg0);
11359 return fold_build2 (NE_EXPR, type, arg0, arg1);
11361 /* The GE_EXPR and LT_EXPR cases above are not normally
11362 reached because of previous transformations. */
11367 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11369 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11373 arg1 = const_binop (PLUS_EXPR, arg1,
11374 build_int_cst (TREE_TYPE (arg1), 1), 0);
11375 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11377 arg1 = const_binop (PLUS_EXPR, arg1,
11378 build_int_cst (TREE_TYPE (arg1), 1), 0);
11379 return fold_build2 (NE_EXPR, type, arg0, arg1);
11383 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11385 && TREE_INT_CST_LOW (arg1) == min_lo)
11389 return omit_one_operand (type, integer_zero_node, arg0);
11392 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11395 return omit_one_operand (type, integer_one_node, arg0);
11398 return fold_build2 (NE_EXPR, type, op0, op1);
11403 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11405 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11409 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11410 return fold_build2 (NE_EXPR, type, arg0, arg1);
11412 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11413 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11418 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11419 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11420 && TYPE_UNSIGNED (arg1_type)
11421 /* We will flip the signedness of the comparison operator
11422 associated with the mode of arg1, so the sign bit is
11423 specified by this mode. Check that arg1 is the signed
11424 max associated with this sign bit. */
11425 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11426 /* signed_type does not work on pointer types. */
11427 && INTEGRAL_TYPE_P (arg1_type))
11429 /* The following case also applies to X < signed_max+1
11430 and X >= signed_max+1 because of previous transformations. */
11431 if (code == LE_EXPR || code == GT_EXPR)
11434 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11435 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11436 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11437 type, fold_convert (st0, arg0),
11438 build_int_cst (st1, 0));
11444 /* If we are comparing an ABS_EXPR with a constant, we can
11445 convert all the cases into explicit comparisons, but they may
11446 well not be faster than doing the ABS and one comparison.
11447 But ABS (X) <= C is a range comparison, which becomes a subtraction
11448 and a comparison, and is probably faster. */
11449 if (code == LE_EXPR
11450 && TREE_CODE (arg1) == INTEGER_CST
11451 && TREE_CODE (arg0) == ABS_EXPR
11452 && ! TREE_SIDE_EFFECTS (arg0)
11453 && (0 != (tem = negate_expr (arg1)))
11454 && TREE_CODE (tem) == INTEGER_CST
11455 && ! TREE_CONSTANT_OVERFLOW (tem))
11456 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11457 build2 (GE_EXPR, type,
11458 TREE_OPERAND (arg0, 0), tem),
11459 build2 (LE_EXPR, type,
11460 TREE_OPERAND (arg0, 0), arg1));
11462 /* Convert ABS_EXPR<x> >= 0 to true. */
11463 if (code == GE_EXPR
11464 && tree_expr_nonnegative_p (arg0)
11465 && (integer_zerop (arg1)
11466 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11467 && real_zerop (arg1))))
11468 return omit_one_operand (type, integer_one_node, arg0);
11470 /* Convert ABS_EXPR<x> < 0 to false. */
11471 if (code == LT_EXPR
11472 && tree_expr_nonnegative_p (arg0)
11473 && (integer_zerop (arg1) || real_zerop (arg1)))
11474 return omit_one_operand (type, integer_zero_node, arg0);
11476 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11477 and similarly for >= into !=. */
11478 if ((code == LT_EXPR || code == GE_EXPR)
11479 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11480 && TREE_CODE (arg1) == LSHIFT_EXPR
11481 && integer_onep (TREE_OPERAND (arg1, 0)))
11482 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11483 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11484 TREE_OPERAND (arg1, 1)),
11485 build_int_cst (TREE_TYPE (arg0), 0));
11487 if ((code == LT_EXPR || code == GE_EXPR)
11488 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11489 && (TREE_CODE (arg1) == NOP_EXPR
11490 || TREE_CODE (arg1) == CONVERT_EXPR)
11491 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11492 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11494 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11495 fold_convert (TREE_TYPE (arg0),
11496 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11497 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11499 build_int_cst (TREE_TYPE (arg0), 0));
11503 case UNORDERED_EXPR:
11511 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11513 t1 = fold_relational_const (code, type, arg0, arg1);
11514 if (t1 != NULL_TREE)
11518 /* If the first operand is NaN, the result is constant. */
11519 if (TREE_CODE (arg0) == REAL_CST
11520 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11521 && (code != LTGT_EXPR || ! flag_trapping_math))
11523 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11524 ? integer_zero_node
11525 : integer_one_node;
11526 return omit_one_operand (type, t1, arg1);
11529 /* If the second operand is NaN, the result is constant. */
11530 if (TREE_CODE (arg1) == REAL_CST
11531 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11532 && (code != LTGT_EXPR || ! flag_trapping_math))
11534 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11535 ? integer_zero_node
11536 : integer_one_node;
11537 return omit_one_operand (type, t1, arg0);
11540 /* Simplify unordered comparison of something with itself. */
11541 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11542 && operand_equal_p (arg0, arg1, 0))
11543 return constant_boolean_node (1, type);
11545 if (code == LTGT_EXPR
11546 && !flag_trapping_math
11547 && operand_equal_p (arg0, arg1, 0))
11548 return constant_boolean_node (0, type);
11550 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11552 tree targ0 = strip_float_extensions (arg0);
11553 tree targ1 = strip_float_extensions (arg1);
11554 tree newtype = TREE_TYPE (targ0);
11556 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11557 newtype = TREE_TYPE (targ1);
11559 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11560 return fold_build2 (code, type, fold_convert (newtype, targ0),
11561 fold_convert (newtype, targ1));
11566 case COMPOUND_EXPR:
11567 /* When pedantic, a compound expression can be neither an lvalue
11568 nor an integer constant expression. */
11569 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11571 /* Don't let (0, 0) be null pointer constant. */
11572 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11573 : fold_convert (type, arg1);
11574 return pedantic_non_lvalue (tem);
11577 if ((TREE_CODE (arg0) == REAL_CST
11578 && TREE_CODE (arg1) == REAL_CST)
11579 || (TREE_CODE (arg0) == INTEGER_CST
11580 && TREE_CODE (arg1) == INTEGER_CST))
11581 return build_complex (type, arg0, arg1);
11585 /* An ASSERT_EXPR should never be passed to fold_binary. */
11586 gcc_unreachable ();
11590 } /* switch (code) */
11593 /* Callback for walk_tree, looking for LABEL_EXPR.
11594 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
11595 Do not check the sub-tree of GOTO_EXPR. */
/* walk_tree callback used by contains_label_p.  Visits node *TP; per the
   comment above, it returns *TP when it is a LABEL_EXPR (which stops the
   walk) and does not descend into GOTO_EXPR sub-trees.  DATA is unused.
   NOTE(review): several interior lines (case labels, returns, braces) are
   elided in this excerpt; only comments were added here.  */
11598 contains_label_1 (tree *tp,
11599 int *walk_subtrees,
11600 void *data ATTRIBUTE_UNUSED)
/* Dispatch on the tree code of the node being visited.  */
11602 switch (TREE_CODE (*tp))
/* Presumably the GOTO_EXPR case: prune the walk so the goto's
   sub-tree is not scanned — TODO confirm against the full source.  */
11607 *walk_subtrees = 0;
11614 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11615 accessible from outside the sub-tree. Returns NULL_TREE if no
11616 addressable label is found. */
/* Return nonzero if the sub-tree ST contains a LABEL_EXPR, using the
   walk_tree callback contains_label_1 above: walk_tree yields the label
   node itself when one is found, NULL_TREE otherwise.  */
11619 contains_label_p (tree st)
11621 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11624 /* Fold a ternary expression of code CODE and type TYPE with operands
11625 OP0, OP1, and OP2. Return the folded expression if folding is
11626 successful. Otherwise, return NULL_TREE. */
/* NOTE(review): this excerpt of fold_ternary is heavily elided — braces,
   several case labels and stray statements are missing from the view, so
   only comments were added; the code bytes are untouched.  */
11629 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11632 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11633 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* fold_ternary only handles expression codes with exactly 3 operands.  */
11635 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11636 && TREE_CODE_LENGTH (code) == 3);
11638 /* Strip any conversions that don't change the mode. This is safe
11639 for every expression, except for a comparison expression because
11640 its signedness is derived from its operands. So, in the latter
11641 case, only strip conversions that don't change the signedness.
11643 Note that this is done as an internal manipulation within the
11644 constant folder, in order to find the simplest representation of
11645 the arguments so that their form can be studied. In any case,
11646 the appropriate type conversions should be put back in the tree
11647 that will get out of the constant folder. */
/* COMPONENT_REF of a CONSTRUCTOR: look the field up directly in the
   constructor's element list.  */
11662 case COMPONENT_REF:
11663 if (TREE_CODE (arg0) == CONSTRUCTOR
11664 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11666 unsigned HOST_WIDE_INT idx;
11668 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
/* Presumably the COND_EXPR case begins here (the case label itself is
   elided) — TODO confirm against the full source.  */
11675 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11676 so all simple results must be passed through pedantic_non_lvalue. */
/* Constant condition: select op1 or op2 outright.  */
11677 if (TREE_CODE (arg0) == INTEGER_CST)
11679 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11680 tem = integer_zerop (arg0) ? op2 : op1;
11681 /* Only optimize constant conditions when the selected branch
11682 has the same type as the COND_EXPR. This avoids optimizing
11683 away "c ? x : throw", where the throw has a void type.
11684 Avoid throwing away an operand which contains a label. */
11685 if ((!TREE_SIDE_EFFECTS (unused_op)
11686 || !contains_label_p (unused_op))
11687 && (! VOID_TYPE_P (TREE_TYPE (tem))
11688 || VOID_TYPE_P (type)))
11689 return pedantic_non_lvalue (tem);
/* A ? X : X simplifies to X (side-effects of A preserved by the
   pedantic_omit helper).  */
11692 if (operand_equal_p (arg1, op2, 0))
11693 return pedantic_omit_one_operand (type, arg1, arg0);
11695 /* If we have A op B ? A : C, we may be able to convert this to a
11696 simpler expression, depending on the operation and the values
11697 of B and C. Signed zeros prevent all of these transformations,
11698 for reasons given above each one.
11700 Also try swapping the arguments and inverting the conditional. */
11701 if (COMPARISON_CLASS_P (arg0)
11702 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11703 arg1, TREE_OPERAND (arg0, 1))
11704 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11706 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
/* Same idea with the arms swapped: invert the comparison first.  */
11711 if (COMPARISON_CLASS_P (arg0)
11712 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11714 TREE_OPERAND (arg0, 1))
11715 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11717 tem = fold_truth_not_expr (arg0);
11718 if (tem && COMPARISON_CLASS_P (tem))
11720 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11726 /* If the second operand is simpler than the third, swap them
11727 since that produces better jump optimization results. */
11728 if (truth_value_p (TREE_CODE (arg0))
11729 && tree_swap_operands_p (op1, op2, false))
11731 /* See if this can be inverted. If it can't, possibly because
11732 it was a floating-point inequality comparison, don't do
11734 tem = fold_truth_not_expr (arg0);
11736 return fold_build3 (code, type, tem, op2, op1);
11739 /* Convert A ? 1 : 0 to simply A. */
11740 if (integer_onep (op1)
11741 && integer_zerop (op2)
11742 /* If we try to convert OP0 to our type, the
11743 call to fold will try to move the conversion inside
11744 a COND, which will recurse. In that case, the COND_EXPR
11745 is probably the best choice, so leave it alone. */
11746 && type == TREE_TYPE (arg0))
11747 return pedantic_non_lvalue (arg0);
11749 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11750 over COND_EXPR in cases such as floating point comparisons. */
11751 if (integer_zerop (op1)
11752 && integer_onep (op2)
11753 && truth_value_p (TREE_CODE (arg0)))
11754 return pedantic_non_lvalue (fold_convert (type,
11755 invert_truthvalue (arg0)));
11757 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11758 if (TREE_CODE (arg0) == LT_EXPR
11759 && integer_zerop (TREE_OPERAND (arg0, 1))
11760 && integer_zerop (op2)
11761 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11763 /* sign_bit_p only checks ARG1 bits within A's precision.
11764 If <sign bit of A> has wider type than A, bits outside
11765 of A's precision in <sign bit of A> need to be checked.
11766 If they are all 0, this optimization needs to be done
11767 in unsigned A's type, if they are all 1 in signed A's type,
11768 otherwise this can't be done. */
11769 if (TYPE_PRECISION (TREE_TYPE (tem))
11770 < TYPE_PRECISION (TREE_TYPE (arg1))
11771 && TYPE_PRECISION (TREE_TYPE (tem))
11772 < TYPE_PRECISION (type))
/* Build the hi/lo mask for arg1's bits that lie outside A's
   precision (double-word representation of the constant).  */
11774 unsigned HOST_WIDE_INT mask_lo;
11775 HOST_WIDE_INT mask_hi;
11776 int inner_width, outer_width;
11779 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11780 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11781 if (outer_width > TYPE_PRECISION (type))
11782 outer_width = TYPE_PRECISION (type);
11784 if (outer_width > HOST_BITS_PER_WIDE_INT)
11786 mask_hi = ((unsigned HOST_WIDE_INT) -1
11787 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11793 mask_lo = ((unsigned HOST_WIDE_INT) -1
11794 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11796 if (inner_width > HOST_BITS_PER_WIDE_INT)
11798 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11799 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11803 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11804 >> (HOST_BITS_PER_WIDE_INT - inner_width));
/* All outside bits set: redo the AND in signed A's type.  */
11806 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11807 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11809 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11810 tem = fold_convert (tem_type, tem);
/* All outside bits clear: redo the AND in unsigned A's type.  */
11812 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11813 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11815 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11816 tem = fold_convert (tem_type, tem);
11823 return fold_convert (type,
11824 fold_build2 (BIT_AND_EXPR,
11825 TREE_TYPE (tem), tem,
11826 fold_convert (TREE_TYPE (tem),
11830 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11831 already handled above. */
11832 if (TREE_CODE (arg0) == BIT_AND_EXPR
11833 && integer_onep (TREE_OPERAND (arg0, 1))
11834 && integer_zerop (op2)
11835 && integer_pow2p (arg1))
11837 tree tem = TREE_OPERAND (arg0, 0);
11839 if (TREE_CODE (tem) == RSHIFT_EXPR
11840 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11841 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11842 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11843 return fold_build2 (BIT_AND_EXPR, type,
11844 TREE_OPERAND (tem, 0), arg1);
11847 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11848 is probably obsolete because the first operand should be a
11849 truth value (that's why we have the two cases above), but let's
11850 leave it in until we can confirm this for all front-ends. */
11851 if (integer_zerop (op2)
11852 && TREE_CODE (arg0) == NE_EXPR
11853 && integer_zerop (TREE_OPERAND (arg0, 1))
11854 && integer_pow2p (arg1)
11855 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11856 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11857 arg1, OEP_ONLY_CONST))
11858 return pedantic_non_lvalue (fold_convert (type,
11859 TREE_OPERAND (arg0, 0)));
11861 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11862 if (integer_zerop (op2)
11863 && truth_value_p (TREE_CODE (arg0))
11864 && truth_value_p (TREE_CODE (arg1)))
11865 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11866 fold_convert (type, arg0),
11869 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11870 if (integer_onep (op2)
11871 && truth_value_p (TREE_CODE (arg0))
11872 && truth_value_p (TREE_CODE (arg1)))
11874 /* Only perform transformation if ARG0 is easily inverted. */
11875 tem = fold_truth_not_expr (arg0);
11877 return fold_build2 (TRUTH_ORIF_EXPR, type,
11878 fold_convert (type, tem),
11882 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11883 if (integer_zerop (arg1)
11884 && truth_value_p (TREE_CODE (arg0))
11885 && truth_value_p (TREE_CODE (op2)))
11887 /* Only perform transformation if ARG0 is easily inverted. */
11888 tem = fold_truth_not_expr (arg0);
11890 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11891 fold_convert (type, tem),
11895 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11896 if (integer_onep (arg1)
11897 && truth_value_p (TREE_CODE (arg0))
11898 && truth_value_p (TREE_CODE (op2)))
11899 return fold_build2 (TRUTH_ORIF_EXPR, type,
11900 fold_convert (type, arg0),
/* Presumably the CALL_EXPR case (label elided): fold calls whose
   callee is a known built-in — TODO confirm against the full source.  */
11906 /* Check for a built-in function. */
11907 if (TREE_CODE (op0) == ADDR_EXPR
11908 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11909 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11910 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
/* BIT_FIELD_REF of a VECTOR_CST: extract the selected element when the
   reference is exactly one aligned element wide.  */
11913 case BIT_FIELD_REF:
11914 if (TREE_CODE (arg0) == VECTOR_CST
11915 && type == TREE_TYPE (TREE_TYPE (arg0))
11916 && host_integerp (arg1, 1)
11917 && host_integerp (op2, 1))
11919 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11920 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11923 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11924 && (idx % width) == 0
11925 && (idx = idx / width)
11926 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
/* Walk the element chain to the idx-th element; a short chain means
   the trailing elements are implicitly zero.  */
11928 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11929 while (idx-- > 0 && elements)
11930 elements = TREE_CHAIN (elements);
11932 return TREE_VALUE (elements);
11934 return fold_convert (type, integer_zero_node);
11941 } /* switch (code) */
11944 /* Perform constant folding and related simplification of EXPR.
11945 The related simplifications include x*1 => x, x*0 => 0, etc.,
11946 and application of the associative law.
11947 NOP_EXPR conversions may be removed freely (as long as we
11948 are careful not to change the type of the overall expression).
11949 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11950 but we can constant-fold them if they have constant operands. */
11952 #ifdef ENABLE_FOLD_CHECKING
11953 # define fold(x) fold_1 (x)
11954 static tree fold_1 (tree);
/* Core of fold: dispatch EXPR to fold_unary / fold_binary / fold_ternary
   according to the arity of its tree code, returning EXPR unchanged when
   no simplification applies.  NOTE(review): the function header, braces
   and the switch's case labels are elided in this excerpt; only comments
   were added.  */
11960 const tree t = expr;
11961 enum tree_code code = TREE_CODE (t);
11962 enum tree_code_class kind = TREE_CODE_CLASS (code);
11965 /* Return right away if a constant. */
11966 if (kind == tcc_constant)
/* Expression codes (including GIMPLE statement codes) are folded by
   arity via the worker functions.  */
11969 if (IS_EXPR_CODE_CLASS (kind)
11970 || IS_GIMPLE_STMT_CODE_CLASS (kind))
11972 tree type = TREE_TYPE (t);
11973 tree op0, op1, op2;
11975 switch (TREE_CODE_LENGTH (code))
/* Arity 1.  */
11978 op0 = TREE_OPERAND (t, 0);
11979 tem = fold_unary (code, type, op0);
11980 return tem ? tem : expr;
/* Arity 2.  */
11982 op0 = TREE_OPERAND (t, 0);
11983 op1 = TREE_OPERAND (t, 1);
11984 tem = fold_binary (code, type, op0, op1);
11985 return tem ? tem : expr;
/* Arity 3.  */
11987 op0 = TREE_OPERAND (t, 0);
11988 op1 = TREE_OPERAND (t, 1);
11989 op2 = TREE_OPERAND (t, 2);
11990 tem = fold_ternary (code, type, op0, op1, op2);
11991 return tem ? tem : expr;
/* Presumably the CONST_DECL case (label elided): fold the declaration's
   initializer — TODO confirm against the full source.  */
12000 return fold (DECL_INITIAL (t));
12004 } /* switch (code) */
12007 #ifdef ENABLE_FOLD_CHECKING
12010 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12011 static void fold_check_failed (tree, tree);
12012 void print_fold_checksum (tree);
12014 /* When --enable-checking=fold, compute a digest of expr before
12015 and after actual fold call to see if fold did not accidentally
12016 change original expr. */
/* ENABLE_FOLD_CHECKING wrapper around fold_1: md5-checksum EXPR before
   and after the call and report via fold_check_failed if fold modified
   its input tree in place.  NOTE(review): the function header and the
   htab declaration/teardown lines are elided in this excerpt; only
   comments were added.  */
12022 struct md5_ctx ctx;
12023 unsigned char checksum_before[16], checksum_after[16];
/* Digest the tree before folding.  */
12026 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12027 md5_init_ctx (&ctx);
12028 fold_checksum_tree (expr, &ctx, ht);
12029 md5_finish_ctx (&ctx, checksum_before);
12032 ret = fold_1 (expr);
/* Digest the same tree after folding; any difference means fold
   mutated its argument.  */
12034 md5_init_ctx (&ctx);
12035 fold_checksum_tree (expr, &ctx, ht);
12036 md5_finish_ctx (&ctx, checksum_after);
12039 if (memcmp (checksum_before, checksum_after, 16))
12040 fold_check_failed (expr, ret);
/* Debug helper: compute the md5 checksum of EXPR (via fold_checksum_tree,
   with a fresh pointer-hash table to break sharing/cycles) and print it
   to stderr as 32 hex digits followed by a newline.  */
12046 print_fold_checksum (tree expr)
12048 struct md5_ctx ctx;
12049 unsigned char checksum[16], cnt;
12052 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12053 md5_init_ctx (&ctx);
12054 fold_checksum_tree (expr, &ctx, ht);
12055 md5_finish_ctx (&ctx, checksum);
12057 for (cnt = 0; cnt < 16; ++cnt)
12058 fprintf (stderr, "%02x", checksum[cnt]);
12059 putc ('\n', stderr);
/* Abort compilation (via internal_error) when the fold-checking wrapper
   detects that fold changed the tree it was given.  Both parameters are
   intentionally unused (ATTRIBUTE_UNUSED).  */
12063 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
12065 internal_error ("fold check: original tree changed by fold");
/* Fragment (lines missing): recursively feed the bytes of EXPR and the
   trees it references into MD5 context CTX, using hash table HT to
   avoid revisiting shared nodes.  Fields that fold is allowed to touch
   (DECL_ASSEMBLER_NAME, type caches) are masked out by checksumming a
   stack copy with those fields cleared.  */
12069 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
12072 enum tree_code code;
/* Scratch copy; the assert below checks it is big enough for any node
   we may need to copy into it.  */
12073 struct tree_function_decl buf;
12078 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
12079 <= sizeof (struct tree_function_decl))
12080 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
12083 slot = htab_find_slot (ht, expr, INSERT);
12087 code = TREE_CODE (expr);
12088 if (TREE_CODE_CLASS (code) == tcc_declaration
12089 && DECL_ASSEMBLER_NAME_SET_P (expr))
12091 /* Allow DECL_ASSEMBLER_NAME to be modified. */
12092 memcpy ((char *) &buf, expr, tree_size (expr))
12093 expr = (tree) &buf;
12094 SET_DECL_ASSEMBLER_NAME (expr, NULL);
12096 else if (TREE_CODE_CLASS (code) == tcc_type
12097 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
12098 || TYPE_CACHED_VALUES_P (expr)
12099 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
12101 /* Allow these fields to be modified. */
12102 memcpy ((char *) &buf, expr, tree_size (expr));
12103 expr = (tree) &buf;
12104 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
12105 TYPE_POINTER_TO (expr) = NULL;
12106 TYPE_REFERENCE_TO (expr) = NULL;
12107 if (TYPE_CACHED_VALUES_P (expr))
12109 TYPE_CACHED_VALUES_P (expr) = 0;
12110 TYPE_CACHED_VALUES (expr) = NULL;
/* Hash the raw node bytes, then recurse into referenced trees.  */
12113 md5_process_bytes (expr, tree_size (expr), ctx);
12114 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12115 if (TREE_CODE_CLASS (code) != tcc_type
12116 && TREE_CODE_CLASS (code) != tcc_declaration
12117 && code != TREE_LIST)
12118 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12119 switch (TREE_CODE_CLASS (code))
12125 md5_process_bytes (TREE_STRING_POINTER (expr),
12126 TREE_STRING_LENGTH (expr), ctx);
12129 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12130 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12133 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
12139 case tcc_exceptional:
/* TREE_LIST: walk purpose/value, then tail-recurse down the chain
   (the goto presumably targets a recursive_label lost in extraction).  */
12143 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12144 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12145 expr = TREE_CHAIN (expr);
12146 goto recursive_label;
12149 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12150 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12156 case tcc_expression:
12157 case tcc_reference:
12158 case tcc_comparison:
12161 case tcc_statement:
12162 len = TREE_CODE_LENGTH (code);
12163 for (i = 0; i < len; ++i)
12164 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12166 case tcc_declaration:
12167 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12168 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12169 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12171 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12172 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12173 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12174 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12175 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12177 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
12178 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
12180 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12182 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12183 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12184 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
/* tcc_type case (label lost in extraction): hash the type's own
   referenced trees.  */
12188 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12189 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12190 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12191 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12192 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12193 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12194 if (INTEGRAL_TYPE_P (expr)
12195 || SCALAR_FLOAT_TYPE_P (expr))
12197 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12198 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12200 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12201 if (TREE_CODE (expr) == RECORD_TYPE
12202 || TREE_CODE (expr) == UNION_TYPE
12203 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12204 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12205 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12214 /* Fold a unary tree expression with code CODE of type TYPE with an
12215 operand OP0. Return a folded expression if successful. Otherwise,
12216 return a tree expression with code CODE of type TYPE with an
/* Fragment: try fold_unary first; on failure fall back to building the
   raw node.  Under ENABLE_FOLD_CHECKING, checksum OP0 before and after
   to catch in-place mutation by fold.  */
12220 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12223 #ifdef ENABLE_FOLD_CHECKING
12224 unsigned char checksum_before[16], checksum_after[16];
12225 struct md5_ctx ctx;
12228 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12229 md5_init_ctx (&ctx);
12230 fold_checksum_tree (op0, &ctx, ht);
12231 md5_finish_ctx (&ctx, checksum_before);
12235 tem = fold_unary (code, type, op0);
12237 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12239 #ifdef ENABLE_FOLD_CHECKING
12240 md5_init_ctx (&ctx);
12241 fold_checksum_tree (op0, &ctx, ht);
12242 md5_finish_ctx (&ctx, checksum_after);
12245 if (memcmp (checksum_before, checksum_after, 16))
12246 fold_check_failed (op0, tem);
12251 /* Fold a binary tree expression with code CODE of type TYPE with
12252 operands OP0 and OP1. Return a folded expression if successful.
12253 Otherwise, return a tree expression with code CODE of type TYPE
12254 with operands OP0 and OP1. */
/* Fragment: same pattern as fold_build1_stat, but both operands are
   checksummed independently before and after fold_binary.  */
12257 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12261 #ifdef ENABLE_FOLD_CHECKING
12262 unsigned char checksum_before_op0[16],
12263 checksum_before_op1[16],
12264 checksum_after_op0[16],
12265 checksum_after_op1[16];
12266 struct md5_ctx ctx;
12269 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12270 md5_init_ctx (&ctx);
12271 fold_checksum_tree (op0, &ctx, ht);
12272 md5_finish_ctx (&ctx, checksum_before_op0);
12275 md5_init_ctx (&ctx);
12276 fold_checksum_tree (op1, &ctx, ht);
12277 md5_finish_ctx (&ctx, checksum_before_op1);
12281 tem = fold_binary (code, type, op0, op1);
12283 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12285 #ifdef ENABLE_FOLD_CHECKING
12286 md5_init_ctx (&ctx);
12287 fold_checksum_tree (op0, &ctx, ht);
12288 md5_finish_ctx (&ctx, checksum_after_op0);
12291 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12292 fold_check_failed (op0, tem);
12294 md5_init_ctx (&ctx);
12295 fold_checksum_tree (op1, &ctx, ht);
12296 md5_finish_ctx (&ctx, checksum_after_op1);
12299 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12300 fold_check_failed (op1, tem);
12305 /* Fold a ternary tree expression with code CODE of type TYPE with
12306 operands OP0, OP1, and OP2. Return a folded expression if
12307 successful. Otherwise, return a tree expression with code CODE of
12308 type TYPE with operands OP0, OP1, and OP2. */
/* Fragment: ternary analogue of fold_build2_stat -- three operands,
   each checksummed before and after fold_ternary.  */
12311 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
12315 #ifdef ENABLE_FOLD_CHECKING
12316 unsigned char checksum_before_op0[16],
12317 checksum_before_op1[16],
12318 checksum_before_op2[16],
12319 checksum_after_op0[16],
12320 checksum_after_op1[16],
12321 checksum_after_op2[16];
12322 struct md5_ctx ctx;
12325 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12326 md5_init_ctx (&ctx);
12327 fold_checksum_tree (op0, &ctx, ht);
12328 md5_finish_ctx (&ctx, checksum_before_op0);
12331 md5_init_ctx (&ctx);
12332 fold_checksum_tree (op1, &ctx, ht);
12333 md5_finish_ctx (&ctx, checksum_before_op1);
12336 md5_init_ctx (&ctx);
12337 fold_checksum_tree (op2, &ctx, ht);
12338 md5_finish_ctx (&ctx, checksum_before_op2);
12342 tem = fold_ternary (code, type, op0, op1, op2);
12344 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
12346 #ifdef ENABLE_FOLD_CHECKING
12347 md5_init_ctx (&ctx);
12348 fold_checksum_tree (op0, &ctx, ht);
12349 md5_finish_ctx (&ctx, checksum_after_op0);
12352 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12353 fold_check_failed (op0, tem);
12355 md5_init_ctx (&ctx);
12356 fold_checksum_tree (op1, &ctx, ht);
12357 md5_finish_ctx (&ctx, checksum_after_op1);
12360 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12361 fold_check_failed (op1, tem);
12363 md5_init_ctx (&ctx);
12364 fold_checksum_tree (op2, &ctx, ht);
12365 md5_finish_ctx (&ctx, checksum_after_op2);
12368 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12369 fold_check_failed (op2, tem);
12374 /* Perform constant folding and related simplification of initializer
12375 expression EXPR. These behave identically to "fold_buildN" but ignore
12376 potential run-time traps and exceptions that fold must preserve. */
/* Fragment: the START/END macros save and clear the trap-related flags
   around a fold_buildN call so initializers fold aggressively; note
   flag_trapv is saved/restored but its clearing line is missing from
   this extraction -- TODO confirm against the full source.  */
12378 #define START_FOLD_INIT \
12379 int saved_signaling_nans = flag_signaling_nans;\
12380 int saved_trapping_math = flag_trapping_math;\
12381 int saved_rounding_math = flag_rounding_math;\
12382 int saved_trapv = flag_trapv;\
12383 int saved_folding_initializer = folding_initializer;\
12384 flag_signaling_nans = 0;\
12385 flag_trapping_math = 0;\
12386 flag_rounding_math = 0;\
12388 folding_initializer = 1;
12390 #define END_FOLD_INIT \
12391 flag_signaling_nans = saved_signaling_nans;\
12392 flag_trapping_math = saved_trapping_math;\
12393 flag_rounding_math = saved_rounding_math;\
12394 flag_trapv = saved_trapv;\
12395 folding_initializer = saved_folding_initializer;
/* Unary/binary/ternary initializer-folding entry points (bodies
   truncated by extraction).  */
12398 fold_build1_initializer (enum tree_code code, tree type, tree op)
12403 result = fold_build1 (code, type, op);
12410 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
12415 result = fold_build2 (code, type, op0, op1);
12422 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
12428 result = fold_build3 (code, type, op0, op1, op2);
12434 #undef START_FOLD_INIT
12435 #undef END_FOLD_INIT
12437 /* Determine if first argument is a multiple of second argument. Return 0 if
12438 it is not, or we cannot easily determined it to be.
12440 An example of the sort of thing we care about (at this point; this routine
12441 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12442 fold cases do now) is discovering that
12444 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12450 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12452 This code also handles discovering that
12454 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12456 is a multiple of 8 so we don't have to worry about dealing with a
12457 possible remainder.
12459 Note that we *look* inside a SAVE_EXPR only to determine how it was
12460 calculated; it is not safe for fold to do much of anything else with the
12461 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12462 at run time. For example, the latter example above *cannot* be implemented
12463 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12464 evaluation time of the original SAVE_EXPR is not necessarily the same at
12465 the time the new expression is evaluated. The only optimization of this
12466 sort that would be valid is changing
12468 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12472 SAVE_EXPR (I) * SAVE_EXPR (J)
12474 (where the same SAVE_EXPR (J) is used in the original and the
12475 transformed version). */
/* Fragment (switch-case labels mostly lost in extraction).  */
12478 multiple_of_p (tree type, tree top, tree bottom)
/* Identical trees are trivially multiples of each other.  */
12480 if (operand_equal_p (top, bottom, 0))
12483 if (TREE_CODE (type) != INTEGER_TYPE)
12486 switch (TREE_CODE (top))
12489 /* Bitwise and provides a power of two multiple. If the mask is
12490 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12491 if (!integer_pow2p (bottom))
12496 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12497 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Presumably PLUS/MINUS: both operands must be multiples.  */
12501 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12502 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Presumably LSHIFT_EXPR: rewrite as a multiplication by 1<<op1 and
   recurse, guarding against shift counts that would overflow.  */
12505 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12509 op1 = TREE_OPERAND (top, 1);
12510 /* const_binop may not detect overflow correctly,
12511 so check for it explicitly here. */
12512 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
12513 > TREE_INT_CST_LOW (op1)
12514 && TREE_INT_CST_HIGH (op1) == 0
12515 && 0 != (t1 = fold_convert (type,
12516 const_binop (LSHIFT_EXPR,
12519 && ! TREE_OVERFLOW (t1))
12520 return multiple_of_p (type, t1, bottom);
12525 /* Can't handle conversions from non-integral or wider integral type. */
12526 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12527 || (TYPE_PRECISION (type)
12528 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12531 /* .. fall through ... */
12534 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* INTEGER_CST case: only decide when signs are safe, then test
   TOP % BOTTOM == 0.  */
12537 if (TREE_CODE (bottom) != INTEGER_CST
12538 || (TYPE_UNSIGNED (type)
12539 && (tree_int_cst_sgn (top) < 0
12540 || tree_int_cst_sgn (bottom) < 0)))
12542 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
12550 /* Return true if `t' is known to be non-negative. */
/* Fragment (many case labels and breaks lost in extraction): a large
   syntactic/semantic walk proving T >= 0 where possible; conservative
   false otherwise.  */
12553 tree_expr_nonnegative_p (tree t)
12555 if (t == error_mark_node)
/* Unsigned types are non-negative by construction.  */
12558 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12561 switch (TREE_CODE (t))
12564 /* Query VRP to see if it has recorded any information about
12565 the range of this object. */
12566 return ssa_name_nonnegative_p (t);
12569 /* We can't return 1 if flag_wrapv is set because
12570 ABS_EXPR<INT_MIN> = INT_MIN. */
12571 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
12576 return tree_int_cst_sgn (t) >= 0;
12579 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* Presumably PLUS_EXPR: for floats, nonneg + nonneg is nonneg.  */
12582 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12583 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12584 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12586 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12587 both unsigned and at least 2 bits shorter than the result. */
12588 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12589 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12590 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12592 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12593 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12594 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12595 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12597 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12598 TYPE_PRECISION (inner2)) + 1;
12599 return prec < TYPE_PRECISION (TREE_TYPE (t));
/* Presumably MULT_EXPR.  */
12605 if (FLOAT_TYPE_P (TREE_TYPE (t)))
12607 /* x * x for floating point x is always non-negative. */
12608 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
12610 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12611 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12614 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12615 both unsigned and their total bits is shorter than the result. */
12616 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
12617 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
12618 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
12620 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
12621 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
12622 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12623 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12624 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
12625 < TYPE_PRECISION (TREE_TYPE (t));
/* Presumably BIT_IOR/MAX-style case: either operand nonneg suffices.  */
12631 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12632 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12638 case TRUNC_DIV_EXPR:
12639 case CEIL_DIV_EXPR:
12640 case FLOOR_DIV_EXPR:
12641 case ROUND_DIV_EXPR:
12642 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12643 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
12645 case TRUNC_MOD_EXPR:
12646 case CEIL_MOD_EXPR:
12647 case FLOOR_MOD_EXPR:
12648 case ROUND_MOD_EXPR:
12650 case NON_LVALUE_EXPR:
12652 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12654 case COMPOUND_EXPR:
12656 case GIMPLE_MODIFY_STMT:
12658 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
/* Presumably BIND_EXPR: sign of the last expression in the body.  */
12660 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
/* Presumably COND_EXPR: both arms must be nonneg.  */
12663 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
12664 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
/* Presumably NOP_EXPR (conversions).  */
12668 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12669 tree outer_type = TREE_TYPE (t);
12671 if (TREE_CODE (outer_type) == REAL_TYPE)
12673 if (TREE_CODE (inner_type) == REAL_TYPE)
12674 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12675 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12677 if (TYPE_UNSIGNED (inner_type))
12679 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12682 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
12684 if (TREE_CODE (inner_type) == REAL_TYPE)
12685 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
12686 if (TREE_CODE (inner_type) == INTEGER_TYPE)
12687 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12688 && TYPE_UNSIGNED (inner_type);
/* Presumably TARGET_EXPR: inspect the initializer of the slot.  */
12695 tree temp = TARGET_EXPR_SLOT (t);
12696 t = TARGET_EXPR_INITIAL (t);
12698 /* If the initializer is non-void, then it's a normal expression
12699 that will be assigned to the slot. */
12700 if (!VOID_TYPE_P (t))
12701 return tree_expr_nonnegative_p (t);
12703 /* Otherwise, the initializer sets the slot in some way. One common
12704 way is an assignment statement at the end of the initializer. */
12707 if (TREE_CODE (t) == BIND_EXPR)
12708 t = expr_last (BIND_EXPR_BODY (t));
12709 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
12710 || TREE_CODE (t) == TRY_CATCH_EXPR)
12711 t = expr_last (TREE_OPERAND (t, 0));
12712 else if (TREE_CODE (t) == STATEMENT_LIST)
12717 if ((TREE_CODE (t) == MODIFY_EXPR
12718 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
12719 && GENERIC_TREE_OPERAND (t, 0) == temp)
12720 return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));
/* Presumably CALL_EXPR: known math builtins with provably
   non-negative results.  */
12727 tree fndecl = get_callee_fndecl (t);
12728 tree arglist = TREE_OPERAND (t, 1);
12729 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
12730 switch (DECL_FUNCTION_CODE (fndecl))
12732 CASE_FLT_FN (BUILT_IN_ACOS):
12733 CASE_FLT_FN (BUILT_IN_ACOSH):
12734 CASE_FLT_FN (BUILT_IN_CABS):
12735 CASE_FLT_FN (BUILT_IN_COSH):
12736 CASE_FLT_FN (BUILT_IN_ERFC):
12737 CASE_FLT_FN (BUILT_IN_EXP):
12738 CASE_FLT_FN (BUILT_IN_EXP10):
12739 CASE_FLT_FN (BUILT_IN_EXP2):
12740 CASE_FLT_FN (BUILT_IN_FABS):
12741 CASE_FLT_FN (BUILT_IN_FDIM):
12742 CASE_FLT_FN (BUILT_IN_HYPOT):
12743 CASE_FLT_FN (BUILT_IN_POW10):
12744 CASE_INT_FN (BUILT_IN_FFS):
12745 CASE_INT_FN (BUILT_IN_PARITY):
12746 CASE_INT_FN (BUILT_IN_POPCOUNT):
12747 case BUILT_IN_BSWAP32:
12748 case BUILT_IN_BSWAP64:
12752 CASE_FLT_FN (BUILT_IN_SQRT):
12753 /* sqrt(-0.0) is -0.0. */
12754 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
12756 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12758 CASE_FLT_FN (BUILT_IN_ASINH):
12759 CASE_FLT_FN (BUILT_IN_ATAN):
12760 CASE_FLT_FN (BUILT_IN_ATANH):
12761 CASE_FLT_FN (BUILT_IN_CBRT):
12762 CASE_FLT_FN (BUILT_IN_CEIL):
12763 CASE_FLT_FN (BUILT_IN_ERF):
12764 CASE_FLT_FN (BUILT_IN_EXPM1):
12765 CASE_FLT_FN (BUILT_IN_FLOOR):
12766 CASE_FLT_FN (BUILT_IN_FMOD):
12767 CASE_FLT_FN (BUILT_IN_FREXP):
12768 CASE_FLT_FN (BUILT_IN_LCEIL):
12769 CASE_FLT_FN (BUILT_IN_LDEXP):
12770 CASE_FLT_FN (BUILT_IN_LFLOOR):
12771 CASE_FLT_FN (BUILT_IN_LLCEIL):
12772 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12773 CASE_FLT_FN (BUILT_IN_LLRINT):
12774 CASE_FLT_FN (BUILT_IN_LLROUND):
12775 CASE_FLT_FN (BUILT_IN_LRINT):
12776 CASE_FLT_FN (BUILT_IN_LROUND):
12777 CASE_FLT_FN (BUILT_IN_MODF):
12778 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12779 CASE_FLT_FN (BUILT_IN_RINT):
12780 CASE_FLT_FN (BUILT_IN_ROUND):
12781 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12782 CASE_FLT_FN (BUILT_IN_SINH):
12783 CASE_FLT_FN (BUILT_IN_TANH):
12784 CASE_FLT_FN (BUILT_IN_TRUNC):
12785 /* True if the 1st argument is nonnegative. */
12786 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12788 CASE_FLT_FN (BUILT_IN_FMAX):
12789 /* True if the 1st OR 2nd arguments are nonnegative. */
12790 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12791 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12793 CASE_FLT_FN (BUILT_IN_FMIN):
12794 /* True if the 1st AND 2nd arguments are nonnegative. */
12795 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12796 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12798 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12799 /* True if the 2nd argument is nonnegative. */
12800 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12802 CASE_FLT_FN (BUILT_IN_POWI):
12803 /* True if the 1st argument is nonnegative or the second
12804 argument is an even integer. */
12805 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
12807 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
12808 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
12811 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12813 CASE_FLT_FN (BUILT_IN_POW):
12814 /* True if the 1st argument is nonnegative or the second
12815 argument is an even integer valued real. */
12816 if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
12821 c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
12822 n = real_to_integer (&c);
12825 REAL_VALUE_TYPE cint;
12826 real_from_integer (&cint, VOIDmode, n,
12827 n < 0 ? -1 : 0, 0);
12828 if (real_identical (&c, &cint))
12832 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12839 /* ... fall through ... */
12842 if (truth_value_p (TREE_CODE (t)))
12843 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12847 /* We don't know sign of `t', so be conservative and return false. */
12851 /* Return true when T is an address and is known to be nonzero.
12852 For floating point we further ensure that T is not denormal.
12853 Similar logic is present in nonzero_address in rtlanal.h. */
/* Fragment (case labels/breaks mostly lost in extraction): conservative
   proof that T != 0, for integral and pointer types only.  */
12856 tree_expr_nonzero_p (tree t)
12858 tree type = TREE_TYPE (t);
12860 /* Doing something useful for floating point would need more work. */
12861 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12864 switch (TREE_CODE (t))
12867 /* Query VRP to see if it has recorded any information about
12868 the range of this object. */
12869 return ssa_name_nonzero_p (t);
/* Presumably ABS_EXPR: |x| != 0 iff x != 0.  */
12872 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12875 /* We used to test for !integer_zerop here. This does not work correctly
12876 if TREE_CONSTANT_OVERFLOW (t). */
12877 return (TREE_INT_CST_LOW (t) != 0
12878 || TREE_INT_CST_HIGH (t) != 0);
/* Presumably PLUS_EXPR: only provable without wrapping semantics.  */
12881 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12883 /* With the presence of negative values it is hard
12884 to say something. */
12885 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12886 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12888 /* One of operands must be positive and the other non-negative. */
12889 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12890 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Presumably MULT_EXPR: nonzero * nonzero without wrapping.  */
12895 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12897 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12898 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Presumably NOP_EXPR: a widening conversion preserves nonzero-ness.  */
12904 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12905 tree outer_type = TREE_TYPE (t);
12907 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12908 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
/* Presumably ADDR_EXPR: &object is nonzero unless the decl is weak.  */
12914 tree base = get_base_address (TREE_OPERAND (t, 0));
12919 /* Weak declarations may link to NULL. */
12920 if (VAR_OR_FUNCTION_DECL_P (base))
12921 return !DECL_WEAK (base);
12923 /* Constants are never weak. */
12924 if (CONSTANT_CLASS_P (base))
/* Presumably COND_EXPR: both arms nonzero.  */
12931 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12932 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
/* Presumably MIN_EXPR: both operands nonzero.  */
12935 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12936 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Presumably MAX_EXPR.  */
12939 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12941 /* When both operands are nonzero, then MAX must be too. */
12942 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12945 /* MAX where operand 0 is positive is positive. */
12946 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12948 /* MAX where operand 1 is positive is positive. */
12949 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12950 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12954 case COMPOUND_EXPR:
12956 case GIMPLE_MODIFY_STMT:
12958 return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));
12961 case NON_LVALUE_EXPR:
12962 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* Presumably BIT_IOR_EXPR: either operand nonzero suffices.  */
12965 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12966 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* Presumably CALL_EXPR: alloca never returns NULL.  */
12969 return alloca_call_p (t);
12977 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12978 attempt to fold the expression to a constant without modifying TYPE,
12981 If the expression could be simplified to a constant, then return
12982 the constant. If the expression would not be simplified to a
12983 constant, then return NULL_TREE. */
/* Fragment: thin wrapper -- fold and keep the result only if constant.  */
12986 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12988 tree tem = fold_binary (code, type, op0, op1);
12989 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12992 /* Given the components of a unary expression CODE, TYPE and OP0,
12993 attempt to fold the expression to a constant without modifying
12996 If the expression could be simplified to a constant, then return
12997 the constant. If the expression would not be simplified to a
12998 constant, then return NULL_TREE. */
/* Fragment: unary analogue of fold_binary_to_constant.  */
13001 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13003 tree tem = fold_unary (code, type, op0);
13004 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13007 /* If EXP represents referencing an element in a constant string
13008 (either via pointer arithmetic or array indexing), return the
13009 tree representing the value accessed, otherwise return NULL. */
/* Fragment (lines missing): requires the element to be a single-byte
   integer within the string's bounds before folding to the literal
   character value.  */
13012 fold_read_from_constant_string (tree exp)
13014 if ((TREE_CODE (exp) == INDIRECT_REF
13015 || TREE_CODE (exp) == ARRAY_REF)
13016 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13018 tree exp1 = TREE_OPERAND (exp, 0);
13022 if (TREE_CODE (exp) == INDIRECT_REF)
13023 string = string_constant (exp1, &index);
/* else: ARRAY_REF path -- normalize the index against the array's
   lower bound.  */
13026 tree low_bound = array_ref_low_bound (exp);
13027 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
13029 /* Optimize the special-case of a zero lower bound.
13031 We convert the low_bound to sizetype to avoid some problems
13032 with constant folding. (E.g. suppose the lower bound is 1,
13033 and its mode is QI. Without the conversion,l (ARRAY
13034 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13035 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
13036 if (! integer_zerop (low_bound))
13037 index = size_diffop (index, fold_convert (sizetype, low_bound));
13043 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13044 && TREE_CODE (string) == STRING_CST
13045 && TREE_CODE (index) == INTEGER_CST
13046 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13047 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13049 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13050 return fold_convert (TREE_TYPE (exp),
13051 build_int_cst (NULL_TREE,
13052 (TREE_STRING_POINTER (string)
13053 [TREE_INT_CST_LOW (index)])));
13058 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13059 an integer constant or real constant.
13061 TYPE is the type of the result. */
/* Fragment (case labels/returns lost in extraction).  */
13064 fold_negate_const (tree arg0, tree type)
13066 tree t = NULL_TREE;
13068 switch (TREE_CODE (arg0))
/* INTEGER_CST: negate in double-int form and re-fit into TYPE,
   propagating overflow flags.  */
13072 unsigned HOST_WIDE_INT low;
13073 HOST_WIDE_INT high;
13074 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13075 TREE_INT_CST_HIGH (arg0),
13077 t = force_fit_type_double (type, low, high, 1,
13078 (overflow | TREE_OVERFLOW (arg0))
13079 && !TYPE_UNSIGNED (type),
13080 TREE_CONSTANT_OVERFLOW (arg0));
/* REAL_CST: flip the sign of the real value.  */
13085 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13089 gcc_unreachable ();
13095 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13096 an integer constant or real constant.
13098 TYPE is the type of the result. */
/* Fragment (case labels/returns lost in extraction).  */
13101 fold_abs_const (tree arg0, tree type)
13103 tree t = NULL_TREE;
13105 switch (TREE_CODE (arg0))
13108 /* If the value is unsigned, then the absolute value is
13109 the same as the ordinary value. */
13110 if (TYPE_UNSIGNED (type))
13112 /* Similarly, if the value is non-negative. */
13113 else if (INT_CST_LT (integer_minus_one_node, arg0))
13115 /* If the value is negative, then the absolute value is
/* ... its negation (double-int negate + re-fit, as in
   fold_negate_const).  */
13119 unsigned HOST_WIDE_INT low;
13120 HOST_WIDE_INT high;
13121 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13122 TREE_INT_CST_HIGH (arg0),
13124 t = force_fit_type_double (type, low, high, -1,
13125 overflow | TREE_OVERFLOW (arg0),
13126 TREE_CONSTANT_OVERFLOW (arg0));
/* REAL_CST: clear the sign if negative.  */
13131 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13132 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13138 gcc_unreachable ();
13144 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13145 constant. TYPE is the type of the result. */
/* Fragment: bitwise NOT on the double-int halves, then re-fit.  */
13148 fold_not_const (tree arg0, tree type)
13150 tree t = NULL_TREE;
13152 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13154 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13155 ~TREE_INT_CST_HIGH (arg0), 0,
13156 TREE_OVERFLOW (arg0),
13157 TREE_CONSTANT_OVERFLOW (arg0));
13162 /* Given CODE, a relational operator, the target type, TYPE and two
13163 constant operands OP0 and OP1, return the result of the
13164 relational operation. If the result is not a compile time
13165 constant, then return NULL_TREE. */
/* Fragment (lines missing): handle the REAL_CST/NaN cases first, then
   canonicalize integer comparisons down to EQ/LT.  */
13168 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13170 int result, invert;
13172 /* From here on, the only cases we handle are when the result is
13173 known to be a constant. */
13175 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13177 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13178 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13180 /* Handle the cases where either operand is a NaN. */
13181 if (real_isnan (c0) || real_isnan (c1))
13191 case UNORDERED_EXPR:
/* Ordered comparisons with a NaN may trap; give up when trapping
   math is in effect.  */
13205 if (flag_trapping_math)
13211 gcc_unreachable ();
13214 return constant_boolean_node (result, type);
13217 return constant_boolean_node (real_compare (code, c0, c1), type);
13220 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
13222 To compute GT, swap the arguments and do LT.
13223 To compute GE, do LT and invert the result.
13224 To compute LE, swap the arguments, do LT and invert the result.
13225 To compute NE, do EQ and invert the result.
13227 Therefore, the code below must handle only EQ and LT. */
13229 if (code == LE_EXPR || code == GT_EXPR)
13234 code = swap_tree_comparison (code);
13237 /* Note that it is safe to invert for real values here because we
13238 have already handled the one case that it matters. */
13241 if (code == NE_EXPR || code == GE_EXPR)
13244 code = invert_tree_comparison (code, false);
13247 /* Compute a result for LT or EQ if args permit;
13248 Otherwise return T. */
13249 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
13251 if (code == EQ_EXPR)
13252 result = tree_int_cst_equal (op0, op1);
13253 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
13254 result = INT_CST_LT_UNSIGNED (op0, op1);
13256 result = INT_CST_LT (op0, op1);
13263 return constant_boolean_node (result, type);
13266 /* Build an expression for the a clean point containing EXPR with type TYPE.
13267 Don't build a cleanup point expression for EXPR which don't have side
/* Fragment: only wrap EXPR in CLEANUP_POINT_EXPR when something inside
   it actually has side effects.  */
13271 fold_build_cleanup_point_expr (tree type, tree expr)
13273 /* If the expression does not have side effects then we don't have to wrap
13274 it with a cleanup point expression. */
13275 if (!TREE_SIDE_EFFECTS (expr))
13278 /* If the expression is a return, check to see if the expression inside the
13279 return has no side effects or the right hand side of the modify expression
13280 inside the return. If either don't have side effects set we don't need to
13281 wrap the expression in a cleanup point expression. Note we don't check the
13282 left hand side of the modify because it should always be a return decl. */
13283 if (TREE_CODE (expr) == RETURN_EXPR)
13285 tree op = TREE_OPERAND (expr, 0);
13286 if (!op || !TREE_SIDE_EFFECTS (op))
13288 op = TREE_OPERAND (op, 1);
13289 if (!TREE_SIDE_EFFECTS (op))
13293 return build1 (CLEANUP_POINT_EXPR, type, expr);
13296 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13297 avoid confusing the gimplify process. */
/* Fragment: &*p -> p (with a NOP to the requested pointer type when
   needed); otherwise take the address and mark the ultimate base
   object addressable.  */
13300 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13302 /* The size of the object is not relevant when talking about its address. */
13303 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13304 t = TREE_OPERAND (t, 0);
13306 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13307 if (TREE_CODE (t) == INDIRECT_REF
13308 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13310 t = TREE_OPERAND (t, 0);
13311 if (TREE_TYPE (t) != ptrtype)
13312 t = build1 (NOP_EXPR, ptrtype, t);
/* Strip component refs to find the base object being addressed.  */
13318 while (handled_component_p (base))
13319 base = TREE_OPERAND (base, 0);
13321 TREE_ADDRESSABLE (base) = 1;
13323 t = build1 (ADDR_EXPR, ptrtype, t);
13330 build_fold_addr_expr (tree t)
13332 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13335 /* Given a pointer value OP0 and a type TYPE, return a simplified version
13336 of an indirection through OP0, or NULL_TREE if no simplification is
13340 fold_indirect_ref_1 (tree type, tree op0)
13346 subtype = TREE_TYPE (sub);
13347 if (!POINTER_TYPE_P (subtype))
13350 if (TREE_CODE (sub) == ADDR_EXPR)
13352 tree op = TREE_OPERAND (sub, 0);
13353 tree optype = TREE_TYPE (op);
13354 /* *&CONST_DECL -> to the value of the const decl. */
13355 if (TREE_CODE (op) == CONST_DECL)
13356 return DECL_INITIAL (op);
13357 /* *&p => p; make sure to handle *&"str"[cst] here. */
13358 if (type == optype)
13360 tree fop = fold_read_from_constant_string (op);
13366 /* *(foo *)&fooarray => fooarray[0] */
13367 else if (TREE_CODE (optype) == ARRAY_TYPE
13368 && type == TREE_TYPE (optype))
13370 tree type_domain = TYPE_DOMAIN (optype);
13371 tree min_val = size_zero_node;
13372 if (type_domain && TYPE_MIN_VALUE (type_domain))
13373 min_val = TYPE_MIN_VALUE (type_domain);
13374 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
13376 /* *(foo *)&complexfoo => __real__ complexfoo */
13377 else if (TREE_CODE (optype) == COMPLEX_TYPE
13378 && type == TREE_TYPE (optype))
13379 return fold_build1 (REALPART_EXPR, type, op);
13380 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
13381 else if (TREE_CODE (optype) == VECTOR_TYPE
13382 && type == TREE_TYPE (optype))
13384 tree part_width = TYPE_SIZE (type);
13385 tree index = bitsize_int (0);
13386 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
13390 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
13391 if (TREE_CODE (sub) == PLUS_EXPR
13392 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
13394 tree op00 = TREE_OPERAND (sub, 0);
13395 tree op01 = TREE_OPERAND (sub, 1);
13399 op00type = TREE_TYPE (op00);
13400 if (TREE_CODE (op00) == ADDR_EXPR
13401 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
13402 && type == TREE_TYPE (TREE_TYPE (op00type)))
13404 tree size = TYPE_SIZE_UNIT (type);
13405 if (tree_int_cst_equal (size, op01))
13406 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
13410 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
13411 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
13412 && type == TREE_TYPE (TREE_TYPE (subtype)))
13415 tree min_val = size_zero_node;
13416 sub = build_fold_indirect_ref (sub);
13417 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
13418 if (type_domain && TYPE_MIN_VALUE (type_domain))
13419 min_val = TYPE_MIN_VALUE (type_domain);
13420 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
13426 /* Builds an expression for an indirection through T, simplifying some
13430 build_fold_indirect_ref (tree t)
13432 tree type = TREE_TYPE (TREE_TYPE (t));
13433 tree sub = fold_indirect_ref_1 (type, t);
13438 return build1 (INDIRECT_REF, type, t);
13441 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13444 fold_indirect_ref (tree t)
13446 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13454 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13455 whose result is ignored. The type of the returned tree need not be
13456 the same as the original expression. */
13459 fold_ignored_result (tree t)
13461 if (!TREE_SIDE_EFFECTS (t))
13462 return integer_zero_node;
13465 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13468 t = TREE_OPERAND (t, 0);
13472 case tcc_comparison:
13473 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13474 t = TREE_OPERAND (t, 0);
13475 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13476 t = TREE_OPERAND (t, 1);
13481 case tcc_expression:
13482 switch (TREE_CODE (t))
13484 case COMPOUND_EXPR:
13485 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13487 t = TREE_OPERAND (t, 0);
13491 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13492 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13494 t = TREE_OPERAND (t, 0);
13507 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13508 This can only be applied to objects of a sizetype. */
13511 round_up (tree value, int divisor)
13513 tree div = NULL_TREE;
13515 gcc_assert (divisor > 0);
13519 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13520 have to do anything. Only do this when we are not given a const,
13521 because in that case, this check is more expensive than just
13523 if (TREE_CODE (value) != INTEGER_CST)
13525 div = build_int_cst (TREE_TYPE (value), divisor);
13527 if (multiple_of_p (TREE_TYPE (value), value, div))
13531 /* If divisor is a power of two, simplify this to bit manipulation. */
13532 if (divisor == (divisor & -divisor))
13536 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13537 value = size_binop (PLUS_EXPR, value, t);
13538 t = build_int_cst (TREE_TYPE (value), -divisor);
13539 value = size_binop (BIT_AND_EXPR, value, t);
13544 div = build_int_cst (TREE_TYPE (value), divisor);
13545 value = size_binop (CEIL_DIV_EXPR, value, div);
13546 value = size_binop (MULT_EXPR, value, div);
13552 /* Likewise, but round down. */
13555 round_down (tree value, int divisor)
13557 tree div = NULL_TREE;
13559 gcc_assert (divisor > 0);
13563 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13564 have to do anything. Only do this when we are not given a const,
13565 because in that case, this check is more expensive than just
13567 if (TREE_CODE (value) != INTEGER_CST)
13569 div = build_int_cst (TREE_TYPE (value), divisor);
13571 if (multiple_of_p (TREE_TYPE (value), value, div))
13575 /* If divisor is a power of two, simplify this to bit manipulation. */
13576 if (divisor == (divisor & -divisor))
13580 t = build_int_cst (TREE_TYPE (value), -divisor);
13581 value = size_binop (BIT_AND_EXPR, value, t);
13586 div = build_int_cst (TREE_TYPE (value), divisor);
13587 value = size_binop (FLOOR_DIV_EXPR, value, div);
13588 value = size_binop (MULT_EXPR, value, div);
13594 /* Returns the pointer to the base of the object addressed by EXP and
13595 extracts the information about the offset of the access, storing it
13596 to PBITPOS and POFFSET. */
13599 split_address_to_core_and_offset (tree exp,
13600 HOST_WIDE_INT *pbitpos, tree *poffset)
13603 enum machine_mode mode;
13604 int unsignedp, volatilep;
13605 HOST_WIDE_INT bitsize;
13607 if (TREE_CODE (exp) == ADDR_EXPR)
13609 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13610 poffset, &mode, &unsignedp, &volatilep,
13612 core = build_fold_addr_expr (core);
13618 *poffset = NULL_TREE;
13624 /* Returns true if addresses of E1 and E2 differ by a constant, false
13625 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13628 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13631 HOST_WIDE_INT bitpos1, bitpos2;
13632 tree toffset1, toffset2, tdiff, type;
13634 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13635 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13637 if (bitpos1 % BITS_PER_UNIT != 0
13638 || bitpos2 % BITS_PER_UNIT != 0
13639 || !operand_equal_p (core1, core2, 0))
13642 if (toffset1 && toffset2)
13644 type = TREE_TYPE (toffset1);
13645 if (type != TREE_TYPE (toffset2))
13646 toffset2 = fold_convert (type, toffset2);
13648 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13649 if (!cst_and_fits_in_hwi (tdiff))
13652 *diff = int_cst_value (tdiff);
13654 else if (toffset1 || toffset2)
13656 /* If only one of the offsets is non-constant, the difference cannot
13663 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13667 /* Simplify the floating point expression EXP when the sign of the
13668 result is not significant. Return NULL_TREE if no simplification
13672 fold_strip_sign_ops (tree exp)
13676 switch (TREE_CODE (exp))
13680 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13681 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
13685 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
13687 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
13688 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13689 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
13690 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
13691 arg0 ? arg0 : TREE_OPERAND (exp, 0),
13692 arg1 ? arg1 : TREE_OPERAND (exp, 1));
13695 case COMPOUND_EXPR:
13696 arg0 = TREE_OPERAND (exp, 0);
13697 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13699 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
13703 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
13704 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
13706 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
13707 arg0 ? arg0 : TREE_OPERAND (exp, 1),
13708 arg1 ? arg1 : TREE_OPERAND (exp, 2));
13713 const enum built_in_function fcode = builtin_mathfn_code (exp);
13716 CASE_FLT_FN (BUILT_IN_COPYSIGN):
13717 /* Strip copysign function call, return the 1st argument. */
13718 arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
13719 arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
13720 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
13723 /* Strip sign ops from the argument of "odd" math functions. */
13724 if (negate_mathfn_p (fcode))
13726 arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
13728 return build_function_call_expr (get_callee_fndecl (exp),
13729 build_tree_list (NULL_TREE,