/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "coretypes.h"
#include "langhooks.h"

/* Non-zero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static int native_encode_expr (tree, unsigned char *, int);
static tree native_interpret_expr (tree, unsigned char *, int);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign bit.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

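/* Illustrative sketch (not part of the original source): how
   OVERFLOW_SUM_SIGN behaves, assuming a 64-bit two's complement
   HOST_WIDE_INT.  Adding 1 to the most positive value wraps to the most
   negative one and the macro reports the sign flip; a mixed-sign
   addition never overflows.  */
#if 0
HOST_WIDE_INT a = ~((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
HOST_WIDE_INT b = 1;
HOST_WIDE_INT sum = a + b;		/* wraps to the minimum value */

gcc_assert (OVERFLOW_SUM_SIGN (a, b, sum));		/* signs agreed, sign of sum differs */
gcc_assert (!OVERFLOW_SUM_SIGN (a, (HOST_WIDE_INT) -1, a - 1));
#endif
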
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

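/* Illustrative sketch (not part of the original source): round-tripping a
   two-word integer through the four-word representation, assuming a
   64-bit HOST_WIDE_INT, so that BASE == 2^32 and each word holds one
   32-bit digit, least significant first.  */
#if 0
HOST_WIDE_INT words[4];
unsigned HOST_WIDE_INT lo = 0x89abcdef01234567, lo2;
HOST_WIDE_INT hi = 0x0fedcba987654321, hi2;

encode (words, lo, hi);    /* words[0] == 0x01234567, words[1] == 0x89abcdef, ...  */
decode (words, &lo2, &hi2);    /* each pair recombines as LOWPART + HIGHPART * BASE */
gcc_assert (lo2 == lo && hi2 == hi);
#endif
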
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}

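/* Illustrative sketch (not part of the original source): forcing the
   value -1 into an 8-bit unsigned type truncates it to 255 and reports
   overflow, since the argument and result differ.  `uchar_type' is a
   hypothetical name for any 8-bit unsigned INTEGER_TYPE node.  */
#if 0
unsigned HOST_WIDE_INT lv;
HOST_WIDE_INT hv;

gcc_assert (fit_double_type (-1, -1, &lv, &hv, uchar_type));
gcc_assert (lv == 0xff && hv == 0);	/* truncated, not sign extended */
gcc_assert (!fit_double_type (100, 0, &lv, &hv, uchar_type));
#endif
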
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.
   OVERFLOWABLE indicates if we are interested
   in overflow of the value, when >0 we are only interested in signed
   overflow, for <0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  CONST_OVERFLOWED
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
        CONST_OVERFLOWED is nonzero,
        or we set TREE_OVERFLOW.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed, bool overflowed_const)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflowed_const || overflow)
    {
      if (overflowed || overflow
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;

          return t;
        }
      else if (overflowed_const)
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_CONSTANT_OVERFLOW (t) = 1;

          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}

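/* Illustrative sketch (not part of the original source): the (l < l1)
   term above is the carry out of the low word.  With a low word of all
   ones, adding 1 carries into the high word; this particular sum does
   not overflow the signed doubleword.  */
#if 0
unsigned HOST_WIDE_INT lv;
HOST_WIDE_INT hv;

gcc_assert (!add_double_with_sign (-1, 0, 1, 0, &lv, &hv, false));
gcc_assert (lv == 0 && hv == 1);	/* 0:0xFF..FF + 0:1 == 1:0 */
#endif
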
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

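/* Illustrative sketch (not part of the original source): the upper four
   digits of the eight-digit product signal overflow.  Assuming a 64-bit
   HOST_WIDE_INT, squaring 2^64 (low word 0, high word 1) overflows an
   unsigned doubleword, while a small product does not.  */
#if 0
unsigned HOST_WIDE_INT lv;
HOST_WIDE_INT hv;

gcc_assert (mul_double_with_sign (0, 1, 0, 1, &lv, &hv, true));
gcc_assert (!mul_double_with_sign (3, 0, 4, 0, &lv, &hv, true));
gcc_assert (lv == 12 && hv == 0);
#endif
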
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

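/* Illustrative sketch (not part of the original source): ARITH selects
   between sign-filling and zero-filling.  For -32768 at 16-bit
   precision (sign-extended through both words, as these routines
   expect), shifting right by one yields -16384 arithmetically but
   0x4000 logically.  Assumes a 64-bit HOST_WIDE_INT.  */
#if 0
unsigned HOST_WIDE_INT lv;
HOST_WIDE_INT hv;

rshift_double ((unsigned HOST_WIDE_INT) -32768, -1, 1, 16, &lv, &hv, 1);
gcc_assert (lv == (unsigned HOST_WIDE_INT) -16384 && hv == -1);

rshift_double ((unsigned HOST_WIDE_INT) -32768, -1, 1, 16, &lv, &hv, 0);
gcc_assert (lv == 0x4000 && hv == 0);
#endif
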
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

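/* Illustrative sketch (not part of the original source): since each
   rotation is the OR of the two opposite logical shifts, rotating left
   and then right by the same count is the identity on PREC-bit
   values.  */
#if 0
unsigned HOST_WIDE_INT lv, lv2;
HOST_WIDE_INT hv, hv2;

lrotate_double (0x1234, 0, 4, 32, &lv, &hv);	/* 0x00001234 -> 0x00012340 */
rrotate_double (lv, hv, 4, 32, &lv2, &hv2);	/* back to 0x00001234 */
gcc_assert (lv == 0x12340 && lv2 == 0x1234 && hv2 == 0);
#endif
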
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;	/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

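/* Illustrative sketch (not part of the original source): the rounding
   codes differ only for inexact quotients.  Signed -7 / 2 gives
   quotient -3, remainder -1 under TRUNC_DIV_EXPR, but quotient -4,
   remainder 1 under FLOOR_DIV_EXPR.  */
#if 0
unsigned HOST_WIDE_INT lquo, lrem;
HOST_WIDE_INT hquo, hrem;

div_and_round_double (TRUNC_DIV_EXPR, 0, (unsigned HOST_WIDE_INT) -7, -1,
                      2, 0, &lquo, &hquo, &lrem, &hrem);
gcc_assert ((HOST_WIDE_INT) lquo == -3 && (HOST_WIDE_INT) lrem == -1);

div_and_round_double (FLOOR_DIV_EXPR, 0, (unsigned HOST_WIDE_INT) -7, -1,
                      2, 0, &lquo, &hquo, &lrem, &hrem);
gcc_assert ((HOST_WIDE_INT) lquo == -4 && (HOST_WIDE_INT) lrem == 1);
#endif
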
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}

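/* Illustrative sketch (not part of the original source): with
   hypothetical INTEGER_CST operands `six', `three' and `four' of some
   integer type, 6 / 3 is exact and folds to 2, while 6 / 4 leaves a
   remainder and yields NULL_TREE.  */
#if 0
tree q = div_if_zero_remainder (EXACT_DIV_EXPR, six, three);

gcc_assert (q != NULL_TREE && TREE_INT_CST_LOW (q) == 2);
gcc_assert (div_if_zero_remainder (EXACT_DIV_EXPR, six, four) == NULL_TREE);
#endif
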
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

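/* Illustrative sketch (not part of the original source): in a two's
   complement signed type the minimum value is the single constant whose
   negation does not fit, because it has no positive counterpart.  */
#if 0
gcc_assert (!may_negate_without_overflow_p (TYPE_MIN_VALUE (integer_type_node)));
gcc_assert (may_negate_without_overflow_p (integer_one_node));
#endif
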
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type)
          || (flag_wrapv && ! flag_trapv))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return INTEGRAL_TYPE_P (type)
             && (TYPE_UNSIGNED (type)
                 || (flag_wrapv && !flag_trapv));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return tem;
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return negate_expr (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

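/* Illustrative sketch (not part of the original source): splitting a
   hypothetical tree `in' for "a - 5" with CODE == PLUS_EXPR and
   NEGATE_P == 0 returns the variable part "a", leaves *CONP and *LITP
   null, and puts the literal 5 in *MINUS_LITP because it was
   subtracted.  */
#if 0
tree con, lit, minus_lit, var;

var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);
/* var == a, con == NULL_TREE, lit == NULL_TREE, minus_lit == 5.  */
#endif
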
/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = -int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                               TREE_CONSTANT_OVERFLOW (arg1)
                               | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}

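/* Illustrative sketch (not part of the original source): folding the sum
   of two integer constants.  7 + 9 evaluates to an INTEGER_CST 16 with
   no overflow flags set.  */
#if 0
tree a = build_int_cst (integer_type_node, 7);
tree b = build_int_cst (integer_type_node, 9);
tree sum = int_const_binop (PLUS_EXPR, a, b, 0);

gcc_assert (TREE_INT_CST_LOW (sum) == 16 && !TREE_OVERFLOW (sum));
#endif
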
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, a la int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = lang_hooks.types.signed_type (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}

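/* Illustrative sketch (not part of the original source): size_diffop of
   the sizetype constants 8 and 12 subtracts the other way around and
   negates, returning -4 in the signed ssizetype even though both
   operands are unsigned.  */
#if 0
tree d = size_diffop (size_int (8), size_int (12));

gcc_assert (tree_int_cst_equal (d, ssize_int (-4)));
#endif
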
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
                             TREE_INT_CST_HIGH (arg1),
                             /* Don't set the overflow when
                                converting a pointer.  */
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1),
                             TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
                             overflow | TREE_OVERFLOW (arg1),
                             TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree elem, list;
  int i, units;

  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  units = TYPE_VECTOR_SUBPARTS (type);

  list = NULL_TREE;
  for (i = 0; i < units; i++)
    list = tree_cons (NULL_TREE, elem, list);
  return build_vector (type, list);
}

/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
                                        TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
          || TREE_CODE (orig) == OFFSET_TYPE)
        return fold_build1 (NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
        {
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);
        }
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
                  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
        {
          tem = fold_convert_const (FLOAT_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }
      else if (TREE_CODE (arg) == REAL_CST)
        {
          tem = fold_convert_const (NOP_EXPR, type, arg);
          if (tem != NULL_TREE)
            return tem;
        }

      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
          return fold_build1 (FLOAT_EXPR, type, arg);

        case REAL_TYPE:
          return fold_build1 (NOP_EXPR, type, arg);

        case COMPLEX_TYPE:
          tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
          return fold_convert (type, tem);

        default:
          gcc_unreachable ();
        }

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
        {
        case INTEGER_TYPE:
        case BOOLEAN_TYPE: case ENUMERAL_TYPE:
        case POINTER_TYPE: case REFERENCE_TYPE:
        case REAL_TYPE:
          return build2 (COMPLEX_EXPR, type,
                         fold_convert (TREE_TYPE (type), arg),
                         fold_convert (TREE_TYPE (type), integer_zero_node));

        case COMPLEX_TYPE:
          {
            tree rpart, ipart;

            if (TREE_CODE (arg) == COMPLEX_EXPR)
              {
                rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
                ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
                return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
              }

            arg = save_expr (arg);
            rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
            ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
            rpart = fold_convert (TREE_TYPE (type), rpart);
            ipart = fold_convert (TREE_TYPE (type), ipart);
            return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
          }

        default:
          gcc_unreachable ();
        }

    case VECTOR_TYPE:
      if (integer_zerop (arg))
        return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
                  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
        return tem;
      return fold_build1 (NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
}

/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL: case PARM_DECL: case RESULT_DECL:
  case LABEL_DECL: case FUNCTION_DECL: case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case GIMPLE_MODIFY_STMT:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}

/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue (tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
}

/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue (tree x)
{
  if (pedantic_lvalues)
    return non_lvalue (x);
  else
    return x;
}

/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}

2319 swapped. This is safe for floating-point. */
2322 swap_tree_comparison (enum tree_code code)
2329 case UNORDERED_EXPR:
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}

2397 /* Convert a compcode bit-based encoding of a comparison operator back
2398 to GCC's enum tree_code representation. This function is the
2399 inverse of comparison_to_compcode. */
2401 static enum tree_code
2402 compcode_to_comparison (enum comparison_code code)
2419 return ORDERED_EXPR;
2420 case COMPCODE_UNORD:
2421 return UNORDERED_EXPR;
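/* A minimal sketch of the encoding (assuming the bit assignments of the
   comparison_code enum near the top of this file, where LT, EQ, GT and
   UNORD each occupy one bit): an operator's code is the OR of the
   outcomes for which it is true, e.g.

	COMPCODE_LE = COMPCODE_LT | COMPCODE_EQ
	COMPCODE_NE = COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD

   which is what lets combine_comparisons below combine two comparisons
   with plain bitwise AND and OR.  */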
2439 /* Return a tree for the comparison which is the combination of
2440 doing the AND or OR (depending on CODE) of the two operations LCODE
2441 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2442 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2443 if this makes the transformation invalid. */
2446 combine_comparisons (enum tree_code code, enum tree_code lcode,
2447 enum tree_code rcode, tree truth_type,
2448 tree ll_arg, tree lr_arg)
2450 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2451 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2452 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2453 enum comparison_code compcode;
2457 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2458 compcode = lcompcode & rcompcode;
2461 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2462 compcode = lcompcode | rcompcode;
2471 /* Eliminate unordered comparisons, as well as LTGT and ORD
2472 which are not used unless the mode has NaNs. */
2473 compcode &= ~COMPCODE_UNORD;
2474 if (compcode == COMPCODE_LTGT)
2475 compcode = COMPCODE_NE;
2476 else if (compcode == COMPCODE_ORD)
2477 compcode = COMPCODE_TRUE;
2479 else if (flag_trapping_math)
2481 /* Check that the original operation and the optimized ones will trap
2482 under the same condition. */
2483 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2484 && (lcompcode != COMPCODE_EQ)
2485 && (lcompcode != COMPCODE_ORD);
2486 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2487 && (rcompcode != COMPCODE_EQ)
2488 && (rcompcode != COMPCODE_ORD);
2489 bool trap = (compcode & COMPCODE_UNORD) == 0
2490 && (compcode != COMPCODE_EQ)
2491 && (compcode != COMPCODE_ORD);
2493 /* In a short-circuited boolean expression the LHS might be
2494 such that the RHS, if evaluated, will never trap. For
2495 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2496 if neither x nor y is NaN. (This is a mixed blessing: for
2497 example, the expression above will never trap, hence
2498 optimizing it to x < y would be invalid). */
2499 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;
2503 /* If the comparison was short-circuited, and only the RHS
2504 trapped, we may now generate a spurious trap. */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;
2509 /* If we changed the conditions that cause a trap, we lose. */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
2514 if (compcode == COMPCODE_TRUE)
2515 return constant_boolean_node (true, truth_type);
2516 else if (compcode == COMPCODE_FALSE)
2517 return constant_boolean_node (false, truth_type);
2519 return fold_build2 (compcode_to_comparison (compcode),
2520 truth_type, ll_arg, lr_arg);
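/* Worked example (illustrative): combining "x < y || x == y" ORs
   COMPCODE_LT with COMPCODE_EQ, giving COMPCODE_LE, so the pair folds
   to "x <= y"; combining "x < y && x > y" ANDs the codes down to
   COMPCODE_FALSE, which becomes constant false above.  For floating
   point, the UNORD bit and the trap checks above decide whether the
   rewrite remains valid.  */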
2523 /* Return nonzero if CODE is a tree code that represents a truth value. */
2526 truth_value_p (enum tree_code code)
2528 return (TREE_CODE_CLASS (code) == tcc_comparison
2529 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2530 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2531 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2534 /* Return nonzero if two operands (typically of the same tree node)
2535 are necessarily equal. If either argument has side-effects this
2536 function returns zero. FLAGS modifies behavior as follows:
2538 If OEP_ONLY_CONST is set, only return nonzero for constants.
2539 This function tests whether the operands are indistinguishable;
2540 it does not test whether they are equal using C's == operation.
2541 The distinction is important for IEEE floating point, because
2542 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2543 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2545 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2546 even though it may hold multiple values during a function.
2547 This is because a GCC tree node guarantees that nothing else is
2548 executed between the evaluation of its "operands" (which may often
2549 be evaluated in arbitrary order). Hence if the operands themselves
2550 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2551 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2552 unset means assuming isochronic (or instantaneous) tree equivalence.
2553 Unless comparing arbitrary expression trees, such as from different
2554 statements, this flag can usually be left unset.
2556 If OEP_PURE_SAME is set, then pure functions with identical arguments
2557 are considered the same. It is used when the caller has other ways
2558 to ensure that global memory is unchanged in between. */
2561 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2563 /* If either is ERROR_MARK, they aren't equal. */
2564 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2567 /* If both types don't have the same signedness, then we can't consider
2568 them equal. We must check this before the STRIP_NOPS calls
2569 because they may change the signedness of the arguments. */
2570 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
2575 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2581 /* In case both args are comparisons but with different comparison
2582 code, try to swap the comparison operands of one arg to produce
2583 a match and compare that variant. */
2584 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2585 && COMPARISON_CLASS_P (arg0)
2586 && COMPARISON_CLASS_P (arg1))
2588 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2590 if (TREE_CODE (arg0) == swap_code)
2591 return operand_equal_p (TREE_OPERAND (arg0, 0),
2592 TREE_OPERAND (arg1, 1), flags)
2593 && operand_equal_p (TREE_OPERAND (arg0, 1),
2594 TREE_OPERAND (arg1, 0), flags);
2597 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2598 /* This is needed for conversions and for COMPONENT_REF.
2599 Might as well play it safe and always test this. */
2600 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2601 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2602 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2605 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2606 We don't care about side effects in that case because the SAVE_EXPR
2607 takes care of that for us. In all other cases, two expressions are
2608 equal if they have no side effects. If we have two identical
2609 expressions with side effects that should be treated the same due
2610 to the only side effects being identical SAVE_EXPR's, that will
2611 be detected in the recursive calls below. */
2612 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2613 && (TREE_CODE (arg0) == SAVE_EXPR
2614 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2617 /* Next handle constant cases, those for which we can return 1 even
2618 if ONLY_CONST is set. */
2619 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2620 switch (TREE_CODE (arg0))
2623 return tree_int_cst_equal (arg0, arg1);
2626 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2627 TREE_REAL_CST (arg1)))
2631 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2633 /* If we do not distinguish between signed and unsigned zero,
2634 consider them equal. */
2635 if (real_zerop (arg0) && real_zerop (arg1))
2644 v1 = TREE_VECTOR_CST_ELTS (arg0);
2645 v2 = TREE_VECTOR_CST_ELTS (arg1);
2648 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2651 v1 = TREE_CHAIN (v1);
2652 v2 = TREE_CHAIN (v2);
2659 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2661 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2665 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2666 && ! memcmp (TREE_STRING_POINTER (arg0),
2667 TREE_STRING_POINTER (arg1),
2668 TREE_STRING_LENGTH (arg0)));
2671 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2677 if (flags & OEP_ONLY_CONST)
2680 /* Define macros to test an operand from arg0 and arg1 for equality and a
2681 variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, they both
   must be; otherwise, do the normal comparison.  */
2684 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2685 TREE_OPERAND (arg1, N), flags)
2687 #define OP_SAME_WITH_NULL(N) \
2688 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2689 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2691 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2694 /* Two conversions are equal only if signedness and modes match. */
2695 switch (TREE_CODE (arg0))
2699 case FIX_TRUNC_EXPR:
2700 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2701 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2711 case tcc_comparison:
2713 if (OP_SAME (0) && OP_SAME (1))
2716 /* For commutative ops, allow the other order. */
2717 return (commutative_tree_code (TREE_CODE (arg0))
2718 && operand_equal_p (TREE_OPERAND (arg0, 0),
2719 TREE_OPERAND (arg1, 1), flags)
2720 && operand_equal_p (TREE_OPERAND (arg0, 1),
2721 TREE_OPERAND (arg1, 0), flags));
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contains a side effect, these cannot be equal.  */
2726 if (TREE_SIDE_EFFECTS (arg0)
2727 || TREE_SIDE_EFFECTS (arg1))
2730 switch (TREE_CODE (arg0))
2733 case ALIGN_INDIRECT_REF:
2734 case MISALIGNED_INDIRECT_REF:
2740 case ARRAY_RANGE_REF:
2741 /* Operands 2 and 3 may be null. */
2744 && OP_SAME_WITH_NULL (2)
2745 && OP_SAME_WITH_NULL (3));
2748 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2749 may be NULL when we're called to compare MEM_EXPRs. */
2750 return OP_SAME_WITH_NULL (0)
2752 && OP_SAME_WITH_NULL (2);
2755 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2761 case tcc_expression:
2762 switch (TREE_CODE (arg0))
2765 case TRUTH_NOT_EXPR:
2768 case TRUTH_ANDIF_EXPR:
2769 case TRUTH_ORIF_EXPR:
2770 return OP_SAME (0) && OP_SAME (1);
2772 case TRUTH_AND_EXPR:
2774 case TRUTH_XOR_EXPR:
2775 if (OP_SAME (0) && OP_SAME (1))
2778 /* Otherwise take into account this is a commutative operation. */
2779 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2780 TREE_OPERAND (arg1, 1), flags)
2781 && operand_equal_p (TREE_OPERAND (arg0, 1),
2782 TREE_OPERAND (arg1, 0), flags));
2785 /* If the CALL_EXPRs call different functions, then they
	 clearly cannot be equal.  */
2791 unsigned int cef = call_expr_flags (arg0);
2792 if (flags & OEP_PURE_SAME)
2793 cef &= ECF_CONST | ECF_PURE;
2800 /* Now see if all the arguments are the same. operand_equal_p
2801 does not handle TREE_LIST, so we walk the operands here
2802 feeding them to operand_equal_p. */
2803 arg0 = TREE_OPERAND (arg0, 1);
2804 arg1 = TREE_OPERAND (arg1, 1);
2805 while (arg0 && arg1)
2807 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2811 arg0 = TREE_CHAIN (arg0);
2812 arg1 = TREE_CHAIN (arg1);
2815 /* If we get here and both argument lists are exhausted
2816 then the CALL_EXPRs are equal. */
2817 return ! (arg0 || arg1);
2823 case tcc_declaration:
2824 /* Consider __builtin_sqrt equal to sqrt. */
2825 return (TREE_CODE (arg0) == FUNCTION_DECL
2826 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2827 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2828 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2835 #undef OP_SAME_WITH_NULL
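/* Usage sketch (illustrative): given GENERIC trees for "a + b" and
   "b + a" with matching types, operand_equal_p returns nonzero via the
   commutative case above.  It deliberately returns zero for two
   occurrences of a call "f ()" (side effects), and for the REAL_CSTs
   -0.0 and 0.0 when the mode honors signed zeros, matching the IEEE
   caveats in the header comment.  */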
2838 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2839 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2841 When in doubt, return 0. */
2844 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2846 int unsignedp1, unsignedpo;
2847 tree primarg0, primarg1, primother;
2848 unsigned int correct_width;
2850 if (operand_equal_p (arg0, arg1, 0))
2853 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2854 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2857 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2858 and see if the inner values are the same. This removes any
2859 signedness comparison, which doesn't matter here. */
2860 primarg0 = arg0, primarg1 = arg1;
2861 STRIP_NOPS (primarg0);
2862 STRIP_NOPS (primarg1);
2863 if (operand_equal_p (primarg0, primarg1, 0))
2866 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2867 actual comparison operand, ARG0.
2869 First throw away any conversions to wider types
2870 already present in the operands. */
2872 primarg1 = get_narrower (arg1, &unsignedp1);
2873 primother = get_narrower (other, &unsignedpo);
2875 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2876 if (unsignedp1 == unsignedpo
2877 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2878 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2880 tree type = TREE_TYPE (arg0);
2882 /* Make sure shorter operand is extended the right way
2883 to match the longer operand. */
2884 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2885 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2887 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2894 /* See if ARG is an expression that is either a comparison or is performing
2895 arithmetic on comparisons. The comparisons must only be comparing
2896 two different values, which will be stored in *CVAL1 and *CVAL2; if
2897 they are nonzero it means that some operands have already been found.
2898 No variables may be used anywhere else in the expression except in the
2899 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2900 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2902 If this is true, return 1. Otherwise, return zero. */
2905 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2907 enum tree_code code = TREE_CODE (arg);
2908 enum tree_code_class class = TREE_CODE_CLASS (code);
2910 /* We can handle some of the tcc_expression cases here. */
2911 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2913 else if (class == tcc_expression
2914 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2915 || code == COMPOUND_EXPR))
2918 else if (class == tcc_expression && code == SAVE_EXPR
2919 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2921 /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
2923 if (*cval1 || *cval2)
2933 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2936 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2937 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2938 cval1, cval2, save_p));
2943 case tcc_expression:
2944 if (code == COND_EXPR)
2945 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2946 cval1, cval2, save_p)
2947 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2948 cval1, cval2, save_p)
2949 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2950 cval1, cval2, save_p));
2953 case tcc_comparison:
2954 /* First see if we can handle the first operand, then the second. For
2955 the second operand, we know *CVAL1 can't be zero. It must be that
2956 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */
2960 if (operand_equal_p (TREE_OPERAND (arg, 0),
2961 TREE_OPERAND (arg, 1), 0))
2965 *cval1 = TREE_OPERAND (arg, 0);
2966 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2968 else if (*cval2 == 0)
2969 *cval2 = TREE_OPERAND (arg, 0);
2970 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2975 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2977 else if (*cval2 == 0)
2978 *cval2 = TREE_OPERAND (arg, 1);
2979 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2991 /* ARG is a tree that is known to contain just arithmetic operations and
2992 comparisons. Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */
2997 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2999 tree type = TREE_TYPE (arg);
3000 enum tree_code code = TREE_CODE (arg);
3001 enum tree_code_class class = TREE_CODE_CLASS (code);
3003 /* We can handle some of the tcc_expression cases here. */
3004 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3006 else if (class == tcc_expression
3007 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3013 return fold_build1 (code, type,
3014 eval_subst (TREE_OPERAND (arg, 0),
3015 old0, new0, old1, new1));
3018 return fold_build2 (code, type,
3019 eval_subst (TREE_OPERAND (arg, 0),
3020 old0, new0, old1, new1),
3021 eval_subst (TREE_OPERAND (arg, 1),
3022 old0, new0, old1, new1));
3024 case tcc_expression:
3028 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3031 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3034 return fold_build3 (code, type,
3035 eval_subst (TREE_OPERAND (arg, 0),
3036 old0, new0, old1, new1),
3037 eval_subst (TREE_OPERAND (arg, 1),
3038 old0, new0, old1, new1),
3039 eval_subst (TREE_OPERAND (arg, 2),
3040 old0, new0, old1, new1));
3044 /* Fall through - ??? */
3046 case tcc_comparison:
3048 tree arg0 = TREE_OPERAND (arg, 0);
3049 tree arg1 = TREE_OPERAND (arg, 1);
3051 /* We need to check both for exact equality and tree equality. The
3052 former will be true if the operand has a side-effect. In that
3053 case, we know the operand occurred exactly once. */
3055 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3057 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3060 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3062 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3065 return fold_build2 (code, type, arg0, arg1);
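/* Illustrative example of the mechanics: substituting NEW0 = 0 and
   NEW1 = 1 into the comparison "OLD0 < OLD1" rebuilds it as "0 < 1",
   which the fold_build2 call above reduces to constant true.  fold uses
   this to probe how a two-valued expression behaves under each possible
   ordering of its comparison values.  */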
3073 /* Return a tree for the case when the result of an expression is RESULT
3074 converted to TYPE and OMITTED was previously an operand of the expression
3075 but is now not needed (e.g., we folded OMITTED * 0).
3077 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3078 the conversion of RESULT to TYPE. */
3081 omit_one_operand (tree type, tree result, tree omitted)
3083 tree t = fold_convert (type, result);
3085 if (TREE_SIDE_EFFECTS (omitted))
3086 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3088 return non_lvalue (t);
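/* Example (illustrative): folding "f () * 0" cannot return plain 0,
   since the call must still be evaluated.  omit_one_operand yields the
   COMPOUND_EXPR "(f (), 0)" in that case, whereas "x * 0" with no side
   effects folds directly to a non-lvalue 0.  */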
3091 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3094 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3096 tree t = fold_convert (type, result);
3098 if (TREE_SIDE_EFFECTS (omitted))
3099 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3101 return pedantic_non_lvalue (t);
3104 /* Return a tree for the case when the result of an expression is RESULT
3105 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3106 of the expression but are now not needed.
3108 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3109 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3110 evaluated before OMITTED2. Otherwise, if neither has side effects,
3111 just do the conversion of RESULT to TYPE. */
3114 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3116 tree t = fold_convert (type, result);
3118 if (TREE_SIDE_EFFECTS (omitted2))
3119 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3120 if (TREE_SIDE_EFFECTS (omitted1))
3121 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3123 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
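/* Example (illustrative): when both omitted operands have side effects,
   the nesting built above is "(omitted1, (omitted2, result))", so
   OMITTED1 is indeed evaluated before OMITTED2, as the header comment
   promises.  */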
3127 /* Return a simplified tree node for the truth-negation of ARG. This
3128 never alters ARG itself. We assume that ARG is an operation that
3129 returns a truth value (0 or 1).
3131 FIXME: one would think we would fold the result, but it causes
3132 problems with the dominator optimizer. */
3135 fold_truth_not_expr (tree arg)
3137 tree type = TREE_TYPE (arg);
3138 enum tree_code code = TREE_CODE (arg);
3140 /* If this is a comparison, we can simply invert it, except for
3141 floating-point non-equality comparisons, in which case we just
3142 enclose a TRUTH_NOT_EXPR around what we have. */
3144 if (TREE_CODE_CLASS (code) == tcc_comparison)
3146 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3147 if (FLOAT_TYPE_P (op_type)
3148 && flag_trapping_math
3149 && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;
3154 code = invert_tree_comparison (code,
3155 HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;
3159 return build2 (code, type,
3160 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3167 return constant_boolean_node (integer_zerop (arg), type);
3169 case TRUTH_AND_EXPR:
3170 return build2 (TRUTH_OR_EXPR, type,
3171 invert_truthvalue (TREE_OPERAND (arg, 0)),
3172 invert_truthvalue (TREE_OPERAND (arg, 1)));
3175 return build2 (TRUTH_AND_EXPR, type,
3176 invert_truthvalue (TREE_OPERAND (arg, 0)),
3177 invert_truthvalue (TREE_OPERAND (arg, 1)));
3179 case TRUTH_XOR_EXPR:
3180 /* Here we can invert either operand. We invert the first operand
3181 unless the second operand is a TRUTH_NOT_EXPR in which case our
3182 result is the XOR of the first operand with the inside of the
3183 negation of the second operand. */
3185 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3186 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3187 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3189 return build2 (TRUTH_XOR_EXPR, type,
3190 invert_truthvalue (TREE_OPERAND (arg, 0)),
3191 TREE_OPERAND (arg, 1));
3193 case TRUTH_ANDIF_EXPR:
3194 return build2 (TRUTH_ORIF_EXPR, type,
3195 invert_truthvalue (TREE_OPERAND (arg, 0)),
3196 invert_truthvalue (TREE_OPERAND (arg, 1)));
3198 case TRUTH_ORIF_EXPR:
3199 return build2 (TRUTH_ANDIF_EXPR, type,
3200 invert_truthvalue (TREE_OPERAND (arg, 0)),
3201 invert_truthvalue (TREE_OPERAND (arg, 1)));
3203 case TRUTH_NOT_EXPR:
3204 return TREE_OPERAND (arg, 0);
3208 tree arg1 = TREE_OPERAND (arg, 1);
3209 tree arg2 = TREE_OPERAND (arg, 2);
3210 /* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
3213 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3214 VOID_TYPE_P (TREE_TYPE (arg1))
3215 ? arg1 : invert_truthvalue (arg1),
3216 VOID_TYPE_P (TREE_TYPE (arg2))
3217 ? arg2 : invert_truthvalue (arg2));
3221 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3222 invert_truthvalue (TREE_OPERAND (arg, 1)));
3224 case NON_LVALUE_EXPR:
3225 return invert_truthvalue (TREE_OPERAND (arg, 0));
3228 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3229 return build1 (TRUTH_NOT_EXPR, type, arg);
3233 return build1 (TREE_CODE (arg), type,
3234 invert_truthvalue (TREE_OPERAND (arg, 0)));
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
3239 return build2 (EQ_EXPR, type, arg,
3240 build_int_cst (type, 0));
3243 return build1 (TRUTH_NOT_EXPR, type, arg);
3245 case CLEANUP_POINT_EXPR:
3246 return build1 (CLEANUP_POINT_EXPR, type,
3247 invert_truthvalue (TREE_OPERAND (arg, 0)));
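/* Worked examples (illustrative): the AND/OR cases above are
   De Morgan's laws, so !(a && b) becomes !a || !b and conversely.  The
   comparison case is the delicate one: !(x < y) becomes x >= y for
   integers, but for floating point with NaNs it must become the
   unordered form (UNGE_EXPR), and under -ftrapping-math the negation is
   left as an explicit TRUTH_NOT_EXPR instead.  */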
3256 /* Return a simplified tree node for the truth-negation of ARG. This
3257 never alters ARG itself. We assume that ARG is an operation that
3258 returns a truth value (0 or 1).
3260 FIXME: one would think we would fold the result, but it causes
3261 problems with the dominator optimizer. */
3264 invert_truthvalue (tree arg)
  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (arg);
  if (!tem)
    tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);

  return tem;
3278 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3279 operands are another bit-wise operation with a common input. If so,
3280 distribute the bit operations to save an operation and possibly two if
3281 constants are involved. For example, convert
3282 (A | B) & (A | C) into A | (B & C)
3283 Further simplification will occur if B and C are constants.
3285 If this optimization cannot be done, 0 will be returned. */
3288 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3293 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3294 || TREE_CODE (arg0) == code
3295 || (TREE_CODE (arg0) != BIT_AND_EXPR
3296 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3299 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3301 common = TREE_OPERAND (arg0, 0);
3302 left = TREE_OPERAND (arg0, 1);
3303 right = TREE_OPERAND (arg1, 1);
3305 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3307 common = TREE_OPERAND (arg0, 0);
3308 left = TREE_OPERAND (arg0, 1);
3309 right = TREE_OPERAND (arg1, 0);
3311 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3313 common = TREE_OPERAND (arg0, 1);
3314 left = TREE_OPERAND (arg0, 0);
3315 right = TREE_OPERAND (arg1, 1);
3317 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3319 common = TREE_OPERAND (arg0, 1);
3320 left = TREE_OPERAND (arg0, 0);
3321 right = TREE_OPERAND (arg1, 0);
3326 return fold_build2 (TREE_CODE (arg0), type, common,
3327 fold_build2 (code, type, left, right));
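/* A self-contained sketch of the identity used above (illustrative
   only, hence excluded from compilation; plain C, no GCC internals):  */
#if 0
static int
distribute_bit_expr_check (void)
{
  int a = 0xf0, b = 0x33, c = 0x5a;
  /* (A | B) & (A | C) == A | (B & C), and dually
     (A & B) | (A & C) == A & (B | C).  */
  return ((a | b) & (a | c)) == (a | (b & c))
	 && ((a & b) | (a & c)) == (a & (b | c));
}
#endif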
3330 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3331 with code CODE. This optimization is unsafe. */
3333 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3335 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3336 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3338 /* (A / C) +- (B / C) -> (A +- B) / C. */
3340 && operand_equal_p (TREE_OPERAND (arg0, 1),
3341 TREE_OPERAND (arg1, 1), 0))
3342 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3343 fold_build2 (code, type,
3344 TREE_OPERAND (arg0, 0),
3345 TREE_OPERAND (arg1, 0)),
3346 TREE_OPERAND (arg0, 1));
3348 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3349 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3350 TREE_OPERAND (arg1, 0), 0)
3351 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3352 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3354 REAL_VALUE_TYPE r0, r1;
3355 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3356 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3358 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3360 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3361 real_arithmetic (&r0, code, &r0, &r1);
3362 return fold_build2 (MULT_EXPR, type,
3363 TREE_OPERAND (arg0, 0),
3364 build_real (type, r0));
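/* Worked example (illustrative): under the unsafe-math optimizations
   this implements, "a/c + b/c" becomes "(a + b) / c", and
   "a/2.0 - a/4.0" becomes "a * (0.5 - 0.25)", i.e. "a * 0.25".  Either
   rewrite can change rounding and overflow behavior, which is why the
   header comment calls the optimization unsafe.  */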
3370 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3371 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3374 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3381 tree size = TYPE_SIZE (TREE_TYPE (inner));
3382 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3383 || POINTER_TYPE_P (TREE_TYPE (inner)))
3384 && host_integerp (size, 0)
3385 && tree_low_cst (size, 0) == bitsize)
3386 return fold_convert (type, inner);
3389 result = build3 (BIT_FIELD_REF, type, inner,
3390 size_int (bitsize), bitsize_int (bitpos));
3392 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3397 /* Optimize a bit-field compare.
3399 There are two cases: First is a compare against a constant and the
3400 second is a comparison of two items where the fields are at the same
3401 bit position relative to the start of a chunk (byte, halfword, word)
3402 large enough to contain it. In these cases we can avoid the shift
3403 implicit in bitfield extractions.
3405 For constants, we emit a compare of the shifted constant with the
3406 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3407 compared. For two fields at the same position, we do the ANDs with the
3408 similar mask and compare the result of the ANDs.
3410 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3411 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3412 are the left and right operands of the comparison, respectively.
3414 If the optimization described above can be done, we return the resulting
3415 tree. Otherwise we return zero. */
3418 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3421 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3422 tree type = TREE_TYPE (lhs);
3423 tree signed_type, unsigned_type;
3424 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3425 enum machine_mode lmode, rmode, nmode;
3426 int lunsignedp, runsignedp;
3427 int lvolatilep = 0, rvolatilep = 0;
3428 tree linner, rinner = NULL_TREE;
3432 /* Get all the information about the extractions being done. If the bit size
     is the same as the size of the underlying object, we aren't doing an
3434 extraction at all and so can do nothing. We also don't want to
3435 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3436 then will no longer be able to replace it. */
3437 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3438 &lunsignedp, &lvolatilep, false);
3439 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3440 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3445 /* If this is not a constant, we can only do something if bit positions,
3446 sizes, and signedness are the same. */
3447 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3448 &runsignedp, &rvolatilep, false);
3450 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3451 || lunsignedp != runsignedp || offset != 0
3452 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3456 /* See if we can find a mode to refer to this field. We should be able to,
3457 but fail if we can't. */
3458 nmode = get_best_mode (lbitsize, lbitpos,
3459 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3460 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3461 TYPE_ALIGN (TREE_TYPE (rinner))),
3462 word_mode, lvolatilep || rvolatilep);
3463 if (nmode == VOIDmode)
  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
3468 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3469 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3471 /* Compute the bit position and size for the new reference and our offset
3472 within it. If the new reference is the same size as the original, we
3473 won't optimize anything, so return zero. */
3474 nbitsize = GET_MODE_BITSIZE (nmode);
3475 nbitpos = lbitpos & ~ (nbitsize - 1);
3477 if (nbitsize == lbitsize)
3480 if (BYTES_BIG_ENDIAN)
3481 lbitpos = nbitsize - lbitsize - lbitpos;
3483 /* Make the mask to be used against the extracted field. */
3484 mask = build_int_cst_type (unsigned_type, -1);
3485 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3486 mask = const_binop (RSHIFT_EXPR, mask,
3487 size_int (nbitsize - lbitsize - lbitpos), 0);
  /* If not comparing with constant, just rework the comparison
     and return.  */
3492 return fold_build2 (code, compare_type,
3493 fold_build2 (BIT_AND_EXPR, unsigned_type,
3494 make_bit_field_ref (linner,
3499 fold_build2 (BIT_AND_EXPR, unsigned_type,
3500 make_bit_field_ref (rinner,
3506 /* Otherwise, we are handling the constant case. See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
3508 this not only for its own sake, but to avoid having to test for this
3509 error case below. If we didn't, we might generate wrong code.
3511 For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree
     with the sign bit.  */
3517 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3518 fold_convert (unsigned_type, rhs),
3519 size_int (lbitsize), 0)))
3521 warning (0, "comparison is always %d due to width of bit-field",
3523 return constant_boolean_node (code == NE_EXPR, compare_type);
3528 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3529 size_int (lbitsize - 1), 0);
3530 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3532 warning (0, "comparison is always %d due to width of bit-field",
3534 return constant_boolean_node (code == NE_EXPR, compare_type);
3538 /* Single-bit compares should always be against zero. */
3539 if (lbitsize == 1 && ! integer_zerop (rhs))
3541 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3542 rhs = build_int_cst (type, 0);
3545 /* Make a new bitfield reference, shift the constant over the
3546 appropriate number of bits and mask it with the computed mask
3547 (in case this was a signed field). If we changed it, make a new one. */
3548 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }
3555 rhs = const_binop (BIT_AND_EXPR,
3556 const_binop (LSHIFT_EXPR,
3557 fold_convert (unsigned_type, rhs),
3558 size_int (lbitpos), 0),
3561 return build2 (code, compare_type,
3562 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
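/* Sketch of the constant case (illustrative; the field layout is an
   assumption for the example only):  given

	struct s { unsigned a : 3; unsigned b : 5; } x;
	... x.b == 7 ...

   rather than extracting x.b with shifts, the code above loads the
   containing word W once and compares masked bits in place:

	(W & MASK) == ((7 << BITPOS) & MASK)

   where MASK covers b's five bits at their position within W.  */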
3566 /* Subroutine for fold_truthop: decode a field reference.
3568 If EXP is a comparison reference, we return the innermost reference.
3570 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3571 set to the starting bit number.
3573 If the innermost field can be completely contained in a mode-sized
3574 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
   *PVOLATILEP is set to 1 if any expression encountered is volatile;
3577 otherwise it is not changed.
3579 *PUNSIGNEDP is set to the signedness of the field.
3581 *PMASK is set to the mask used. This is either contained in a
3582 BIT_AND_EXPR or derived from the width of the field.
3584 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3586 Return 0 if this is not a component reference or is one that we can't
3587 do anything with. */
3590 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3591 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3592 int *punsignedp, int *pvolatilep,
3593 tree *pmask, tree *pand_mask)
3595 tree outer_type = 0;
3597 tree mask, inner, offset;
3599 unsigned int precision;
3601 /* All the optimizations using this function assume integer fields.
3602 There are problems with FP fields since the type_for_size call
3603 below can fail for, e.g., XFmode. */
3604 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3607 /* We are interested in the bare arrangement of bits, so strip everything
3608 that doesn't affect the machine mode. However, record the type of the
3609 outermost expression if it may matter below. */
3610 if (TREE_CODE (exp) == NOP_EXPR
3611 || TREE_CODE (exp) == CONVERT_EXPR
3612 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3613 outer_type = TREE_TYPE (exp);
3616 if (TREE_CODE (exp) == BIT_AND_EXPR)
3618 and_mask = TREE_OPERAND (exp, 1);
3619 exp = TREE_OPERAND (exp, 0);
3620 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3621 if (TREE_CODE (and_mask) != INTEGER_CST)
3625 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3626 punsignedp, pvolatilep, false);
3627 if ((inner == exp && and_mask == 0)
3628 || *pbitsize < 0 || offset != 0
3629 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3632 /* If the number of bits in the reference is the same as the bitsize of
3633 the outer type, then the outer type gives the signedness. Otherwise
3634 (in case of a small bitfield) the signedness is unchanged. */
3635 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3636 *punsignedp = TYPE_UNSIGNED (outer_type);
3638 /* Compute the mask to access the bitfield. */
3639 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3640 precision = TYPE_PRECISION (unsigned_type);
3642 mask = build_int_cst_type (unsigned_type, -1);
3644 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3645 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3647 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3649 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3650 fold_convert (unsigned_type, and_mask), mask);
3653 *pand_mask = and_mask;
/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bits.  */
3661 all_ones_mask_p (tree mask, int size)
3663 tree type = TREE_TYPE (mask);
3664 unsigned int precision = TYPE_PRECISION (type);
3667 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3670 tree_int_cst_equal (mask,
3671 const_binop (RSHIFT_EXPR,
3672 const_binop (LSHIFT_EXPR, tmask,
3673 size_int (precision - size),
3675 size_int (precision - size), 0));
3678 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3679 represents the sign bit of EXP's type. If EXP represents a sign
3680 or zero extension, also test VAL against the unextended type.
3681 The return value is the (sub)expression whose sign bit is VAL,
3682 or NULL_TREE otherwise. */
3685 sign_bit_p (tree exp, tree val)
3687 unsigned HOST_WIDE_INT mask_lo, lo;
3688 HOST_WIDE_INT mask_hi, hi;
3692 /* Tree EXP must have an integral type. */
3693 t = TREE_TYPE (exp);
3694 if (! INTEGRAL_TYPE_P (t))
3697 /* Tree VAL must be an integer constant. */
3698 if (TREE_CODE (val) != INTEGER_CST
3699 || TREE_CONSTANT_OVERFLOW (val))
3702 width = TYPE_PRECISION (t);
3703 if (width > HOST_BITS_PER_WIDE_INT)
3705 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3708 mask_hi = ((unsigned HOST_WIDE_INT) -1
3709 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3715 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3718 mask_lo = ((unsigned HOST_WIDE_INT) -1
3719 >> (HOST_BITS_PER_WIDE_INT - width));
3722 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3723 treat VAL as if it were unsigned. */
3724 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3725 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3728 /* Handle extension from a narrower type. */
3729 if (TREE_CODE (exp) == NOP_EXPR
3730 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3731 return sign_bit_p (TREE_OPERAND (exp, 0), val);
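/* Example (illustrative): for a 32-bit int X, sign_bit_p returns the
   expression when VAL is 0x80000000, which lets fold rewrite
   "(X & 0x80000000) != 0" as "X < 0".  The NOP_EXPR case recognizes the
   same test applied through a widening conversion.  */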
3736 /* Subroutine for fold_truthop: determine if an operand is simple enough
3737 to be evaluated unconditionally. */
3740 simple_operand_p (tree exp)
3742 /* Strip any conversions that don't change the machine mode. */
3745 return (CONSTANT_CLASS_P (exp)
3746 || TREE_CODE (exp) == SSA_NAME
3748 && ! TREE_ADDRESSABLE (exp)
3749 && ! TREE_THIS_VOLATILE (exp)
3750 && ! DECL_NONLOCAL (exp)
3751 /* Don't regard global variables as simple. They may be
3752 allocated in ways unknown to the compiler (shared memory,
3753 #pragma weak, etc). */
3754 && ! TREE_PUBLIC (exp)
3755 && ! DECL_EXTERNAL (exp)
3756 /* Loading a static variable is unduly expensive, but global
3757 registers aren't expensive. */
3758 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3761 /* The following functions are subroutines to fold_range_test and allow it to
3762 try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3
3771 We describe each set of comparisons as being either inside or outside
3772 a range, using a variable named like IN_P, and then describe the
3773 range with a lower and upper bound. If one of the bounds is omitted,
3774 it represents either the highest or lowest value of the type.
3776 In the comments below, we represent a range by two numbers in brackets
3777 preceded by a "+" to designate being inside that range, or a "-" to
3778 designate being outside that range, so the condition can be inverted by
3779 flipping the prefix. An omitted bound is represented by a "-". For
3780 example, "- [-, 10]" means being outside the range starting at the lowest
3781 possible value and ending at 10, in other words, being greater than 10.
3782 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3785 We set up things so that the missing bounds are handled in a consistent
3786 manner so neither a missing bound nor "true" and "false" need to be
3787 handled using a special case. */
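/* A standalone illustration of the rewrite (plain C, illustrative only,
   hence excluded from compilation):  */
#if 0
static int
range_test_check (int x)
{
  /* The two forms agree for every int X; the unsigned subtraction
     avoids signed overflow in the sketch.  */
  int slow = (x == 2 || x == 3 || x == 4 || x == 5);
  int fast = ((unsigned) x - 2 <= 3);
  return slow == fast;
}
#endif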
3789 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3790 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3791 and UPPER1_P are nonzero if the respective argument is an upper bound
3792 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3793 must be specified for a comparison. ARG1 will be converted to ARG0's
3794 type if both are specified. */
3797 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3798 tree arg1, int upper1_p)
3804 /* If neither arg represents infinity, do the normal operation.
3805 Else, if not a comparison, return infinity. Else handle the special
3806 comparison rules. Note that most of the cases below won't occur, but
3807 are handled for consistency. */
3809 if (arg0 != 0 && arg1 != 0)
3811 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3812 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3814 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3817 if (TREE_CODE_CLASS (code) != tcc_comparison)
3820 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3821 for neither. In real maths, we cannot assume open ended ranges are
3822 the same. But, this is computer arithmetic, where numbers are finite.
3823 We can therefore make the transformation of any unbounded range with
3824 the value Z, Z being greater than any representable number. This permits
3825 us to treat unbounded ranges as equal. */
3826 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3827 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3831 result = sgn0 == sgn1;
3834 result = sgn0 != sgn1;
3837 result = sgn0 < sgn1;
3840 result = sgn0 <= sgn1;
3843 result = sgn0 > sgn1;
3846 result = sgn0 >= sgn1;
3852 return constant_boolean_node (result, type);
3855 /* Given EXP, a logical expression, set the range it is testing into
3856 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3857 actually being tested. *PLOW and *PHIGH will be made of the same type
3858 as the returned expression. If EXP is not a comparison, we will most
3859 likely not be returning a useful value and range. */
3862 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3864 enum tree_code code;
3865 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3866 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3868 tree low, high, n_low, n_high;
3870 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3871 and see if we can refine the range. Some of the cases below may not
3872 happen, but it doesn't seem worth worrying about this. We "continue"
3873 the outer loop when we've changed something; otherwise we "break"
3874 the switch, which will "break" the while. */
3877 low = high = build_int_cst (TREE_TYPE (exp), 0);
3881 code = TREE_CODE (exp);
3882 exp_type = TREE_TYPE (exp);
3884 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3886 if (TREE_CODE_LENGTH (code) > 0)
3887 arg0 = TREE_OPERAND (exp, 0);
3888 if (TREE_CODE_CLASS (code) == tcc_comparison
3889 || TREE_CODE_CLASS (code) == tcc_unary
3890 || TREE_CODE_CLASS (code) == tcc_binary)
3891 arg0_type = TREE_TYPE (arg0);
3892 if (TREE_CODE_CLASS (code) == tcc_binary
3893 || TREE_CODE_CLASS (code) == tcc_comparison
3894 || (TREE_CODE_CLASS (code) == tcc_expression
3895 && TREE_CODE_LENGTH (code) > 1))
3896 arg1 = TREE_OPERAND (exp, 1);
3901 case TRUTH_NOT_EXPR:
3902 in_p = ! in_p, exp = arg0;
3905 case EQ_EXPR: case NE_EXPR:
3906 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3907 /* We can only do something if the range is testing for zero
3908 and if the second operand is an integer constant. Note that
3909 saying something is "in" the range we make is done by
3910 complementing IN_P since it will set in the initial case of
3911 being not equal to zero; "out" is leaving it alone. */
3912 if (low == 0 || high == 0
3913 || ! integer_zerop (low) || ! integer_zerop (high)
3914 || TREE_CODE (arg1) != INTEGER_CST)
3919 case NE_EXPR: /* - [c, c] */
3922 case EQ_EXPR: /* + [c, c] */
3923 in_p = ! in_p, low = high = arg1;
3925 case GT_EXPR: /* - [-, c] */
3926 low = 0, high = arg1;
3928 case GE_EXPR: /* + [c, -] */
3929 in_p = ! in_p, low = arg1, high = 0;
3931 case LT_EXPR: /* - [c, -] */
3932 low = arg1, high = 0;
3934 case LE_EXPR: /* + [-, c] */
3935 in_p = ! in_p, low = 0, high = arg1;
3941 /* If this is an unsigned comparison, we also know that EXP is
3942 greater than or equal to zero. We base the range tests we make
3943 on that fact, so we record it here so we can parse existing
3944 range tests. We test arg0_type since often the return type
3945 of, e.g. EQ_EXPR, is boolean. */
3946 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3948 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3950 build_int_cst (arg0_type, 0),
3954 in_p = n_in_p, low = n_low, high = n_high;
3956 /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
3959 if (high == 0 && low && ! integer_zerop (low))
3962 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3963 integer_one_node, 0);
3964 low = build_int_cst (arg0_type, 0);
3972 /* (-x) IN [a,b] -> x in [-b, -a] */
3973 n_low = range_binop (MINUS_EXPR, exp_type,
3974 build_int_cst (exp_type, 0),
3976 n_high = range_binop (MINUS_EXPR, exp_type,
3977 build_int_cst (exp_type, 0),
3979 low = n_low, high = n_high;
3985 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3986 build_int_cst (exp_type, 1));
3989 case PLUS_EXPR: case MINUS_EXPR:
3990 if (TREE_CODE (arg1) != INTEGER_CST)
3993 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3994 move a constant to the other side. */
3995 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3998 /* If EXP is signed, any overflow in the computation is undefined,
3999 so we don't worry about it so long as our computations on
4000 the bounds don't overflow. For unsigned, overflow is defined
4001 and this is exactly the right thing. */
4002 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4003 arg0_type, low, 0, arg1, 0);
4004 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4005 arg0_type, high, 1, arg1, 0);
4006 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4007 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4010 /* Check for an unsigned range which has wrapped around the maximum
4011 value thus making n_high < n_low, and normalize it. */
4012 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4014 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4015 integer_one_node, 0);
4016 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4017 integer_one_node, 0);
4019 /* If the range is of the form +/- [ x+1, x ], we won't
4020 be able to normalize it. But then, it represents the
	       whole range or the empty set, so make it
	       +/- [ -, - ].  */
4023 if (tree_int_cst_equal (n_low, low)
4024 && tree_int_cst_equal (n_high, high))
4030 low = n_low, high = n_high;
4035 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4036 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4039 if (! INTEGRAL_TYPE_P (arg0_type)
4040 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4041 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4044 n_low = low, n_high = high;
4047 n_low = fold_convert (arg0_type, n_low);
4050 n_high = fold_convert (arg0_type, n_high);
4053 /* If we're converting arg0 from an unsigned type, to exp,
4054 a signed type, we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */
4062 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4065 tree equiv_type = lang_hooks.types.type_for_mode
4066 (TYPE_MODE (arg0_type), 1);
4068 /* A range without an upper bound is, naturally, unbounded.
4069 Since convert would have cropped a very large value, use
4070 the max value for the destination type. */