/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not, see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
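
/* Illustrative example (added; not part of the original source): a
   caller inside the middle end might use the entry points above as

     tree four  = size_int (4);
     tree eight = size_int (8);
     tree sum   = size_binop (PLUS_EXPR, four, eight);

   and, since both operands are constants, receive a sizetype
   INTEGER_CST of value 12 rather than a PLUS_EXPR node.  */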
#include "coretypes.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
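
/* Illustrative note (added): because each comparison below is a bit
   mask (LT, EQ and GT each get one bit, plus an "unordered" bit),
   combining two comparisons on the same operands reduces to bitwise
   arithmetic on the masks.  For instance, ANDing COMPCODE_LE (LT|EQ)
   with COMPCODE_GE (GT|EQ) keeps only the shared EQ bit.  */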
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
                          tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}

/* Similar to protected_set_expr_location, but never modify x in place;
   if the location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign bit.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
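
/* Illustrative sketch (added; not GCC code, and unused): the same sign
   test written out for plain ints, assuming 2's complement arithmetic.
   With a = INT_MAX, b = 1 and sum = a + b (which wraps to INT_MIN),
   a and b agree in sign while a and sum differ, so the AND below has
   the sign bit set and the test reports overflow.  */

static int
example_sum_overflows_p (int a, int b, int sum)
{
  /* Overflow occurred iff A and B agree in sign but SUM differs.  */
  return (~(a ^ b) & (a ^ sum)) < 0;
}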
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
                           tree_to_double_int (arg2),
                           uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
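
/* Example (added, illustrative): folding 12 / 4 through this helper
   yields the constant 3 because the remainder is zero, while 13 / 4
   yields NULL_TREE and the division is left for run time.  */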
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
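
/* Example (added, illustrative): for a 32-bit signed type the only
   value this rejects is -2147483648 (INT_MIN), whose negation
   2147483648 does not fit in the type; every other constant can be
   negated safely.  */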
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    CASE_CONVERT:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    CASE_CONVERT:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
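
/* Example (added, illustrative): splitting IN = a + 4 with
   CODE == PLUS_EXPR stores the literal 4 in *LITP, leaves *CONP null,
   and returns a as the variable part.  With NEGATE_P set, the 4 goes
   to *MINUS_LITP instead and the returned variable part is
   negate_expr (a).  */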
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
                               TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
                                TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
                  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
                             &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      if (double_int_one_p (op2))
        {
          res = op1;
          break;
        }
      if (double_int_equal_p (op1, op2)
          && ! double_int_zero_p (op1))
        {
          res = double_int_one;
          break;
        }
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &res.low, &res.high,
                                       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       op1.low, op1.high, op2.low, op2.high,
                                       &tmp.low, &tmp.high,
                                       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      gcc_unreachable ();
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
                             ((!uns || is_sizetype) && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
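
/* Example (added, illustrative): int_const_binop (PLUS_EXPR, two, three)
   with two INTEGER_CSTs of value 2 and 3 yields the INTEGER_CST 5.
   Overflowed results are still returned, but force_fit_type_double
   marks them with TREE_OVERFLOW so callers can decide what to do.  */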
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ... */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi  */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
        return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
        {
          tree elem1, elem2, elem;

          /* The trailing elements can be empty and should be treated as 0.  */
          if (!elements1)
            elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
          else
            {
              elem1 = TREE_VALUE (elements1);
              elements1 = TREE_CHAIN (elements1);
            }

          if (!elements2)
            elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
          else
            {
              elem2 = TREE_VALUE (elements2);
              elements2 = TREE_CHAIN (elements2);
            }

          elem = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elem == NULL_TREE)
            return NULL_TREE;

          list = tree_cons (NULL_TREE, elem, list);
        }
      return build_vector (type, nreverse (list));
    }

  return NULL_TREE;
}

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, as checked by
   int_binop_types_match_p.  If the operands are constant, so is
   the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
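
/* Example (added, illustrative): with sizetype constants a = 4 and
   b = 12, size_diffop_loc returns the ssizetype constant -8.  Since
   b > a, the difference is computed as -(b - a), and neither the inner
   subtraction nor the negation can overflow the signed type.  */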
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  bool overflow = false;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */
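
  /* Example (added, illustrative): under these saturating semantics a
     NaN folds to 0 and a value like 1e30 folds to the target INT_MAX,
     in both cases with TREE_OVERFLOW set on the result, rather than
     producing undefined behavior.  */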
1547 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1551 case FIX_TRUNC_EXPR:
1552 real_trunc (&r, VOIDmode, &x);
1559 /* If R is NaN, return zero and show we have an overflow. */
1560 if (REAL_VALUE_ISNAN (r))
1563 val = double_int_zero;
1566 /* See if R is less than the lower bound or greater than the
1571 tree lt = TYPE_MIN_VALUE (type);
1572 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1573 if (REAL_VALUES_LESS (r, l))
1576 val = tree_to_double_int (lt);
1582 tree ut = TYPE_MAX_VALUE (type);
1585 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1586 if (REAL_VALUES_LESS (u, r))
1589 val = tree_to_double_int (ut);
1595 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1597 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1601 /* A subroutine of fold_convert_const handling conversions of a
1602 FIXED_CST to an integer type. */
1605 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1608 double_int temp, temp_trunc;
1611 /* Right shift FIXED_CST to temp by fbit. */
1612 temp = TREE_FIXED_CST (arg1).data;
1613 mode = TREE_FIXED_CST (arg1).mode;
1614 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
1616 temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
1617 HOST_BITS_PER_DOUBLE_INT,
1618 SIGNED_FIXED_POINT_MODE_P (mode));
1620 /* Left shift temp to temp_trunc by fbit. */
1621 temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
1622 HOST_BITS_PER_DOUBLE_INT,
1623 SIGNED_FIXED_POINT_MODE_P (mode));
1627 temp = double_int_zero;
1628 temp_trunc = double_int_zero;
1631 /* If FIXED_CST is negative, we need to round the value toward 0.
1632 By checking if the fractional bits are not zero to add 1 to temp. */
1633 if (SIGNED_FIXED_POINT_MODE_P (mode)
1634 && double_int_negative_p (temp_trunc)
1635 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
1636 temp = double_int_add (temp, double_int_one);
1638 /* Given a fixed-point constant, make new constant with new type,
1639 appropriately sign-extended or truncated. */
1640 t = force_fit_type_double (type, temp, -1,
1641 (double_int_negative_p (temp)
1642 && (TYPE_UNSIGNED (type)
1643 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1644 | TREE_OVERFLOW (arg1));
1649 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1650 to another floating point type. */
1653 fold_convert_const_real_from_real (tree type, const_tree arg1)
1655 REAL_VALUE_TYPE value;
1658 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1659 t = build_real (type, value);
1661 /* If converting an infinity or NAN to a representation that doesn't
1662 have one, set the overflow bit so that we can produce some kind of
1663 error message at the appropriate point if necessary. It's not the
1664 most user-friendly message, but it's better than nothing. */
1665 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1666 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1667 TREE_OVERFLOW (t) = 1;
1668 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1669 && !MODE_HAS_NANS (TYPE_MODE (type)))
1670 TREE_OVERFLOW (t) = 1;
1671 /* Regular overflow, conversion produced an infinity in a mode that
1672 can't represent them. */
1673 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1674 && REAL_VALUE_ISINF (value)
1675 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1676 TREE_OVERFLOW (t) = 1;
1678 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1682 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1683 to a floating point type. */
1686 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1688 REAL_VALUE_TYPE value;
1691 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1692 t = build_real (type, value);
1694 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1698 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
1699 to another fixed-point type. */
1702 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1704 FIXED_VALUE_TYPE value;
1708 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1709 TYPE_SATURATING (type));
1710 t = build_fixed (type, value);
1712 /* Propagate overflow flags. */
1713 if (overflow_p | TREE_OVERFLOW (arg1))
1714 TREE_OVERFLOW (t) = 1;
1718 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
1719 to a fixed-point type. */
1722 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1724 FIXED_VALUE_TYPE value;
1728 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1729 TREE_INT_CST (arg1),
1730 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1731 TYPE_SATURATING (type));
1732 t = build_fixed (type, value);
1734 /* Propagate overflow flags. */
1735 if (overflow_p | TREE_OVERFLOW (arg1))
1736 TREE_OVERFLOW (t) = 1;
1740 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1741 to a fixed-point type. */
1744 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1746 FIXED_VALUE_TYPE value;
1750 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1751 &TREE_REAL_CST (arg1),
1752 TYPE_SATURATING (type));
1753 t = build_fixed (type, value);
1755 /* Propagate overflow flags. */
1756 if (overflow_p | TREE_OVERFLOW (arg1))
1757 TREE_OVERFLOW (t) = 1;
1761 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1762 type TYPE. If no simplification can be done return NULL_TREE. */
1765 fold_convert_const (enum tree_code code, tree type, tree arg1)
1767 if (TREE_TYPE (arg1) == type)
1770 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1771 || TREE_CODE (type) == OFFSET_TYPE)
1773 if (TREE_CODE (arg1) == INTEGER_CST)
1774 return fold_convert_const_int_from_int (type, arg1);
1775 else if (TREE_CODE (arg1) == REAL_CST)
1776 return fold_convert_const_int_from_real (code, type, arg1);
1777 else if (TREE_CODE (arg1) == FIXED_CST)
1778 return fold_convert_const_int_from_fixed (type, arg1);
1780 else if (TREE_CODE (type) == REAL_TYPE)
1782 if (TREE_CODE (arg1) == INTEGER_CST)
1783 return build_real_from_int_cst (type, arg1);
1784 else if (TREE_CODE (arg1) == REAL_CST)
1785 return fold_convert_const_real_from_real (type, arg1);
1786 else if (TREE_CODE (arg1) == FIXED_CST)
1787 return fold_convert_const_real_from_fixed (type, arg1);
1789 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1791 if (TREE_CODE (arg1) == FIXED_CST)
1792 return fold_convert_const_fixed_from_fixed (type, arg1);
1793 else if (TREE_CODE (arg1) == INTEGER_CST)
1794 return fold_convert_const_fixed_from_int (type, arg1);
1795 else if (TREE_CODE (arg1) == REAL_CST)
1796 return fold_convert_const_fixed_from_real (type, arg1);
1801 /* Construct a vector of zero elements of vector type TYPE. */
1804 build_zero_vector (tree type)
1808 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1809 return build_vector_from_val (type, t);
1812 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
1815 fold_convertible_p (const_tree type, const_tree arg)
1817 tree orig = TREE_TYPE (arg);
1822 if (TREE_CODE (arg) == ERROR_MARK
1823 || TREE_CODE (type) == ERROR_MARK
1824 || TREE_CODE (orig) == ERROR_MARK)
1827 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1830 switch (TREE_CODE (type))
1832 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1833 case POINTER_TYPE: case REFERENCE_TYPE:
1835 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1836 || TREE_CODE (orig) == OFFSET_TYPE)
1838 return (TREE_CODE (orig) == VECTOR_TYPE
1839 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1842 case FIXED_POINT_TYPE:
1846 return TREE_CODE (type) == TREE_CODE (orig);
1853 /* Convert expression ARG to type TYPE. Used by the middle-end for
1854 simple conversions in preference to calling the front-end's convert. */
1857 fold_convert_loc (location_t loc, tree type, tree arg)
1859 tree orig = TREE_TYPE (arg);
1865 if (TREE_CODE (arg) == ERROR_MARK
1866 || TREE_CODE (type) == ERROR_MARK
1867 || TREE_CODE (orig) == ERROR_MARK)
1868 return error_mark_node;
1870 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1871 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1873 switch (TREE_CODE (type))
1876 case REFERENCE_TYPE:
1877 /* Handle conversions between pointers to different address spaces. */
1878 if (POINTER_TYPE_P (orig)
1879 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1880 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1881 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1884 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1886 if (TREE_CODE (arg) == INTEGER_CST)
1888 tem = fold_convert_const (NOP_EXPR, type, arg);
1889 if (tem != NULL_TREE)
1892 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1893 || TREE_CODE (orig) == OFFSET_TYPE)
1894 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1895 if (TREE_CODE (orig) == COMPLEX_TYPE)
1896 return fold_convert_loc (loc, type,
1897 fold_build1_loc (loc, REALPART_EXPR,
1898 TREE_TYPE (orig), arg));
1899 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1900 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1901 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1904 if (TREE_CODE (arg) == INTEGER_CST)
1906 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1907 if (tem != NULL_TREE)
1910 else if (TREE_CODE (arg) == REAL_CST)
1912 tem = fold_convert_const (NOP_EXPR, type, arg);
1913 if (tem != NULL_TREE)
1916 else if (TREE_CODE (arg) == FIXED_CST)
1918 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1919 if (tem != NULL_TREE)
1923 switch (TREE_CODE (orig))
1926 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1927 case POINTER_TYPE: case REFERENCE_TYPE:
1928 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1931 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1933 case FIXED_POINT_TYPE:
1934 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1937 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1938 return fold_convert_loc (loc, type, tem);
1944 case FIXED_POINT_TYPE:
1945 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1946 || TREE_CODE (arg) == REAL_CST)
1948 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1949 if (tem != NULL_TREE)
1950 goto fold_convert_exit;
1953 switch (TREE_CODE (orig))
1955 case FIXED_POINT_TYPE:
1960 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1963 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1964 return fold_convert_loc (loc, type, tem);
1971 switch (TREE_CODE (orig))
1974 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1975 case POINTER_TYPE: case REFERENCE_TYPE:
1977 case FIXED_POINT_TYPE:
1978 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1979 fold_convert_loc (loc, TREE_TYPE (type), arg),
1980 fold_convert_loc (loc, TREE_TYPE (type),
1981 integer_zero_node));
1986 if (TREE_CODE (arg) == COMPLEX_EXPR)
1988 rpart = fold_convert_loc (loc, TREE_TYPE (type),
1989 TREE_OPERAND (arg, 0));
1990 ipart = fold_convert_loc (loc, TREE_TYPE (type),
1991 TREE_OPERAND (arg, 1));
1992 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
1995 arg = save_expr (arg);
1996 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1997 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
1998 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
1999 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2000 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2008 if (integer_zerop (arg))
2009 return build_zero_vector (type);
2010 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2011 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2012 || TREE_CODE (orig) == VECTOR_TYPE);
2013 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2016 tem = fold_ignored_result (arg);
2017 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2023 protected_set_expr_location_unshare (tem, loc);
2027 /* Return false if expr can be assumed not to be an lvalue, true
2031 maybe_lvalue_p (const_tree x)
2033 /* We only need to wrap lvalue tree codes. */
2034 switch (TREE_CODE (x))
2047 case ARRAY_RANGE_REF:
2053 case PREINCREMENT_EXPR:
2054 case PREDECREMENT_EXPR:
2056 case TRY_CATCH_EXPR:
2057 case WITH_CLEANUP_EXPR:
2066 /* Assume the worst for front-end tree codes. */
2067 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2075 /* Return an expr equal to X but certainly not valid as an lvalue. */
2078 non_lvalue_loc (location_t loc, tree x)
2080 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2085 if (! maybe_lvalue_p (x))
2087 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2090 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2091 Zero means allow extended lvalues. */
2093 int pedantic_lvalues;
2095 /* When pedantic, return an expr equal to X but certainly not valid as a
2096 pedantic lvalue. Otherwise, return X. */
2099 pedantic_non_lvalue_loc (location_t loc, tree x)
2101 if (pedantic_lvalues)
2102 return non_lvalue_loc (loc, x);
2104 return protected_set_expr_location_unshare (x, loc);
2107 /* Given a tree comparison code, return the code that is the logical inverse
2108 of the given code. It is not safe to do this for floating-point
2109 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2110 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2113 invert_tree_comparison (enum tree_code code, bool honor_nans)
2115 if (honor_nans && flag_trapping_math)
2125 return honor_nans ? UNLE_EXPR : LE_EXPR;
2127 return honor_nans ? UNLT_EXPR : LT_EXPR;
2129 return honor_nans ? UNGE_EXPR : GE_EXPR;
2131 return honor_nans ? UNGT_EXPR : GT_EXPR;
2145 return UNORDERED_EXPR;
2146 case UNORDERED_EXPR:
2147 return ORDERED_EXPR;
2153 /* Similar, but return the comparison that results if the operands are
2154 swapped. This is safe for floating-point. */
2157 swap_tree_comparison (enum tree_code code)
2164 case UNORDERED_EXPR:
2190 /* Convert a comparison tree code from an enum tree_code representation
2191 into a compcode bit-based encoding. This function is the inverse of
2192 compcode_to_comparison. */
2194 static enum comparison_code
2195 comparison_to_compcode (enum tree_code code)
2212 return COMPCODE_ORD;
2213 case UNORDERED_EXPR:
2214 return COMPCODE_UNORD;
2216 return COMPCODE_UNLT;
2218 return COMPCODE_UNEQ;
2220 return COMPCODE_UNLE;
2222 return COMPCODE_UNGT;
2224 return COMPCODE_LTGT;
2226 return COMPCODE_UNGE;
2232 /* Convert a compcode bit-based encoding of a comparison operator back
2233 to GCC's enum tree_code representation. This function is the
2234 inverse of comparison_to_compcode. */
2236 static enum tree_code
2237 compcode_to_comparison (enum comparison_code code)
2254 return ORDERED_EXPR;
2255 case COMPCODE_UNORD:
2256 return UNORDERED_EXPR;
2274 /* Return a tree for the comparison which is the combination of
2275 doing the AND or OR (depending on CODE) of the two operations LCODE
2276 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2277 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2278 if this makes the transformation invalid. */
2281 combine_comparisons (location_t loc,
2282 enum tree_code code, enum tree_code lcode,
2283 enum tree_code rcode, tree truth_type,
2284 tree ll_arg, tree lr_arg)
2286 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2287 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2288 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2293 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2294 compcode = lcompcode & rcompcode;
2297 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2298 compcode = lcompcode | rcompcode;
2307 /* Eliminate unordered comparisons, as well as LTGT and ORD
2308 which are not used unless the mode has NaNs. */
2309 compcode &= ~COMPCODE_UNORD;
2310 if (compcode == COMPCODE_LTGT)
2311 compcode = COMPCODE_NE;
2312 else if (compcode == COMPCODE_ORD)
2313 compcode = COMPCODE_TRUE;
2315 else if (flag_trapping_math)
2317 /* Check that the original operation and the optimized ones will trap
2318 under the same condition. */
2319 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2320 && (lcompcode != COMPCODE_EQ)
2321 && (lcompcode != COMPCODE_ORD);
2322 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2323 && (rcompcode != COMPCODE_EQ)
2324 && (rcompcode != COMPCODE_ORD);
2325 bool trap = (compcode & COMPCODE_UNORD) == 0
2326 && (compcode != COMPCODE_EQ)
2327 && (compcode != COMPCODE_ORD);
2329 /* In a short-circuited boolean expression the LHS might be
2330 such that the RHS, if evaluated, will never trap. For
2331 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2332 if neither x nor y is NaN. (This is a mixed blessing: for
2333 example, the expression above will never trap, hence
2334 optimizing it to x < y would be invalid). */
2335 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2336 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2339 /* If the comparison was short-circuited, and only the RHS
2340 trapped, we may now generate a spurious trap. */
2342 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2345 /* If we changed the conditions that cause a trap, we lose. */
2346 if ((ltrap || rtrap) != trap)
2350 if (compcode == COMPCODE_TRUE)
2351 return constant_boolean_node (true, truth_type);
2352 else if (compcode == COMPCODE_FALSE)
2353 return constant_boolean_node (false, truth_type);
2356 enum tree_code tcode;
2358 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2359 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
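/* Two examples of the whole transformation (illustrative only):

     (x < y) || (x == y)  ->  COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE
			  ->  x <= y
     (x < y) && (x == y)  ->  COMPCODE_LT & COMPCODE_EQ == COMPCODE_FALSE
			  ->  constant false

   For floating-point operands with -ftrapping-math, the trap checks above
   may instead force a NULL_TREE return to preserve trap behavior.  */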
2363 /* Return nonzero if two operands (typically of the same tree node)
2364 are necessarily equal. If either argument has side-effects this
2365 function returns zero. FLAGS modifies behavior as follows:
2367 If OEP_ONLY_CONST is set, only return nonzero for constants.
2368 This function tests whether the operands are indistinguishable;
2369 it does not test whether they are equal using C's == operation.
2370 The distinction is important for IEEE floating point, because
2371 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2372 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2374 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2375 even though it may hold multiple values during a function.
2376 This is because a GCC tree node guarantees that nothing else is
2377 executed between the evaluation of its "operands" (which may often
2378 be evaluated in arbitrary order). Hence if the operands themselves
2379 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2380 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2381 unset means assuming isochronic (or instantaneous) tree equivalence.
2382 Unless comparing arbitrary expression trees, such as from different
2383 statements, this flag can usually be left unset.
2385 If OEP_PURE_SAME is set, then pure functions with identical arguments
2386 are considered the same. It is used when the caller has other ways
2387 to ensure that global memory is unchanged in between. */
2390 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2392 /* If either is ERROR_MARK, they aren't equal. */
2393 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2394 || TREE_TYPE (arg0) == error_mark_node
2395 || TREE_TYPE (arg1) == error_mark_node)
2398 /* Similar, if either does not have a type (like a released SSA name),
2399 they aren't equal. */
2400 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2403 /* Check equality of integer constants before bailing out due to
2404 precision differences. */
2405 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2406 return tree_int_cst_equal (arg0, arg1);
2408 /* If both types don't have the same signedness, then we can't consider
2409 them equal. We must check this before the STRIP_NOPS calls
2410 because they may change the signedness of the arguments. As pointers
2411 strictly don't have a signedness, require either two pointers or
2412 two non-pointers as well. */
2413 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2414 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2417 /* We cannot consider pointers to different address spaces equal. */
2418 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2419 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2420 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2423 /* If both types don't have the same precision, then it is not safe to strip NOPs. */
2425 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2431 /* In case both args are comparisons but with different comparison
2432 code, try to swap the comparison operands of one arg to produce
2433 a match and compare that variant. */
2434 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2435 && COMPARISON_CLASS_P (arg0)
2436 && COMPARISON_CLASS_P (arg1))
2438 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2440 if (TREE_CODE (arg0) == swap_code)
2441 return operand_equal_p (TREE_OPERAND (arg0, 0),
2442 TREE_OPERAND (arg1, 1), flags)
2443 && operand_equal_p (TREE_OPERAND (arg0, 1),
2444 TREE_OPERAND (arg1, 0), flags);
2447 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2448 /* This is needed for conversions and for COMPONENT_REF.
2449 Might as well play it safe and always test this. */
2450 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2451 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2452 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2455 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2456 We don't care about side effects in that case because the SAVE_EXPR
2457 takes care of that for us. In all other cases, two expressions are
2458 equal if they have no side effects. If we have two identical
2459 expressions with side effects that should be treated the same due
2460 to the only side effects being identical SAVE_EXPR's, that will
2461 be detected in the recursive calls below.
2462 If we are taking an invariant address of two identical objects
2463 they are necessarily equal as well. */
2464 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2465 && (TREE_CODE (arg0) == SAVE_EXPR
2466 || (flags & OEP_CONSTANT_ADDRESS_OF)
2467 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2470 /* Next handle constant cases, those for which we can return 1 even
2471 if ONLY_CONST is set. */
2472 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2473 switch (TREE_CODE (arg0))
2476 return tree_int_cst_equal (arg0, arg1);
2479 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2480 TREE_FIXED_CST (arg1));
2483 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2484 TREE_REAL_CST (arg1)))
2488 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2490 /* If we do not distinguish between signed and unsigned zero,
2491 consider them equal. */
2492 if (real_zerop (arg0) && real_zerop (arg1))
2501 v1 = TREE_VECTOR_CST_ELTS (arg0);
2502 v2 = TREE_VECTOR_CST_ELTS (arg1);
2505 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2508 v1 = TREE_CHAIN (v1);
2509 v2 = TREE_CHAIN (v2);
2516 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2518 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2522 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2523 && ! memcmp (TREE_STRING_POINTER (arg0),
2524 TREE_STRING_POINTER (arg1),
2525 TREE_STRING_LENGTH (arg0)));
2528 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2529 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2530 ? OEP_CONSTANT_ADDRESS_OF : 0);
2535 if (flags & OEP_ONLY_CONST)
2538 /* Define macros to test an operand from arg0 and arg1 for equality and a
2539 variant that allows null and views null as being different from any
2540 non-null value. In the latter case, if either is null, then both
2541 must be; otherwise, do the normal comparison. */
2542 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2543 TREE_OPERAND (arg1, N), flags)
2545 #define OP_SAME_WITH_NULL(N) \
2546 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2547 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2549 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2552 /* Two conversions are equal only if signedness and modes match. */
2553 switch (TREE_CODE (arg0))
2556 case FIX_TRUNC_EXPR:
2557 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2558 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2568 case tcc_comparison:
2570 if (OP_SAME (0) && OP_SAME (1))
2573 /* For commutative ops, allow the other order. */
2574 return (commutative_tree_code (TREE_CODE (arg0))
2575 && operand_equal_p (TREE_OPERAND (arg0, 0),
2576 TREE_OPERAND (arg1, 1), flags)
2577 && operand_equal_p (TREE_OPERAND (arg0, 1),
2578 TREE_OPERAND (arg1, 0), flags));
2581 /* If either of the pointer (or reference) expressions we are
2582 dereferencing contain a side effect, these cannot be equal. */
2583 if (TREE_SIDE_EFFECTS (arg0)
2584 || TREE_SIDE_EFFECTS (arg1))
2587 switch (TREE_CODE (arg0))
2595 /* Require equal access sizes, and similar pointer types.
2596 We can have incomplete types for array references of
2597 variable-sized arrays from the Fortran frontend
2599 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2600 || (TYPE_SIZE (TREE_TYPE (arg0))
2601 && TYPE_SIZE (TREE_TYPE (arg1))
2602 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2603 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2604 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2605 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2606 && OP_SAME (0) && OP_SAME (1));
2609 case ARRAY_RANGE_REF:
2610 /* Operands 2 and 3 may be null.
2611 Compare the array index by value first if it is constant, as we
2612 may have different types but the same value here. */
2614 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2615 TREE_OPERAND (arg1, 1))
2617 && OP_SAME_WITH_NULL (2)
2618 && OP_SAME_WITH_NULL (3));
2621 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2622 may be NULL when we're called to compare MEM_EXPRs. */
2623 return OP_SAME_WITH_NULL (0)
2625 && OP_SAME_WITH_NULL (2);
2628 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2634 case tcc_expression:
2635 switch (TREE_CODE (arg0))
2638 case TRUTH_NOT_EXPR:
2641 case TRUTH_ANDIF_EXPR:
2642 case TRUTH_ORIF_EXPR:
2643 return OP_SAME (0) && OP_SAME (1);
2646 case WIDEN_MULT_PLUS_EXPR:
2647 case WIDEN_MULT_MINUS_EXPR:
2650 /* The multiplication operands are commutative. */
2653 case TRUTH_AND_EXPR:
2655 case TRUTH_XOR_EXPR:
2656 if (OP_SAME (0) && OP_SAME (1))
2659 /* Otherwise take into account this is a commutative operation. */
2660 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2661 TREE_OPERAND (arg1, 1), flags)
2662 && operand_equal_p (TREE_OPERAND (arg0, 1),
2663 TREE_OPERAND (arg1, 0), flags));
2668 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2675 switch (TREE_CODE (arg0))
2678 /* If the CALL_EXPRs call different functions, then they
2679 clearly cannot be equal. */
2680 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2685 unsigned int cef = call_expr_flags (arg0);
2686 if (flags & OEP_PURE_SAME)
2687 cef &= ECF_CONST | ECF_PURE;
2694 /* Now see if all the arguments are the same. */
2696 const_call_expr_arg_iterator iter0, iter1;
2698 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2699 a1 = first_const_call_expr_arg (arg1, &iter1);
2701 a0 = next_const_call_expr_arg (&iter0),
2702 a1 = next_const_call_expr_arg (&iter1))
2703 if (! operand_equal_p (a0, a1, flags))
2706 /* If we get here and both argument lists are exhausted
2707 then the CALL_EXPRs are equal. */
2708 return ! (a0 || a1);
2714 case tcc_declaration:
2715 /* Consider __builtin_sqrt equal to sqrt. */
2716 return (TREE_CODE (arg0) == FUNCTION_DECL
2717 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2718 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2719 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2726 #undef OP_SAME_WITH_NULL
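/* A usage sketch (the operand names are made up for illustration):

     operand_equal_p (x_plus_y, y_plus_x, 0)         -> 1, since PLUS_EXPR
						      is commutative
     operand_equal_p (some_var, some_var, 0)         -> 1
     operand_equal_p (some_var, some_var,
		      OEP_ONLY_CONST)                -> 0, not a constant
     operand_equal_p (minus_zero, plus_zero, 0)      -> 0 whenever signed
						      zeros are honored.  */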
2729 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2730 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2732 When in doubt, return 0. */
2735 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2737 int unsignedp1, unsignedpo;
2738 tree primarg0, primarg1, primother;
2739 unsigned int correct_width;
2741 if (operand_equal_p (arg0, arg1, 0))
2744 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2745 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2748 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2749 and see if the inner values are the same. This removes any
2750 signedness comparison, which doesn't matter here. */
2751 primarg0 = arg0, primarg1 = arg1;
2752 STRIP_NOPS (primarg0);
2753 STRIP_NOPS (primarg1);
2754 if (operand_equal_p (primarg0, primarg1, 0))
2757 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2758 actual comparison operand, ARG0.
2760 First throw away any conversions to wider types
2761 already present in the operands. */
2763 primarg1 = get_narrower (arg1, &unsignedp1);
2764 primother = get_narrower (other, &unsignedpo);
2766 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2767 if (unsignedp1 == unsignedpo
2768 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2769 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2771 tree type = TREE_TYPE (arg0);
2773 /* Make sure shorter operand is extended the right way
2774 to match the longer operand. */
2775 primarg1 = fold_convert (signed_or_unsigned_type_for
2776 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2778 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
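/* Illustration (hypothetical trees): if ARG1 is (int) c for a signed char
   c, OTHER is (int) d for another signed char d, and shorten_compare
   narrowed the comparison (int) c == (int) d down to c == d, then

     operand_equal_for_comparison_p (c, (int) c, (int) d)

   returns 1: narrowing ARG1 and re-extending it the way shorten_compare
   would reproduces ARG0.  */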
2785 /* See if ARG is an expression that is either a comparison or is performing
2786 arithmetic on comparisons. The comparisons must only be comparing
2787 two different values, which will be stored in *CVAL1 and *CVAL2; if
2788 they are nonzero it means that some operands have already been found.
2789 No variables may be used anywhere else in the expression except in the
2790 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2791 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2793 If this is true, return 1. Otherwise, return zero. */
2796 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2798 enum tree_code code = TREE_CODE (arg);
2799 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2801 /* We can handle some of the tcc_expression cases here. */
2802 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2804 else if (tclass == tcc_expression
2805 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2806 || code == COMPOUND_EXPR))
2807 tclass = tcc_binary;
2809 else if (tclass == tcc_expression && code == SAVE_EXPR
2810 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2812 /* If we've already found a CVAL1 or CVAL2, this expression is
2813 too complex to handle. */
2814 if (*cval1 || *cval2)
2824 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2827 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2828 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2829 cval1, cval2, save_p));
2834 case tcc_expression:
2835 if (code == COND_EXPR)
2836 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2837 cval1, cval2, save_p)
2838 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2839 cval1, cval2, save_p)
2840 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2841 cval1, cval2, save_p));
2844 case tcc_comparison:
2845 /* First see if we can handle the first operand, then the second. For
2846 the second operand, we know *CVAL1 can't be zero. It must be that
2847 one side of the comparison is each of the values; test for the
2848 case where this isn't true by failing if the two operands are the same. */
2851 if (operand_equal_p (TREE_OPERAND (arg, 0),
2852 TREE_OPERAND (arg, 1), 0))
2856 *cval1 = TREE_OPERAND (arg, 0);
2857 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2859 else if (*cval2 == 0)
2860 *cval2 = TREE_OPERAND (arg, 0);
2861 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2866 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2868 else if (*cval2 == 0)
2869 *cval2 = TREE_OPERAND (arg, 1);
2870 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
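/* For example, for ARG = (a < b) & (b == a) the walk succeeds with
   *CVAL1 == a and *CVAL2 == b, since every comparison leaf mentions only
   those two values; for (a < b) & (c == d) it fails because a third
   value appears.  */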
2882 /* ARG is a tree that is known to contain just arithmetic operations and
2883 comparisons. Evaluate the operations in the tree substituting NEW0 for
2884 any occurrence of OLD0 as an operand of a comparison, and likewise NEW1 for any occurrence of OLD1. */
2888 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2889 tree old1, tree new1)
2891 tree type = TREE_TYPE (arg);
2892 enum tree_code code = TREE_CODE (arg);
2893 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2895 /* We can handle some of the tcc_expression cases here. */
2896 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2898 else if (tclass == tcc_expression
2899 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2900 tclass = tcc_binary;
2905 return fold_build1_loc (loc, code, type,
2906 eval_subst (loc, TREE_OPERAND (arg, 0),
2907 old0, new0, old1, new1));
2910 return fold_build2_loc (loc, code, type,
2911 eval_subst (loc, TREE_OPERAND (arg, 0),
2912 old0, new0, old1, new1),
2913 eval_subst (loc, TREE_OPERAND (arg, 1),
2914 old0, new0, old1, new1));
2916 case tcc_expression:
2920 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2924 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2928 return fold_build3_loc (loc, code, type,
2929 eval_subst (loc, TREE_OPERAND (arg, 0),
2930 old0, new0, old1, new1),
2931 eval_subst (loc, TREE_OPERAND (arg, 1),
2932 old0, new0, old1, new1),
2933 eval_subst (loc, TREE_OPERAND (arg, 2),
2934 old0, new0, old1, new1));
2938 /* Fall through - ??? */
2940 case tcc_comparison:
2942 tree arg0 = TREE_OPERAND (arg, 0);
2943 tree arg1 = TREE_OPERAND (arg, 1);
2945 /* We need to check both for exact equality and tree equality. The
2946 former will be true if the operand has a side-effect. In that
2947 case, we know the operand occurred exactly once. */
2949 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2951 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2954 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2956 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2959 return fold_build2_loc (loc, code, type, arg0, arg1);
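/* As an illustration, with OLD0 = a, NEW0 = x, OLD1 = b and NEW1 = y,

     eval_subst (loc, (a < b) && (b == a), a, x, b, y)

   rebuilds the tree as (x < y) && (y == x), folding each node as it is
   rebuilt.  */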
2967 /* Return a tree for the case when the result of an expression is RESULT
2968 converted to TYPE and OMITTED was previously an operand of the expression
2969 but is now not needed (e.g., we folded OMITTED * 0).
2971 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2972 the conversion of RESULT to TYPE. */
2975 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2977 tree t = fold_convert_loc (loc, type, result);
2979 /* If the resulting operand is an empty statement, just return the omitted
2980 statement cast to void. */
2981 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2982 return build1_loc (loc, NOP_EXPR, void_type_node,
2983 fold_ignored_result (omitted));
2985 if (TREE_SIDE_EFFECTS (omitted))
2986 return build2_loc (loc, COMPOUND_EXPR, type,
2987 fold_ignored_result (omitted), t);
2989 return non_lvalue_loc (loc, t);
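/* For example, when fold reduces f () * 0 the call to f cannot simply be
   dropped, so omit_one_operand_loc (loc, type, integer_zero_node, f ())
   produces the equivalent of (f (), 0): a COMPOUND_EXPR that still
   evaluates f for its side effects.  */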
2992 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2995 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2998 tree t = fold_convert_loc (loc, type, result);
3000 /* If the resulting operand is an empty statement, just return the omitted
3001 statement cast to void. */
3002 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3003 return build1_loc (loc, NOP_EXPR, void_type_node,
3004 fold_ignored_result (omitted));
3006 if (TREE_SIDE_EFFECTS (omitted))
3007 return build2_loc (loc, COMPOUND_EXPR, type,
3008 fold_ignored_result (omitted), t);
3010 return pedantic_non_lvalue_loc (loc, t);
3013 /* Return a tree for the case when the result of an expression is RESULT
3014 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3015 of the expression but are now not needed.
3017 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3018 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3019 evaluated before OMITTED2. Otherwise, if neither has side effects,
3020 just do the conversion of RESULT to TYPE. */
3023 omit_two_operands_loc (location_t loc, tree type, tree result,
3024 tree omitted1, tree omitted2)
3026 tree t = fold_convert_loc (loc, type, result);
3028 if (TREE_SIDE_EFFECTS (omitted2))
3029 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3030 if (TREE_SIDE_EFFECTS (omitted1))
3031 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3033 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
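/* E.g. when both omitted operands have side effects,

     omit_two_operands_loc (loc, type, res, f (), g ())

   produces the equivalent of (f (), (g (), res)), so f is evaluated
   before g as promised above.  */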
3037 /* Return a simplified tree node for the truth-negation of ARG. This
3038 never alters ARG itself. We assume that ARG is an operation that
3039 returns a truth value (0 or 1).
3041 FIXME: one would think we would fold the result, but it causes
3042 problems with the dominator optimizer. */
3045 fold_truth_not_expr (location_t loc, tree arg)
3047 tree type = TREE_TYPE (arg);
3048 enum tree_code code = TREE_CODE (arg);
3049 location_t loc1, loc2;
3051 /* If this is a comparison, we can simply invert it, except for
3052 floating-point non-equality comparisons, in which case we just
3053 enclose a TRUTH_NOT_EXPR around what we have. */
3055 if (TREE_CODE_CLASS (code) == tcc_comparison)
3057 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3058 if (FLOAT_TYPE_P (op_type)
3059 && flag_trapping_math
3060 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3061 && code != NE_EXPR && code != EQ_EXPR)
3064 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3065 if (code == ERROR_MARK)
3068 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3069 TREE_OPERAND (arg, 1));
3075 return constant_boolean_node (integer_zerop (arg), type);
3077 case TRUTH_AND_EXPR:
3078 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3079 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3080 return build2_loc (loc, TRUTH_OR_EXPR, type,
3081 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3082 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3085 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3086 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3087 return build2_loc (loc, TRUTH_AND_EXPR, type,
3088 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3089 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3091 case TRUTH_XOR_EXPR:
3092 /* Here we can invert either operand. We invert the first operand
3093 unless the second operand is a TRUTH_NOT_EXPR in which case our
3094 result is the XOR of the first operand with the inside of the
3095 negation of the second operand. */
3097 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3098 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3099 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3101 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3102 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3103 TREE_OPERAND (arg, 1));
3105 case TRUTH_ANDIF_EXPR:
3106 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3107 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3108 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3109 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3110 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3112 case TRUTH_ORIF_EXPR:
3113 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3114 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3115 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3116 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3117 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3119 case TRUTH_NOT_EXPR:
3120 return TREE_OPERAND (arg, 0);
3124 tree arg1 = TREE_OPERAND (arg, 1);
3125 tree arg2 = TREE_OPERAND (arg, 2);
3127 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3128 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3130 /* A COND_EXPR may have a throw as one operand, which
3131 then has void type. Just leave void operands as they are. */
3133 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3134 VOID_TYPE_P (TREE_TYPE (arg1))
3135 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3136 VOID_TYPE_P (TREE_TYPE (arg2))
3137 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3141 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3142 return build2_loc (loc, COMPOUND_EXPR, type,
3143 TREE_OPERAND (arg, 0),
3144 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3146 case NON_LVALUE_EXPR:
3147 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3148 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3151 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3152 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3154 /* ... fall through ... */
3157 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3158 return build1_loc (loc, TREE_CODE (arg), type,
3159 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3162 if (!integer_onep (TREE_OPERAND (arg, 1)))
3164 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3167 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3169 case CLEANUP_POINT_EXPR:
3170 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3171 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3172 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
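/* A few of the rewrites above, spelled out:

     !(a && b)  ->  !a || !b		(De Morgan)
     !(x < y)   ->  x >= y		for integer operands
     !(x < y)   ->  UNGE (x, y)		for floats when NaNs are honored;
					with -ftrapping-math the inversion
					is refused and the caller keeps a
					TRUTH_NOT_EXPR instead.  */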
3179 /* Return a simplified tree node for the truth-negation of ARG. This
3180 never alters ARG itself. We assume that ARG is an operation that
3181 returns a truth value (0 or 1).
3183 FIXME: one would think we would fold the result, but it causes
3184 problems with the dominator optimizer. */
3187 invert_truthvalue_loc (location_t loc, tree arg)
3191 if (TREE_CODE (arg) == ERROR_MARK)
3194 tem = fold_truth_not_expr (loc, arg);
3196 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3201 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3202 operands are another bit-wise operation with a common input. If so,
3203 distribute the bit operations to save an operation and possibly two if
3204 constants are involved. For example, convert
3205 (A | B) & (A | C) into A | (B & C)
3206 Further simplification will occur if B and C are constants.
3208 If this optimization cannot be done, 0 will be returned. */
3211 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3212 tree arg0, tree arg1)
3217 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3218 || TREE_CODE (arg0) == code
3219 || (TREE_CODE (arg0) != BIT_AND_EXPR
3220 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3223 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3225 common = TREE_OPERAND (arg0, 0);
3226 left = TREE_OPERAND (arg0, 1);
3227 right = TREE_OPERAND (arg1, 1);
3229 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3231 common = TREE_OPERAND (arg0, 0);
3232 left = TREE_OPERAND (arg0, 1);
3233 right = TREE_OPERAND (arg1, 0);
3235 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3237 common = TREE_OPERAND (arg0, 1);
3238 left = TREE_OPERAND (arg0, 0);
3239 right = TREE_OPERAND (arg1, 1);
3241 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3243 common = TREE_OPERAND (arg0, 1);
3244 left = TREE_OPERAND (arg0, 0);
3245 right = TREE_OPERAND (arg1, 0);
3250 common = fold_convert_loc (loc, type, common);
3251 left = fold_convert_loc (loc, type, left);
3252 right = fold_convert_loc (loc, type, right);
3253 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3254 fold_build2_loc (loc, code, type, left, right));
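/* With constants the saving is immediate, e.g.

     (x | 3) & (x | 5)  ->  x | (3 & 5)  ->  x | 1

   replacing two IORs and an AND by a single IOR.  */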
3257 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3258 with code CODE. This optimization is unsafe. */
3260 distribute_real_division (location_t loc, enum tree_code code, tree type,
3261 tree arg0, tree arg1)
3263 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3264 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3266 /* (A / C) +- (B / C) -> (A +- B) / C. */
3268 && operand_equal_p (TREE_OPERAND (arg0, 1),
3269 TREE_OPERAND (arg1, 1), 0))
3270 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3271 fold_build2_loc (loc, code, type,
3272 TREE_OPERAND (arg0, 0),
3273 TREE_OPERAND (arg1, 0)),
3274 TREE_OPERAND (arg0, 1));
3276 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3277 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3278 TREE_OPERAND (arg1, 0), 0)
3279 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3280 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3282 REAL_VALUE_TYPE r0, r1;
3283 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3284 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3286 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3288 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3289 real_arithmetic (&r0, code, &r0, &r1);
3290 return fold_build2_loc (loc, MULT_EXPR, type,
3291 TREE_OPERAND (arg0, 0),
3292 build_real (type, r0));
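/* Concretely:

     (a / c) + (b / c)  ->  (a + b) / c
     x / 2.0 + x / 4.0  ->  x * (1/2.0 + 1/4.0)  ->  x * 0.75

   Both can change rounding and exception behavior, hence the warning
   above that this is unsafe.  */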
3298 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3299 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3302 make_bit_field_ref (location_t loc, tree inner, tree type,
3303 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3305 tree result, bftype;
3309 tree size = TYPE_SIZE (TREE_TYPE (inner));
3310 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3311 || POINTER_TYPE_P (TREE_TYPE (inner)))
3312 && host_integerp (size, 0)
3313 && tree_low_cst (size, 0) == bitsize)
3314 return fold_convert_loc (loc, type, inner);
3318 if (TYPE_PRECISION (bftype) != bitsize
3319 || TYPE_UNSIGNED (bftype) == !unsignedp)
3320 bftype = build_nonstandard_integer_type (bitsize, 0);
3322 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3323 size_int (bitsize), bitsize_int (bitpos));
3326 result = fold_convert_loc (loc, type, result);
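/* E.g. a request for 8 bits at bit position 16 of a wider INNER becomes
   roughly

     (type) BIT_FIELD_REF <inner, 8, 16>

   using an integer type of exactly 8 bits for the reference itself,
   while an INNER that is already an integral or pointer object of
   exactly 8 bits short-circuits to a plain conversion.  */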
3331 /* Optimize a bit-field compare.
3333 There are two cases: First is a compare against a constant and the
3334 second is a comparison of two items where the fields are at the same
3335 bit position relative to the start of a chunk (byte, halfword, word)
3336 large enough to contain it. In these cases we can avoid the shift
3337 implicit in bitfield extractions.
3339 For constants, we emit a compare of the shifted constant with the
3340 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3341 compared. For two fields at the same position, we do the ANDs with the
3342 similar mask and compare the result of the ANDs.
3344 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3345 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3346 are the left and right operands of the comparison, respectively.
3348 If the optimization described above can be done, we return the resulting
3349 tree. Otherwise we return zero. */
3352 optimize_bit_field_compare (location_t loc, enum tree_code code,
3353 tree compare_type, tree lhs, tree rhs)
3355 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3356 tree type = TREE_TYPE (lhs);
3357 tree signed_type, unsigned_type;
3358 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3359 enum machine_mode lmode, rmode, nmode;
3360 int lunsignedp, runsignedp;
3361 int lvolatilep = 0, rvolatilep = 0;
3362 tree linner, rinner = NULL_TREE;
3366 /* Get all the information about the extractions being done. If the bit size
3367 is the same as the size of the underlying object, we aren't doing an
3368 extraction at all and so can do nothing. We also don't want to
3369 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3370 then will no longer be able to replace it. */
3371 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3372 &lunsignedp, &lvolatilep, false);
3373 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3374 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3379 /* If this is not a constant, we can only do something if bit positions,
3380 sizes, and signedness are the same. */
3381 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3382 &runsignedp, &rvolatilep, false);
3384 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3385 || lunsignedp != runsignedp || offset != 0
3386 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3390 /* See if we can find a mode to refer to this field. We should be able to,
3391 but fail if we can't. */
3393 && GET_MODE_BITSIZE (lmode) > 0
3394 && flag_strict_volatile_bitfields > 0)
3397 nmode = get_best_mode (lbitsize, lbitpos,
3398 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3399 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3400 TYPE_ALIGN (TREE_TYPE (rinner))),
3401 word_mode, lvolatilep || rvolatilep);
3402 if (nmode == VOIDmode)
3405 /* Set signed and unsigned types of the precision of this mode for the shifts below. */
3407 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3408 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3410 /* Compute the bit position and size for the new reference and our offset
3411 within it. If the new reference is the same size as the original, we
3412 won't optimize anything, so return zero. */
3413 nbitsize = GET_MODE_BITSIZE (nmode);
3414 nbitpos = lbitpos & ~ (nbitsize - 1);
3416 if (nbitsize == lbitsize)
3419 if (BYTES_BIG_ENDIAN)
3420 lbitpos = nbitsize - lbitsize - lbitpos;
3422 /* Make the mask to be used against the extracted field. */
3423 mask = build_int_cst_type (unsigned_type, -1);
3424 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3425 mask = const_binop (RSHIFT_EXPR, mask,
3426 size_int (nbitsize - lbitsize - lbitpos));
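  /* Worked example: with nbitsize == 32, lbitsize == 4 and (after the
     endianness adjustment) lbitpos == 8, the two shifts give

	~0 << 28  ==  0xf0000000,   then   >> 20  ==  0x00000f00

     i.e. an unsigned mask covering exactly bits 8..11.  */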
3429 /* If not comparing with constant, just rework the comparison and return. */
3431 return fold_build2_loc (loc, code, compare_type,
3432 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3433 make_bit_field_ref (loc, linner,
3438 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3439 make_bit_field_ref (loc, rinner,
3445 /* Otherwise, we are handling the constant case. See if the constant is too
3446 big for the field. Warn and return a tree for 0 (false) if so. We do
3447 this not only for its own sake, but to avoid having to test for this
3448 error case below. If we didn't, we might generate wrong code.
3450 For unsigned fields, the constant shifted right by the field length should
3451 be all zero. For signed fields, the high-order bits should agree with the sign bit. */
3456 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3457 fold_convert_loc (loc,
3458 unsigned_type, rhs),
3459 size_int (lbitsize))))
3461 warning (0, "comparison is always %d due to width of bit-field",
3463 return constant_boolean_node (code == NE_EXPR, compare_type);