/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
   2012 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "tree-flow.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */

int folding_initializer = 0;
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */

enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
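
/* Illustrative sketch (not part of the original source): each of the
   four bits stands for one primitive outcome -- LT = 1, EQ = 2, GT = 4
   and UNORD = 8 -- so a comparison's code is simply the OR of the
   outcomes for which it yields true, e.g. LE = LT|EQ = 3 and
   NE = LT|GT|UNORD = 13.  Conjunction and disjunction of two
   comparisons on the same operands then reduce to bitwise AND/OR:

     (COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ
     (COMPCODE_LT | COMPCODE_GT) == COMPCODE_LTGT

   which is exactly what combine_comparisons below relies on.  */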
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc != UNKNOWN_LOCATION ? tloc : loc;
}
/* Similar to protected_set_expr_location, but never modify x in place,
   if location can and needs to be set, unshare it.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */

#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
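
/* Illustrative sketch (not part of the original source), assuming a
   32-bit HOST_WIDE_INT: adding 0x7fffffff and 1 wraps to 0x80000000,
   so the operands agree in sign while the truncated sum differs, and
   the macro reports the overflow:

     HOST_WIDE_INT a = 0x7fffffff, b = 1;
     HOST_WIDE_INT sum = a + b;        (wraps to the most negative value)
     OVERFLOW_SUM_SIGN (a, b, sum)     nonzero: overflow occurred
     OVERFLOW_SUM_SIGN (1, 2, 3)       zero: no overflow  */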
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is according to operand two, that
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  quo = double_int_divmod (tree_to_double_int (arg1),
			   tree_to_double_int (arg2),
			   uns, code, &rem);

  if (double_int_zero_p (rem))
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
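
/* Illustrative sketch (not part of the original source): for sizetype
   constants, div_if_zero_remainder (EXACT_DIV_EXPR, 12, 4) folds to the
   constant 3, while div_if_zero_remainder (EXACT_DIV_EXPR, 12, 5)
   returns NULL_TREE because the remainder 2 is nonzero.  */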
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;
/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}
/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }

  return false;
}
/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
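
/* Illustrative sketch (not part of the original source): in a signed
   8-bit type the only value whose negation overflows is -128, whose
   low bits are exactly 1 << (prec - 1) == 0x80, so the function
   returns false for it; for every other value, e.g. -127 (0x81), it
   returns true.  */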
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      negate_expr (tem), TREE_OPERAND (t, 1));
	    }
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
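
/* Illustrative sketch (not part of the original source): folding the
   negation of A - B when signed zeros need not be honored rewrites it
   to B - A, and -(~x) on an integral type becomes x + 1; when no
   simplification applies, the result is simply a NEGATE_EXPR wrapped
   around T.  */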
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */
static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
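
/* Illustrative sketch (not part of the original source): splitting
   IN = (x + 3) with CODE = PLUS_EXPR yields *LITP = 3, *CONP = 0 and
   returns x as the variable part; splitting IN = (x - 3) instead
   stores 3 in *MINUS_LITP.  Callers can then recombine the pieces
   with associate_trees below.  */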
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = double_int_ior (op1, op2);
      break;

    case BIT_XOR_EXPR:
      res = double_int_xor (op1, op2);
      break;

    case BIT_AND_EXPR:
      res = double_int_and (op1, op2);
      break;

    case RSHIFT_EXPR:
      res = double_int_rshift (op1, double_int_to_shwi (op2),
			       TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = double_int_lshift (op1, double_int_to_shwi (op2),
			       TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = double_int_rrotate (op1, double_int_to_shwi (op2),
				TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = double_int_lrotate (op1, double_int_to_shwi (op2),
				TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
			     &res.low, &res.high);
      break;

    case MINUS_EXPR:
      neg_double (op2.low, op2.high, &res.low, &res.high);
      add_double (op1.low, op1.high, res.low, res.high,
		  &res.low, &res.high);
      overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
      break;

    case MULT_EXPR:
      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
			     &res.low, &res.high);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    op1.low += op2.low - 1;

	  res.low = op1.low / op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (double_int_zero_p (op2))
	return NULL_TREE;
      if (double_int_one_p (op2))
	{
	  res = op1;
	  break;
	}
      if (double_int_equal_p (op1, op2)
	  && ! double_int_zero_p (op1))
	{
	  res = double_int_one;
	  break;
	}
      overflow = div_and_round_double (code, uns,
				       op1.low, op1.high, op2.low, op2.high,
				       &res.low, &res.high,
				       &tmp.low, &tmp.high);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
	  && !TREE_OVERFLOW (arg1)
	  && !TREE_OVERFLOW (arg2)
	  && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    op1.low += op2.low - 1;
	  res.low = op1.low % op2.low, res.high = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (double_int_zero_p (op2))
	return NULL_TREE;
      overflow = div_and_round_double (code, uns,
				       op1.low, op1.high, op2.low, op2.high,
				       &tmp.low, &tmp.high,
				       &res.low, &res.high);
      break;

    case MIN_EXPR:
      res = double_int_min (op1, op2, uns);
      break;

    case MAX_EXPR:
      res = double_int_max (op1, op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, 1,
			     ((!uns || is_sizetype) && overflow)
			     | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}
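
/* Illustrative sketch (not part of the original source): given two
   INTEGER_CSTs of the same type, say trees for 7 and 5 (the names
   arg7/arg5 below are hypothetical),

     tree sum = int_const_binop (PLUS_EXPR, arg7, arg5);

   yields the constant 12, with TREE_OVERFLOW set on the result if the
   signed addition wrapped.  */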
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  f2.data.high = TREE_INT_CST_HIGH (arg2);
	  f2.data.low = TREE_INT_CST_LOW (arg2);
	  f2.mode = SImode;
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fall through.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to minimize
		 overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }
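
  /* Illustrative sketch (not part of the original source): with the
     straightforward algorithm above, (3 + 4i) / (1 + 2i) gives
     t = 1*1 + 2*2 = 5, tr = (3*1 + 4*2)/5 = 11/5 and
     ti = (4*1 - 3*2)/5 = -2/5, i.e. 2.2 - 0.4i.  */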
  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
	return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
	{
	  tree elem1, elem2, elem;

	  /* The trailing elements can be empty and should be treated as 0.  */
	  if (!elements1)
	    elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					integer_zero_node);
	  else
	    {
	      elem1 = TREE_VALUE (elements1);
	      elements1 = TREE_CHAIN (elements1);
	    }

	  if (!elements2)
	    elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					integer_zero_node);
	  else
	    {
	      elem2 = TREE_VALUE (elements2);
	      elements2 = TREE_CHAIN (elements2);
	    }

	  elem = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and return NULL_TREE.  */
	  if (elem == NULL_TREE)
	    return NULL_TREE;

	  list = tree_cons (NULL_TREE, elem, list);
	}

      return build_vector (type, nreverse (list));
    }

  return NULL_TREE;
}
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
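
/* Illustrative sketch (not part of the original source): for sizetype
   constants 4 and 7, size_diffop_loc returns the ssizetype constant
   -3, computed as 0 - (7 - 4) so that neither subtraction overflows
   the unsigned type.  */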
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */
  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  val = tree_to_double_int (lt);
	}
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      val = tree_to_double_int (ut);
	    }
	}
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}
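
/* Illustrative sketch (not part of the original source): converting
   the REAL_CST 1e30 to a 32-bit signed type saturates to INT_MAX
   (2147483647) with TREE_OVERFLOW set, and NaN converts to 0,
   following the Java-style rules described above.  */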
/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  enum machine_mode mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
    {
      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
				HOST_BITS_PER_DOUBLE_INT,
				SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
				      HOST_BITS_PER_DOUBLE_INT,
				      SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0.
     By checking if the fractional bits are not zero to add 1 to temp.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && double_int_negative_p (temp_trunc)
      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
    temp = double_int_add (temp, double_int_one);

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
			     (double_int_negative_p (temp)
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));

  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
	   && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
	   && REAL_VALUE_ISINF (value)
	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}
/* A subroutine of fold_convert_const handling conversions a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
			      TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions an INTEGER_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
				       TREE_INT_CST (arg1),
				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
				       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
					&TREE_REAL_CST (arg1),
					TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}
/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
	return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
	return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}
/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}
/* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */

bool
fold_convertible_p (const_tree type, const_tree arg)
{
  tree orig = TREE_TYPE (arg);

  if (type == orig)
    return true;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return false;

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
    return true;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return true;
      return (TREE_CODE (orig) == VECTOR_TYPE
	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));

    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
    case VOID_TYPE:
      return TREE_CODE (type) == TREE_CODE (orig);

    default:
      return false;
    }
}
/* Convert expression ARG to type TYPE.  Used by the middle-end for
   simple conversions in preference to calling the front-end's convert.  */

tree
fold_convert_loc (location_t loc, tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Handle conversions between pointers to different address spaces.  */
      if (POINTER_TYPE_P (orig)
	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
      /* fall through */

    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	return fold_convert_loc (loc, type,
				 fold_build1_loc (loc, REALPART_EXPR,
						  TREE_TYPE (orig), arg));
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1_loc (loc, NOP_EXPR, type, arg);

    case REAL_TYPE:
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == FIXED_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1_loc (loc, NOP_EXPR, type, arg);

	case FIXED_POINT_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case FIXED_POINT_TYPE:
      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
	  || TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    goto fold_convert_exit;
	}

      switch (TREE_CODE (orig))
	{
	case FIXED_POINT_TYPE:
	case INTEGER_TYPE:
	case ENUMERAL_TYPE:
	case BOOLEAN_TYPE:
	case REAL_TYPE:
	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);

	case COMPLEX_TYPE:
	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert_loc (loc, type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	case FIXED_POINT_TYPE:
	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
				  fold_convert_loc (loc, TREE_TYPE (type), arg),
				  fold_convert_loc (loc, TREE_TYPE (type),
						    integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 0));
		ipart = fold_convert_loc (loc, TREE_TYPE (type),
					  TREE_OPERAND (arg, 1));
		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	      }

	    arg = save_expr (arg);
	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      tem = fold_ignored_result (arg);
      return fold_build1_loc (loc, NOP_EXPR, type, tem);

    default:
      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
	return fold_build1_loc (loc, NOP_EXPR, type, arg);
      gcc_unreachable ();
    }
 fold_convert_exit:
  return protected_set_expr_location_unshare (tem, loc);
}
/* Return false if expr can be assumed not to be an lvalue, true
   otherwise.  */

static bool
maybe_lvalue_p (const_tree x)
{
  /* We only need to wrap lvalue tree codes.  */
  switch (TREE_CODE (x))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case SSA_NAME:

    case COMPONENT_REF:
    case MEM_REF:
    case INDIRECT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case BIT_FIELD_REF:
    case OBJ_TYPE_REF:

    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case TARGET_EXPR:
    case COND_EXPR:
    case BIND_EXPR:
      break;

    default:
      /* Assume the worst for front-end tree codes.  */
      if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
	break;
      return false;
    }

  return true;
}
/* Return an expr equal to X but certainly not valid as an lvalue.  */

tree
non_lvalue_loc (location_t loc, tree x)
{
  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
     us.  */
  if (in_gimple_form)
    return x;

  if (! maybe_lvalue_p (x))
    return x;
  return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
}
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  */

int pedantic_lvalues;

/* When pedantic, return an expr equal to X but certainly not valid as a
   pedantic lvalue.  Otherwise, return X.  */

static tree
pedantic_non_lvalue_loc (location_t loc, tree x)
{
  if (pedantic_lvalues)
    return non_lvalue_loc (loc, x);

  return protected_set_expr_location_unshare (x, loc);
}
/* Given a tree comparison code, return the code that is the logical inverse.
   It is generally not safe to do this for floating-point comparisons, except
   for EQ_EXPR and NE_EXPR, so we return ERROR_MARK in this case.  */

enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
2149 swapped. This is safe for floating-point. */
2152 swap_tree_comparison (enum tree_code code)
2159 case UNORDERED_EXPR:
/* Convert a comparison tree code from an enum tree_code representation
   into a compcode bit-based encoding.  This function is the inverse of
   compcode_to_comparison.  */

static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
/* Convert a compcode bit-based encoding of a comparison operator back
   to GCC's enum tree_code representation.  This function is the
   inverse of comparison_to_compcode.  */

static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
2269 /* Return a tree for the comparison which is the combination of
2270 doing the AND or OR (depending on CODE) of the two operations LCODE
2271 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2272 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2273 if this makes the transformation invalid. */
2276 combine_comparisons (location_t loc,
2277 enum tree_code code, enum tree_code lcode,
2278 enum tree_code rcode, tree truth_type,
2279 tree ll_arg, tree lr_arg)
2281 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2282 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2283 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2288 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2289 compcode = lcompcode & rcompcode;
2292 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2293 compcode = lcompcode | rcompcode;
2302 /* Eliminate unordered comparisons, as well as LTGT and ORD
2303 which are not used unless the mode has NaNs. */
2304 compcode &= ~COMPCODE_UNORD;
2305 if (compcode == COMPCODE_LTGT)
2306 compcode = COMPCODE_NE;
2307 else if (compcode == COMPCODE_ORD)
2308 compcode = COMPCODE_TRUE;
2310 else if (flag_trapping_math)
2312 /* Check that the original operation and the optimized ones will trap
2313 under the same condition. */
2314 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2315 && (lcompcode != COMPCODE_EQ)
2316 && (lcompcode != COMPCODE_ORD);
2317 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2318 && (rcompcode != COMPCODE_EQ)
2319 && (rcompcode != COMPCODE_ORD);
2320 bool trap = (compcode & COMPCODE_UNORD) == 0
2321 && (compcode != COMPCODE_EQ)
2322 && (compcode != COMPCODE_ORD);
2324 /* In a short-circuited boolean expression the LHS might be
2325 such that the RHS, if evaluated, will never trap. For
2326 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2327 if neither x nor y is NaN. (This is a mixed blessing: for
2328 example, the expression above will never trap, hence
2329 optimizing it to x < y would be invalid). */
2330 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2331 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2334 /* If the comparison was short-circuited, and only the RHS
2335 trapped, we may now generate a spurious trap. */
2337 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2340 /* If we changed the conditions that cause a trap, we lose. */
2341 if ((ltrap || rtrap) != trap)
2345 if (compcode == COMPCODE_TRUE)
2346 return constant_boolean_node (true, truth_type);
2347 else if (compcode == COMPCODE_FALSE)
2348 return constant_boolean_node (false, truth_type);
2351 enum tree_code tcode;
2353 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2354 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
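/* Example use (illustrative only): for integer operands a and b,

     combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
                          boolean_type_node, a, b)

   ORs the compcodes of the two comparisons and returns the tree for
   "a <= b".  */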
2358 /* Return nonzero if two operands (typically of the same tree node)
2359 are necessarily equal. If either argument has side-effects this
2360 function returns zero. FLAGS modifies behavior as follows:
2362 If OEP_ONLY_CONST is set, only return nonzero for constants.
2363 This function tests whether the operands are indistinguishable;
2364 it does not test whether they are equal using C's == operation.
2365 The distinction is important for IEEE floating point, because
2366 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2367 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2369 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2370 even though it may hold multiple values during a function.
2371 This is because a GCC tree node guarantees that nothing else is
2372 executed between the evaluation of its "operands" (which may often
2373 be evaluated in arbitrary order). Hence if the operands themselves
2374 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2375 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2376 unset means assuming isochronic (or instantaneous) tree equivalence.
2377 Unless comparing arbitrary expression trees, such as from different
2378 statements, this flag can usually be left unset.
2380 If OEP_PURE_SAME is set, then pure functions with identical arguments
2381 are considered the same. It is used when the caller has other ways
2382 to ensure that global memory is unchanged in between. */
2385 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2387 /* If either is ERROR_MARK, they aren't equal. */
2388 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2389 || TREE_TYPE (arg0) == error_mark_node
2390 || TREE_TYPE (arg1) == error_mark_node)
2393 /* Similarly, if either does not have a type (like a released SSA name),
2394 they aren't equal. */
2395 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2398 /* Check equality of integer constants before bailing out due to
2399 precision differences. */
2400 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2401 return tree_int_cst_equal (arg0, arg1);
2403 /* If the two types don't have the same signedness, then we can't consider
2404 them equal. We must check this before the STRIP_NOPS calls
2405 because they may change the signedness of the arguments. As pointers
2406 strictly don't have a signedness, require either two pointers or
2407 two non-pointers as well. */
2408 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2409 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2412 /* We cannot consider pointers to different address spaces equal. */
2413 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2414 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2415 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2418 /* If the two types don't have the same precision, then it is not safe to strip NOPs. */
2420 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2426 /* In case both args are comparisons but with different comparison
2427 code, try to swap the comparison operands of one arg to produce
2428 a match and compare that variant. */
2429 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2430 && COMPARISON_CLASS_P (arg0)
2431 && COMPARISON_CLASS_P (arg1))
2433 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2435 if (TREE_CODE (arg0) == swap_code)
2436 return operand_equal_p (TREE_OPERAND (arg0, 0),
2437 TREE_OPERAND (arg1, 1), flags)
2438 && operand_equal_p (TREE_OPERAND (arg0, 1),
2439 TREE_OPERAND (arg1, 0), flags);
2442 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2443 /* This is needed for conversions and for COMPONENT_REF.
2444 Might as well play it safe and always test this. */
2445 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2446 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2447 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2450 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2451 We don't care about side effects in that case because the SAVE_EXPR
2452 takes care of that for us. In all other cases, two expressions are
2453 equal if they have no side effects. If we have two identical
2454 expressions with side effects that should be treated the same due
2455 to the only side effects being identical SAVE_EXPR's, that will
2456 be detected in the recursive calls below.
2457 If we are taking an invariant address of two identical objects
2458 they are necessarily equal as well. */
2459 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2460 && (TREE_CODE (arg0) == SAVE_EXPR
2461 || (flags & OEP_CONSTANT_ADDRESS_OF)
2462 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2465 /* Next handle constant cases, those for which we can return 1 even
2466 if ONLY_CONST is set. */
2467 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2468 switch (TREE_CODE (arg0))
2471 return tree_int_cst_equal (arg0, arg1);
2474 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2475 TREE_FIXED_CST (arg1));
2478 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2479 TREE_REAL_CST (arg1)))
2483 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2485 /* If we do not distinguish between signed and unsigned zero,
2486 consider them equal. */
2487 if (real_zerop (arg0) && real_zerop (arg1))
2496 v1 = TREE_VECTOR_CST_ELTS (arg0);
2497 v2 = TREE_VECTOR_CST_ELTS (arg1);
2500 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2503 v1 = TREE_CHAIN (v1);
2504 v2 = TREE_CHAIN (v2);
2511 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2513 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2517 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2518 && ! memcmp (TREE_STRING_POINTER (arg0),
2519 TREE_STRING_POINTER (arg1),
2520 TREE_STRING_LENGTH (arg0)));
2523 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2524 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2525 ? OEP_CONSTANT_ADDRESS_OF : 0);
2530 if (flags & OEP_ONLY_CONST)
2533 /* Define macros to test an operand from arg0 and arg1 for equality and a
2534 variant that allows null and views null as being different from any
2535 non-null value. In the latter case, if either is null, then both
2536 must be; otherwise, do the normal comparison. */
2537 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2538 TREE_OPERAND (arg1, N), flags)
2540 #define OP_SAME_WITH_NULL(N) \
2541 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2542 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
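/* For instance (illustrative), OP_SAME (0) recursively compares the
   first operands of arg0 and arg1, while OP_SAME_WITH_NULL (2) also
   treats a pair of null operands as equal, as needed for the optional
   trailing operands of the reference trees handled below.  */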
2544 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2547 /* Two conversions are equal only if signedness and modes match. */
2548 switch (TREE_CODE (arg0))
2551 case FIX_TRUNC_EXPR:
2552 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2553 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2563 case tcc_comparison:
2565 if (OP_SAME (0) && OP_SAME (1))
2568 /* For commutative ops, allow the other order. */
2569 return (commutative_tree_code (TREE_CODE (arg0))
2570 && operand_equal_p (TREE_OPERAND (arg0, 0),
2571 TREE_OPERAND (arg1, 1), flags)
2572 && operand_equal_p (TREE_OPERAND (arg0, 1),
2573 TREE_OPERAND (arg1, 0), flags));
2576 /* If either of the pointer (or reference) expressions we are
2577 dereferencing contains a side effect, these cannot be equal. */
2578 if (TREE_SIDE_EFFECTS (arg0)
2579 || TREE_SIDE_EFFECTS (arg1))
2582 switch (TREE_CODE (arg0))
2590 /* Require equal access sizes, and similar pointer types.
2591 We can have incomplete types for array references of
2592 variable-sized arrays from the Fortran frontend. */
2594 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2595 || (TYPE_SIZE (TREE_TYPE (arg0))
2596 && TYPE_SIZE (TREE_TYPE (arg1))
2597 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2598 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2599 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2600 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2601 && OP_SAME (0) && OP_SAME (1));
2604 case ARRAY_RANGE_REF:
2605 /* Operands 2 and 3 may be null.
2606 Compare the array index by value first if it is constant, as we
2607 may have different types but the same value here. */
2609 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2610 TREE_OPERAND (arg1, 1))
2612 && OP_SAME_WITH_NULL (2)
2613 && OP_SAME_WITH_NULL (3));
2616 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2617 may be NULL when we're called to compare MEM_EXPRs. */
2618 return OP_SAME_WITH_NULL (0)
2620 && OP_SAME_WITH_NULL (2);
2623 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2629 case tcc_expression:
2630 switch (TREE_CODE (arg0))
2633 case TRUTH_NOT_EXPR:
2636 case TRUTH_ANDIF_EXPR:
2637 case TRUTH_ORIF_EXPR:
2638 return OP_SAME (0) && OP_SAME (1);
2641 case WIDEN_MULT_PLUS_EXPR:
2642 case WIDEN_MULT_MINUS_EXPR:
2645 /* The multiplication operands are commutative. */
2648 case TRUTH_AND_EXPR:
2650 case TRUTH_XOR_EXPR:
2651 if (OP_SAME (0) && OP_SAME (1))
2654 /* Otherwise, take into account that this is a commutative operation. */
2655 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2656 TREE_OPERAND (arg1, 1), flags)
2657 && operand_equal_p (TREE_OPERAND (arg0, 1),
2658 TREE_OPERAND (arg1, 0), flags));
2663 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2670 switch (TREE_CODE (arg0))
2673 /* If the CALL_EXPRs call different functions, then they
2674 clearly cannot be equal. */
2675 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2680 unsigned int cef = call_expr_flags (arg0);
2681 if (flags & OEP_PURE_SAME)
2682 cef &= ECF_CONST | ECF_PURE;
2689 /* Now see if all the arguments are the same. */
2691 const_call_expr_arg_iterator iter0, iter1;
2693 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2694 a1 = first_const_call_expr_arg (arg1, &iter1);
2696 a0 = next_const_call_expr_arg (&iter0),
2697 a1 = next_const_call_expr_arg (&iter1))
2698 if (! operand_equal_p (a0, a1, flags))
2701 /* If we get here and both argument lists are exhausted
2702 then the CALL_EXPRs are equal. */
2703 return ! (a0 || a1);
2709 case tcc_declaration:
2710 /* Consider __builtin_sqrt equal to sqrt. */
2711 return (TREE_CODE (arg0) == FUNCTION_DECL
2712 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2713 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2714 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2721 #undef OP_SAME_WITH_NULL
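/* A sketch of typical use (not from the original file): a caller can
   fold "x - x" to zero for integral types only when the two operands
   are provably the same expression:

     if (INTEGRAL_TYPE_P (type)
         && operand_equal_p (arg0, arg1, 0))
       return build_zero_cst (type);

   Passing OEP_ONLY_CONST instead would restrict the test to constant
   operands.  */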
2724 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2725 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2727 When in doubt, return 0. */
2730 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2732 int unsignedp1, unsignedpo;
2733 tree primarg0, primarg1, primother;
2734 unsigned int correct_width;
2736 if (operand_equal_p (arg0, arg1, 0))
2739 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2740 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2743 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2744 and see if the inner values are the same. This removes any
2745 signedness comparison, which doesn't matter here. */
2746 primarg0 = arg0, primarg1 = arg1;
2747 STRIP_NOPS (primarg0);
2748 STRIP_NOPS (primarg1);
2749 if (operand_equal_p (primarg0, primarg1, 0))
2752 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2753 actual comparison operand, ARG0.
2755 First throw away any conversions to wider types
2756 already present in the operands. */
2758 primarg1 = get_narrower (arg1, &unsignedp1);
2759 primother = get_narrower (other, &unsignedpo);
2761 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2762 if (unsignedp1 == unsignedpo
2763 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2764 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2766 tree type = TREE_TYPE (arg0);
2768 /* Make sure the shorter operand is extended the right way
2769 to match the longer operand. */
2770 primarg1 = fold_convert (signed_or_unsigned_type_for
2771 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2773 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
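/* Illustrative example: if ARG1 was "(int) c" for some char c being
   compared against the constant 10, shorten_compare may have narrowed
   the comparison to operate on c directly; this function then
   recognizes ARG0 == c as such a narrowed form of ARG1.  */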
2780 /* See if ARG is an expression that is either a comparison or is performing
2781 arithmetic on comparisons. The comparisons must only be comparing
2782 two different values, which will be stored in *CVAL1 and *CVAL2; if
2783 they are nonzero it means that some operands have already been found.
2784 No variables may be used anywhere else in the expression except in the
2785 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2786 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2788 If this is true, return 1. Otherwise, return zero. */
2791 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2793 enum tree_code code = TREE_CODE (arg);
2794 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2796 /* We can handle some of the tcc_expression cases here. */
2797 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2799 else if (tclass == tcc_expression
2800 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2801 || code == COMPOUND_EXPR))
2802 tclass = tcc_binary;
2804 else if (tclass == tcc_expression && code == SAVE_EXPR
2805 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2807 /* If we've already found a CVAL1 or CVAL2, this expression is
2808 too complex to handle. */
2809 if (*cval1 || *cval2)
2819 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2822 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2823 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2824 cval1, cval2, save_p));
2829 case tcc_expression:
2830 if (code == COND_EXPR)
2831 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2832 cval1, cval2, save_p)
2833 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2834 cval1, cval2, save_p)
2835 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2836 cval1, cval2, save_p));
2839 case tcc_comparison:
2840 /* First see if we can handle the first operand, then the second. For
2841 the second operand, we know *CVAL1 can't be zero. It must be that
2842 one side of the comparison is each of the values; test for the
2843 case where this isn't true by failing if the two operands are the same. */
2846 if (operand_equal_p (TREE_OPERAND (arg, 0),
2847 TREE_OPERAND (arg, 1), 0))
2851 *cval1 = TREE_OPERAND (arg, 0);
2852 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2854 else if (*cval2 == 0)
2855 *cval2 = TREE_OPERAND (arg, 0);
2856 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2861 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2863 else if (*cval2 == 0)
2864 *cval2 = TREE_OPERAND (arg, 1);
2865 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
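/* For example (illustrative): given ARG of the form
   "(a == b) | (a < b)", this function returns 1 with a stored in
   *CVAL1 and b in *CVAL2, whereas "(a == b) | (c < d)" involves more
   than two values and is rejected.  */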
2877 /* ARG is a tree that is known to contain just arithmetic operations and
2878 comparisons. Evaluate the operations in the tree substituting NEW0 for
2879 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
2883 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2884 tree old1, tree new1)
2886 tree type = TREE_TYPE (arg);
2887 enum tree_code code = TREE_CODE (arg);
2888 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2890 /* We can handle some of the tcc_expression cases here. */
2891 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2893 else if (tclass == tcc_expression
2894 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2895 tclass = tcc_binary;
2900 return fold_build1_loc (loc, code, type,
2901 eval_subst (loc, TREE_OPERAND (arg, 0),
2902 old0, new0, old1, new1));
2905 return fold_build2_loc (loc, code, type,
2906 eval_subst (loc, TREE_OPERAND (arg, 0),
2907 old0, new0, old1, new1),
2908 eval_subst (loc, TREE_OPERAND (arg, 1),
2909 old0, new0, old1, new1));
2911 case tcc_expression:
2915 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2919 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2923 return fold_build3_loc (loc, code, type,
2924 eval_subst (loc, TREE_OPERAND (arg, 0),
2925 old0, new0, old1, new1),
2926 eval_subst (loc, TREE_OPERAND (arg, 1),
2927 old0, new0, old1, new1),
2928 eval_subst (loc, TREE_OPERAND (arg, 2),
2929 old0, new0, old1, new1));
2933 /* Fall through - ??? */
2935 case tcc_comparison:
2937 tree arg0 = TREE_OPERAND (arg, 0);
2938 tree arg1 = TREE_OPERAND (arg, 1);
2940 /* We need to check both for exact equality and tree equality. The
2941 former will be true if the operand has a side-effect. In that
2942 case, we know the operand occurred exactly once. */
2944 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2946 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2949 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2951 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2954 return fold_build2_loc (loc, code, type, arg0, arg1);
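/* Illustrative example: with ARG = "(a < b) | (a == b)",
   eval_subst (loc, arg, a, x, b, y) rebuilds the tree as
   "(x < y) | (x == y)", substituting only within the comparison
   operands.  */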
2962 /* Return a tree for the case when the result of an expression is RESULT
2963 converted to TYPE and OMITTED was previously an operand of the expression
2964 but is now not needed (e.g., we folded OMITTED * 0).
2966 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2967 the conversion of RESULT to TYPE. */
2970 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
2972 tree t = fold_convert_loc (loc, type, result);
2974 /* If the resulting operand is an empty statement, just return the omitted
2975 statement cast to void. */
2976 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2977 return build1_loc (loc, NOP_EXPR, void_type_node,
2978 fold_ignored_result (omitted));
2980 if (TREE_SIDE_EFFECTS (omitted))
2981 return build2_loc (loc, COMPOUND_EXPR, type,
2982 fold_ignored_result (omitted), t);
2984 return non_lvalue_loc (loc, t);
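/* For example (illustrative): folding "f () * 0" cannot simply drop
   the call when f has side effects; omit_one_operand_loc instead
   produces

     COMPOUND_EXPR <f (), 0>

   so f is still evaluated while the value of the whole expression is
   the folded result.  */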
2987 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2990 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
2993 tree t = fold_convert_loc (loc, type, result);
2995 /* If the resulting operand is an empty statement, just return the omitted
2996 statement cast to void. */
2997 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
2998 return build1_loc (loc, NOP_EXPR, void_type_node,
2999 fold_ignored_result (omitted));
3001 if (TREE_SIDE_EFFECTS (omitted))
3002 return build2_loc (loc, COMPOUND_EXPR, type,
3003 fold_ignored_result (omitted), t);
3005 return pedantic_non_lvalue_loc (loc, t);
3008 /* Return a tree for the case when the result of an expression is RESULT
3009 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3010 of the expression but are now not needed.
3012 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3013 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3014 evaluated before OMITTED2. Otherwise, if neither has side effects,
3015 just do the conversion of RESULT to TYPE. */
3018 omit_two_operands_loc (location_t loc, tree type, tree result,
3019 tree omitted1, tree omitted2)
3021 tree t = fold_convert_loc (loc, type, result);
3023 if (TREE_SIDE_EFFECTS (omitted2))
3024 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3025 if (TREE_SIDE_EFFECTS (omitted1))
3026 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3028 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
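/* Likewise (illustrative): when both omitted operands have side
   effects the result nests as

     COMPOUND_EXPR <omitted1, COMPOUND_EXPR <omitted2, result>>

   preserving the documented evaluation of OMITTED1 before
   OMITTED2.  */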
3032 /* Return a simplified tree node for the truth-negation of ARG. This
3033 never alters ARG itself. We assume that ARG is an operation that
3034 returns a truth value (0 or 1).
3036 FIXME: one would think we would fold the result, but it causes
3037 problems with the dominator optimizer. */
3040 fold_truth_not_expr (location_t loc, tree arg)
3042 tree type = TREE_TYPE (arg);
3043 enum tree_code code = TREE_CODE (arg);
3044 location_t loc1, loc2;
3046 /* If this is a comparison, we can simply invert it, except for
3047 floating-point non-equality comparisons, in which case we just
3048 enclose a TRUTH_NOT_EXPR around what we have. */
3050 if (TREE_CODE_CLASS (code) == tcc_comparison)
3052 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3053 if (FLOAT_TYPE_P (op_type)
3054 && flag_trapping_math
3055 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3056 && code != NE_EXPR && code != EQ_EXPR)
3059 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3060 if (code == ERROR_MARK)
3063 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3064 TREE_OPERAND (arg, 1));
3070 return constant_boolean_node (integer_zerop (arg), type);
3072 case TRUTH_AND_EXPR:
3073 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3074 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3075 return build2_loc (loc, TRUTH_OR_EXPR, type,
3076 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3077 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3080 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3081 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3082 return build2_loc (loc, TRUTH_AND_EXPR, type,
3083 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3084 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3086 case TRUTH_XOR_EXPR:
3087 /* Here we can invert either operand. We invert the first operand
3088 unless the second operand is a TRUTH_NOT_EXPR in which case our
3089 result is the XOR of the first operand with the inside of the
3090 negation of the second operand. */
3092 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3093 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3094 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3096 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3097 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3098 TREE_OPERAND (arg, 1));
3100 case TRUTH_ANDIF_EXPR:
3101 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3102 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3103 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3104 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3105 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3107 case TRUTH_ORIF_EXPR:
3108 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3109 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3110 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3111 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3112 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3114 case TRUTH_NOT_EXPR:
3115 return TREE_OPERAND (arg, 0);
3119 tree arg1 = TREE_OPERAND (arg, 1);
3120 tree arg2 = TREE_OPERAND (arg, 2);
3122 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3123 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3125 /* A COND_EXPR may have a throw as one operand, which
3126 then has void type. Just leave void operands as they are. */
3128 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3129 VOID_TYPE_P (TREE_TYPE (arg1))
3130 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3131 VOID_TYPE_P (TREE_TYPE (arg2))
3132 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3136 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3137 return build2_loc (loc, COMPOUND_EXPR, type,
3138 TREE_OPERAND (arg, 0),
3139 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3141 case NON_LVALUE_EXPR:
3142 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3143 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3146 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3147 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3149 /* ... fall through ... */
3152 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3153 return build1_loc (loc, TREE_CODE (arg), type,
3154 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3157 if (!integer_onep (TREE_OPERAND (arg, 1)))
3159 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3162 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3164 case CLEANUP_POINT_EXPR:
3165 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3166 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3167 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
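/* Illustrative examples: "!(a < b)" becomes "a >= b" for integer
   operands, and the De Morgan cases above turn "!(p && q)" into
   "!p || !q".  For floating-point operands under -ftrapping-math the
   comparison is instead left wrapped in a TRUTH_NOT_EXPR, since the
   inverted comparison need not raise the same exceptions on NaN
   operands.  */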
3174 /* Return a simplified tree node for the truth-negation of ARG. This
3175 never alters ARG itself. We assume that ARG is an operation that
3176 returns a truth value (0 or 1).
3178 FIXME: one would think we would fold the result, but it causes
3179 problems with the dominator optimizer. */
3182 invert_truthvalue_loc (location_t loc, tree arg)
3186 if (TREE_CODE (arg) == ERROR_MARK)
3189 tem = fold_truth_not_expr (loc, arg);
3191 tem = build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3196 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3197 operands are another bit-wise operation with a common input. If so,
3198 distribute the bit operations to save an operation and possibly two if
3199 constants are involved. For example, convert
3200 (A | B) & (A | C) into A | (B & C)
3201 Further simplification will occur if B and C are constants.
3203 If this optimization cannot be done, 0 will be returned. */
3206 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3207 tree arg0, tree arg1)
3212 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3213 || TREE_CODE (arg0) == code
3214 || (TREE_CODE (arg0) != BIT_AND_EXPR
3215 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3218 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3220 common = TREE_OPERAND (arg0, 0);
3221 left = TREE_OPERAND (arg0, 1);
3222 right = TREE_OPERAND (arg1, 1);
3224 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3226 common = TREE_OPERAND (arg0, 0);
3227 left = TREE_OPERAND (arg0, 1);
3228 right = TREE_OPERAND (arg1, 0);
3230 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3232 common = TREE_OPERAND (arg0, 1);
3233 left = TREE_OPERAND (arg0, 0);
3234 right = TREE_OPERAND (arg1, 1);
3236 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3238 common = TREE_OPERAND (arg0, 1);
3239 left = TREE_OPERAND (arg0, 0);
3240 right = TREE_OPERAND (arg1, 0);
3245 common = fold_convert_loc (loc, type, common);
3246 left = fold_convert_loc (loc, type, left);
3247 right = fold_convert_loc (loc, type, right);
3248 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3249 fold_build2_loc (loc, code, type, left, right));
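/* Worked example (illustrative): for "(x | 1) & (x | 2)" the common
   operand is x, so the result is "x | (1 & 2)", which folds further
   to "x | 0" and finally to plain "x".  */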
3252 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3253 with code CODE. This optimization is unsafe. */
3255 distribute_real_division (location_t loc, enum tree_code code, tree type,
3256 tree arg0, tree arg1)
3258 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3259 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3261 /* (A / C) +- (B / C) -> (A +- B) / C. */
3263 && operand_equal_p (TREE_OPERAND (arg0, 1),
3264 TREE_OPERAND (arg1, 1), 0))
3265 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3266 fold_build2_loc (loc, code, type,
3267 TREE_OPERAND (arg0, 0),
3268 TREE_OPERAND (arg1, 0)),
3269 TREE_OPERAND (arg0, 1));
3271 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3272 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3273 TREE_OPERAND (arg1, 0), 0)
3274 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3275 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3277 REAL_VALUE_TYPE r0, r1;
3278 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3279 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3281 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3283 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3284 real_arithmetic (&r0, code, &r0, &r1);
3285 return fold_build2_loc (loc, MULT_EXPR, type,
3286 TREE_OPERAND (arg0, 0),
3287 build_real (type, r0));
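/* Worked example (illustrative, valid only under unsafe math
   assumptions as noted above): "x / 2.0 + x / 4.0" becomes
   "x * (1/2.0 + 1/4.0)", i.e. "x * 0.75", trading two divisions for
   one multiplication.  */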
3293 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3294 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3297 make_bit_field_ref (location_t loc, tree inner, tree type,
3298 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3300 tree result, bftype;
3304 tree size = TYPE_SIZE (TREE_TYPE (inner));
3305 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3306 || POINTER_TYPE_P (TREE_TYPE (inner)))
3307 && host_integerp (size, 0)
3308 && tree_low_cst (size, 0) == bitsize)
3309 return fold_convert_loc (loc, type, inner);
3313 if (TYPE_PRECISION (bftype) != bitsize
3314 || TYPE_UNSIGNED (bftype) == !unsignedp)
3315 bftype = build_nonstandard_integer_type (bitsize, 0);
3317 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3318 size_int (bitsize), bitsize_int (bitpos));
3321 result = fold_convert_loc (loc, type, result);
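/* For example (illustrative): requesting 8 bits at bit position 16 of
   a 32-bit INNER builds roughly

     BIT_FIELD_REF <inner, 8, 16>

   converted to TYPE, unless INNER already has exactly the requested
   width, in which case a plain conversion suffices.  */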
3326 /* Optimize a bit-field compare.
3328 There are two cases: First is a compare against a constant and the
3329 second is a comparison of two items where the fields are at the same
3330 bit position relative to the start of a chunk (byte, halfword, word)
3331 large enough to contain it. In these cases we can avoid the shift
3332 implicit in bitfield extractions.
3334 For constants, we emit a compare of the shifted constant with the
3335 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3336 compared. For two fields at the same position, we do the ANDs with the
3337 similar mask and compare the result of the ANDs.
3339 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3340 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3341 are the left and right operands of the comparison, respectively.
3343 If the optimization described above can be done, we return the resulting
3344 tree. Otherwise we return zero. */
3347 optimize_bit_field_compare (location_t loc, enum tree_code code,
3348 tree compare_type, tree lhs, tree rhs)
3350 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3351 tree type = TREE_TYPE (lhs);
3352 tree signed_type, unsigned_type;
3353 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3354 enum machine_mode lmode, rmode, nmode;
3355 int lunsignedp, runsignedp;
3356 int lvolatilep = 0, rvolatilep = 0;
3357 tree linner, rinner = NULL_TREE;
3361 /* Get all the information about the extractions being done. If the bit size
3362 is the same as the size of the underlying object, we aren't doing an
3363 extraction at all and so can do nothing. We also don't want to
3364 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3365 then will no longer be able to replace it. */
3366 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3367 &lunsignedp, &lvolatilep, false);
3368 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3369 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3374 /* If this is not a constant, we can only do something if bit positions,
3375 sizes, and signedness are the same. */
3376 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3377 &runsignedp, &rvolatilep, false);
3379 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3380 || lunsignedp != runsignedp || offset != 0
3381 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3385 /* See if we can find a mode to refer to this field. We should be able to,
3386 but fail if we can't. */
3388 && GET_MODE_BITSIZE (lmode) > 0
3389 && flag_strict_volatile_bitfields > 0)
3392 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3393 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3394 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3395 TYPE_ALIGN (TREE_TYPE (rinner))),
3396 word_mode, lvolatilep || rvolatilep);
3397 if (nmode == VOIDmode)
3400 /* Set signed and unsigned types of the precision of this mode for the shifts below. */
3402 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3403 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3405 /* Compute the bit position and size for the new reference and our offset
3406 within it. If the new reference is the same size as the original, we
3407 won't optimize anything, so return zero. */
3408 nbitsize = GET_MODE_BITSIZE (nmode);
3409 nbitpos = lbitpos & ~ (nbitsize - 1);
3411 if (nbitsize == lbitsize)
3414 if (BYTES_BIG_ENDIAN)
3415 lbitpos = nbitsize - lbitsize - lbitpos;
3417 /* Make the mask to be used against the extracted field. */
3418 mask = build_int_cst_type (unsigned_type, -1);
3419 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3420 mask = const_binop (RSHIFT_EXPR, mask,
3421 size_int (nbitsize - lbitsize - lbitpos));
3424 /* If not comparing with constant, just rework the comparison and return. */
3426 return fold_build2_loc (loc, code, compare_type,
3427 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3428 make_bit_field_ref (loc, linner,
3433 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3434 make_bit_field_ref (loc, rinner,
3440 /* Otherwise, we are handling the constant case. See if the constant is too
3441 big for the field. Warn and return a tree for 0 (false) if so. We do
3442 this not only for its own sake, but to avoid having to test for this
3443 error case below. If we didn't, we might generate wrong code.
3445 For unsigned fields, the constant shifted right by the field length should
3446 be all zero. For signed fields, the high-order bits should agree with
3451 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3452 fold_convert_loc (loc,
3453 unsigned_type, rhs),
3454 size_int (lbitsize))))
3456 warning (0, "comparison is always %d due to width of bit-field",
3458 return constant_boolean_node (code == NE_EXPR, compare_type);
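/* For example (illustrative): if s.f is a 3-bit unsigned bit-field,
   "s.f == 9" can never hold since 9 does not fit in 3 bits, so we
   warn and fold the comparison to constant false (or to constant true
   for "s.f != 9").  */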
3463 tree tem = const_binop (RSHIFT_EXPR,