diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index b04022e1086..a8b6f349d95 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -1,6 +1,7 @@
 /* Fold a constant sub-tree into a single node for C-compiler
    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
-   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
+   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
+   Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -28,7 +29,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
   @@ for cross-compilers.  */
 
 /* The entry points in this file are fold, size_int_wide, size_binop
-   and force_fit_type.
+   and force_fit_type_double.
 
    fold takes a tree as argument and returns a simplified tree.
 
@@ -39,9 +40,13 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
    size_int takes an integer value, and creates a tree constant
    with type from `sizetype'.
 
-   force_fit_type takes a constant, an overflowable flag and prior
-   overflow indicators.  It forces the value to fit the type and sets
-   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
+   force_fit_type_double takes a constant, an overflowable flag and a
+   prior overflow indicator.  It forces the value to fit the type and
+   sets TREE_OVERFLOW.
+
+   Note: Since the folders get called on non-gimple code as well as
+   gimple code, we need to handle GIMPLE tuples as well as their
+   corresponding tree equivalents.  */
 
 #include "config.h"
 #include "system.h"
@@ -54,6 +59,7 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
 #include "expr.h"
 #include "tm_p.h"
 #include "toplev.h"
+#include "intl.h"
 #include "ggc.h"
 #include "hashtab.h"
 #include "langhooks.h"
@@ -114,7 +120,7 @@ static int simple_operand_p (tree);
 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
 static tree range_predecessor (tree);
 static tree range_successor (tree);
-static tree make_range (tree, int *, tree *, tree *);
+static tree make_range (tree, int *, tree *, tree *, bool *);
 static tree build_range_check (tree, tree, int, tree, tree);
 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int,
 			 tree, tree);
@@ -123,9 +129,8 @@ static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
 static tree unextend (tree, int, int, tree);
 static tree fold_truthop (enum tree_code, tree, tree, tree);
 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
-static tree extract_muldiv (tree, tree, enum tree_code, tree);
-static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
-static int multiple_of_p (tree, tree, tree);
+static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
+static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
 						 tree, tree,
 						 tree, tree, int);
@@ -188,58 +193,44 @@ decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
   *hi = words[2] + words[3] * BASE;
 }
 
-/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
-   in overflow of the value, when >0 we are only interested in signed
-   overflow, for <0 we are interested in any overflow.  OVERFLOWED
-   indicates whether overflow has already occurred. 
CONST_OVERFLOWED - indicates whether constant overflow has already occurred. We force - T's value to be within range of T's type (by setting to 0 or 1 all - the bits outside the type's range). We set TREE_OVERFLOWED if, - OVERFLOWED is nonzero, - or OVERFLOWABLE is >0 and signed overflow occurs - or OVERFLOWABLE is <0 and any overflow occurs - We set TREE_CONSTANT_OVERFLOWED if, - CONST_OVERFLOWED is nonzero - or we set TREE_OVERFLOWED. - We return either the original T, or a copy. */ +/* Force the double-word integer L1, H1 to be within the range of the + integer type TYPE. Stores the properly truncated and sign-extended + double-word integer in *LV, *HV. Returns true if the operation + overflows, that is, argument and result are different. */ -tree -force_fit_type (tree t, int overflowable, - bool overflowed, bool overflowed_const) +int +fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, + unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type) { - unsigned HOST_WIDE_INT low; - HOST_WIDE_INT high; + unsigned HOST_WIDE_INT low0 = l1; + HOST_WIDE_INT high0 = h1; unsigned int prec; int sign_extended_type; - gcc_assert (TREE_CODE (t) == INTEGER_CST); - - low = TREE_INT_CST_LOW (t); - high = TREE_INT_CST_HIGH (t); - - if (POINTER_TYPE_P (TREE_TYPE (t)) - || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE) + if (POINTER_TYPE_P (type) + || TREE_CODE (type) == OFFSET_TYPE) prec = POINTER_SIZE; else - prec = TYPE_PRECISION (TREE_TYPE (t)); + prec = TYPE_PRECISION (type); + /* Size types *are* sign extended. */ - sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t)) - || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE - && TYPE_IS_SIZETYPE (TREE_TYPE (t)))); + sign_extended_type = (!TYPE_UNSIGNED (type) + || (TREE_CODE (type) == INTEGER_TYPE + && TYPE_IS_SIZETYPE (type))); /* First clear all bits that are beyond the type's precision. */ - if (prec >= 2 * HOST_BITS_PER_WIDE_INT) ; else if (prec > HOST_BITS_PER_WIDE_INT) - high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT)); + h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT)); else { - high = 0; + h1 = 0; if (prec < HOST_BITS_PER_WIDE_INT) - low &= ~((HOST_WIDE_INT) (-1) << prec); + l1 &= ~((HOST_WIDE_INT) (-1) << prec); } + /* Then do sign extension if necessary. */ if (!sign_extended_type) /* No sign extension */; else if (prec >= 2 * HOST_BITS_PER_WIDE_INT) @@ -247,47 +238,80 @@ force_fit_type (tree t, int overflowable, else if (prec > HOST_BITS_PER_WIDE_INT) { /* Sign extend top half? */ - if (high & ((unsigned HOST_WIDE_INT)1 - << (prec - HOST_BITS_PER_WIDE_INT - 1))) - high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT); + if (h1 & ((unsigned HOST_WIDE_INT)1 + << (prec - HOST_BITS_PER_WIDE_INT - 1))) + h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT); } else if (prec == HOST_BITS_PER_WIDE_INT) { - if ((HOST_WIDE_INT)low < 0) - high = -1; + if ((HOST_WIDE_INT)l1 < 0) + h1 = -1; } else { /* Sign extend bottom half? */ - if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1))) + if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1))) { - high = -1; - low |= (HOST_WIDE_INT)(-1) << prec; + h1 = -1; + l1 |= (HOST_WIDE_INT)(-1) << prec; } } - /* If the value changed, return a new node. */ - if (overflowed || overflowed_const - || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t)) - { - t = build_int_cst_wide (TREE_TYPE (t), low, high); + *lv = l1; + *hv = h1; + + /* If the value didn't fit, signal overflow. 
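+     For example, with an 8-bit unsigned TYPE the double-word value
+     0x1ff is truncated to 0xff and we return true, while 0xff already
+     fits and we return false.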
*/ + return l1 != low0 || h1 != high0; +} + +/* We force the double-int HIGH:LOW to the range of the type TYPE by + sign or zero extending it. + OVERFLOWABLE indicates if we are interested + in overflow of the value, when >0 we are only interested in signed + overflow, for <0 we are interested in any overflow. OVERFLOWED + indicates whether overflow has already occurred. CONST_OVERFLOWED + indicates whether constant overflow has already occurred. We force + T's value to be within range of T's type (by setting to 0 or 1 all + the bits outside the type's range). We set TREE_OVERFLOWED if, + OVERFLOWED is nonzero, + or OVERFLOWABLE is >0 and signed overflow occurs + or OVERFLOWABLE is <0 and any overflow occurs + We return a new tree node for the extended double-int. The node + is shared if no overflow flags are set. */ +tree +force_fit_type_double (tree type, unsigned HOST_WIDE_INT low, + HOST_WIDE_INT high, int overflowable, + bool overflowed) +{ + int sign_extended_type; + bool overflow; + + /* Size types *are* sign extended. */ + sign_extended_type = (!TYPE_UNSIGNED (type) + || (TREE_CODE (type) == INTEGER_TYPE + && TYPE_IS_SIZETYPE (type))); + + overflow = fit_double_type (low, high, &low, &high, type); + + /* If we need to set overflow flags, return a new unshared node. */ + if (overflowed || overflow) + { if (overflowed || overflowable < 0 || (overflowable > 0 && sign_extended_type)) { - t = copy_node (t); + tree t = make_node (INTEGER_CST); + TREE_INT_CST_LOW (t) = low; + TREE_INT_CST_HIGH (t) = high; + TREE_TYPE (t) = type; TREE_OVERFLOW (t) = 1; - TREE_CONSTANT_OVERFLOW (t) = 1; - } - else if (overflowed_const) - { - t = copy_node (t); - TREE_CONSTANT_OVERFLOW (t) = 1; + return t; } } - return t; + /* Else build a shared node. */ + return build_int_cst_wide (type, low, high); } /* Add two doubleword integers with doubleword result. @@ -877,6 +901,122 @@ div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2) return build_int_cst_wide (type, quol, quoh); } +/* This is non-zero if we should defer warnings about undefined + overflow. This facility exists because these warnings are a + special case. The code to estimate loop iterations does not want + to issue any warnings, since it works with expressions which do not + occur in user code. Various bits of cleanup code call fold(), but + only use the result if it has certain characteristics (e.g., is a + constant); that code only wants to issue a warning if the result is + used. */ + +static int fold_deferring_overflow_warnings; + +/* If a warning about undefined overflow is deferred, this is the + warning. Note that this may cause us to turn two warnings into + one, but that is fine since it is sufficient to only give one + warning per expression. */ + +static const char* fold_deferred_overflow_warning; + +/* If a warning about undefined overflow is deferred, this is the + level at which the warning should be emitted. */ + +static enum warn_strict_overflow_code fold_deferred_overflow_code; + +/* Start deferring overflow warnings. We could use a stack here to + permit nested calls, but at present it is not necessary. */ + +void +fold_defer_overflow_warnings (void) +{ + ++fold_deferring_overflow_warnings; +} + +/* Stop deferring overflow warnings. If there is a pending warning, + and ISSUE is true, then issue the warning if appropriate. STMT is + the statement with which the warning should be associated (used for + location information); STMT may be NULL. CODE is the level of the + warning--a warn_strict_overflow_code value. 
This function will use + the smaller of CODE and the deferred code when deciding whether to + issue the warning. CODE may be zero to mean to always use the + deferred code. */ + +void +fold_undefer_overflow_warnings (bool issue, tree stmt, int code) +{ + const char *warnmsg; + location_t locus; + + gcc_assert (fold_deferring_overflow_warnings > 0); + --fold_deferring_overflow_warnings; + if (fold_deferring_overflow_warnings > 0) + { + if (fold_deferred_overflow_warning != NULL + && code != 0 + && code < (int) fold_deferred_overflow_code) + fold_deferred_overflow_code = code; + return; + } + + warnmsg = fold_deferred_overflow_warning; + fold_deferred_overflow_warning = NULL; + + if (!issue || warnmsg == NULL) + return; + + /* Use the smallest code level when deciding to issue the + warning. */ + if (code == 0 || code > (int) fold_deferred_overflow_code) + code = fold_deferred_overflow_code; + + if (!issue_strict_overflow_warning (code)) + return; + + if (stmt == NULL_TREE || !expr_has_location (stmt)) + locus = input_location; + else + locus = expr_location (stmt); + warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg); +} + +/* Stop deferring overflow warnings, ignoring any deferred + warnings. */ + +void +fold_undefer_and_ignore_overflow_warnings (void) +{ + fold_undefer_overflow_warnings (false, NULL_TREE, 0); +} + +/* Whether we are deferring overflow warnings. */ + +bool +fold_deferring_overflow_warnings_p (void) +{ + return fold_deferring_overflow_warnings > 0; +} + +/* This is called when we fold something based on the fact that signed + overflow is undefined. */ + +static void +fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc) +{ + gcc_assert (!flag_wrapv && !flag_trapv); + if (fold_deferring_overflow_warnings > 0) + { + if (fold_deferred_overflow_warning == NULL + || wc < fold_deferred_overflow_code) + { + fold_deferred_overflow_warning = gmsgid; + fold_deferred_overflow_code = wc; + } + } + else if (issue_strict_overflow_warning (wc)) + warning (OPT_Wstrict_overflow, gmsgid); +} + /* Return true if the built-in mathematical function specified by CODE is odd, i.e. -f(x) == f(-x). */ @@ -889,13 +1029,33 @@ negate_mathfn_p (enum built_in_function code) CASE_FLT_FN (BUILT_IN_ASINH): CASE_FLT_FN (BUILT_IN_ATAN): CASE_FLT_FN (BUILT_IN_ATANH): + CASE_FLT_FN (BUILT_IN_CASIN): + CASE_FLT_FN (BUILT_IN_CASINH): + CASE_FLT_FN (BUILT_IN_CATAN): + CASE_FLT_FN (BUILT_IN_CATANH): CASE_FLT_FN (BUILT_IN_CBRT): + CASE_FLT_FN (BUILT_IN_CPROJ): + CASE_FLT_FN (BUILT_IN_CSIN): + CASE_FLT_FN (BUILT_IN_CSINH): + CASE_FLT_FN (BUILT_IN_CTAN): + CASE_FLT_FN (BUILT_IN_CTANH): + CASE_FLT_FN (BUILT_IN_ERF): + CASE_FLT_FN (BUILT_IN_LLROUND): + CASE_FLT_FN (BUILT_IN_LROUND): + CASE_FLT_FN (BUILT_IN_ROUND): CASE_FLT_FN (BUILT_IN_SIN): CASE_FLT_FN (BUILT_IN_SINH): CASE_FLT_FN (BUILT_IN_TAN): CASE_FLT_FN (BUILT_IN_TANH): + CASE_FLT_FN (BUILT_IN_TRUNC): return true; + CASE_FLT_FN (BUILT_IN_LLRINT): + CASE_FLT_FN (BUILT_IN_LRINT): + CASE_FLT_FN (BUILT_IN_NEARBYINT): + CASE_FLT_FN (BUILT_IN_RINT): + return !flag_rounding_math; + default: break; } @@ -950,16 +1110,14 @@ negate_expr_p (tree t) switch (TREE_CODE (t)) { case INTEGER_CST: - if (TYPE_UNSIGNED (type) - || (flag_wrapv && ! flag_trapv)) + if (TYPE_OVERFLOW_WRAPS (type)) return true; /* Check that -CST will not overflow type. 
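	 (e.g. the minimum value of a signed type has no representable
	 negation in that type).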
*/ return may_negate_without_overflow_p (t); case BIT_NOT_EXPR: - return INTEGRAL_TYPE_P (type) - && (TYPE_UNSIGNED (type) - || (flag_wrapv && !flag_trapv)); + return (INTEGRAL_TYPE_P (type) + && TYPE_OVERFLOW_WRAPS (type)); case REAL_CST: case NEGATE_EXPR: @@ -969,8 +1127,16 @@ negate_expr_p (tree t) return negate_expr_p (TREE_REALPART (t)) && negate_expr_p (TREE_IMAGPART (t)); + case COMPLEX_EXPR: + return negate_expr_p (TREE_OPERAND (t, 0)) + && negate_expr_p (TREE_OPERAND (t, 1)); + + case CONJ_EXPR: + return negate_expr_p (TREE_OPERAND (t, 0)); + case PLUS_EXPR: - if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations) + if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)) + || HONOR_SIGNED_ZEROS (TYPE_MODE (type))) return false; /* -(A + B) -> (-B) - A. */ if (negate_expr_p (TREE_OPERAND (t, 1)) @@ -982,7 +1148,8 @@ negate_expr_p (tree t) case MINUS_EXPR: /* We can't turn -(A-B) into B-A when we honor signed zeros. */ - return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations) + return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)) + && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)) && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)); @@ -1003,7 +1170,13 @@ negate_expr_p (tree t) case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR: case EXACT_DIV_EXPR: - if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv) + /* In general we can't negate A / B, because if A is INT_MIN and + B is 1, we may turn this into INT_MIN / -1 which is undefined + and actually traps on some architectures. But if overflow is + undefined, we can negate, because - (INT_MIN / 1) is an + overflow. */ + if (INTEGRAL_TYPE_P (TREE_TYPE (t)) + && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))) break; return negate_expr_p (TREE_OPERAND (t, 1)) || negate_expr_p (TREE_OPERAND (t, 0)); @@ -1021,7 +1194,7 @@ negate_expr_p (tree t) case CALL_EXPR: /* Negate -f(x) as f(-x). */ if (negate_mathfn_p (builtin_mathfn_code (t))) - return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))); + return negate_expr_p (CALL_EXPR_ARG (t, 0)); break; case RSHIFT_EXPR: @@ -1064,16 +1237,15 @@ fold_negate_expr (tree t) case INTEGER_CST: tem = fold_negate_const (t, type); - if (! TREE_OVERFLOW (tem) - || TYPE_UNSIGNED (type) - || ! flag_trapv) + if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t) + || !TYPE_OVERFLOW_TRAPS (type)) return tem; break; case REAL_CST: tem = fold_negate_const (t, type); /* Two's complement FP formats, such as c4x, may overflow. */ - if (! TREE_OVERFLOW (tem) || ! flag_trapping_math) + if (!TREE_OVERFLOW (tem) || !flag_trapping_math) return tem; break; @@ -1090,11 +1262,25 @@ fold_negate_expr (tree t) } break; + case COMPLEX_EXPR: + if (negate_expr_p (t)) + return fold_build2 (COMPLEX_EXPR, type, + fold_negate_expr (TREE_OPERAND (t, 0)), + fold_negate_expr (TREE_OPERAND (t, 1))); + break; + + case CONJ_EXPR: + if (negate_expr_p (t)) + return fold_build1 (CONJ_EXPR, type, + fold_negate_expr (TREE_OPERAND (t, 0))); + break; + case NEGATE_EXPR: return TREE_OPERAND (t, 0); case PLUS_EXPR: - if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations) + if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)) + && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))) { /* -(A + B) -> (-B) - A. */ if (negate_expr_p (TREE_OPERAND (t, 1)) @@ -1118,7 +1304,8 @@ fold_negate_expr (tree t) case MINUS_EXPR: /* - (A - B) -> B - A */ - if ((! 
FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations) + if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)) + && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)) && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1))) return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (t, 1), TREE_OPERAND (t, 0)); @@ -1149,16 +1336,35 @@ fold_negate_expr (tree t) case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR: case EXACT_DIV_EXPR: - if (!TYPE_UNSIGNED (type) && !flag_wrapv) + /* In general we can't negate A / B, because if A is INT_MIN and + B is 1, we may turn this into INT_MIN / -1 which is undefined + and actually traps on some architectures. But if overflow is + undefined, we can negate, because - (INT_MIN / 1) is an + overflow. */ + if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type)) { + const char * const warnmsg = G_("assuming signed overflow does not " + "occur when negating a division"); tem = TREE_OPERAND (t, 1); if (negate_expr_p (tem)) - return fold_build2 (TREE_CODE (t), type, - TREE_OPERAND (t, 0), negate_expr (tem)); + { + if (INTEGRAL_TYPE_P (type) + && (TREE_CODE (tem) != INTEGER_CST + || integer_onep (tem))) + fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC); + return fold_build2 (TREE_CODE (t), type, + TREE_OPERAND (t, 0), negate_expr (tem)); + } tem = TREE_OPERAND (t, 0); if (negate_expr_p (tem)) - return fold_build2 (TREE_CODE (t), type, - negate_expr (tem), TREE_OPERAND (t, 1)); + { + if (INTEGRAL_TYPE_P (type) + && (TREE_CODE (tem) != INTEGER_CST + || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type)))) + fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC); + return fold_build2 (TREE_CODE (t), type, + negate_expr (tem), TREE_OPERAND (t, 1)); + } } break; @@ -1175,14 +1381,13 @@ fold_negate_expr (tree t) case CALL_EXPR: /* Negate -f(x) as f(-x). */ if (negate_mathfn_p (builtin_mathfn_code (t)) - && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)))) + && negate_expr_p (CALL_EXPR_ARG (t, 0))) { - tree fndecl, arg, arglist; + tree fndecl, arg; fndecl = get_callee_fndecl (t); - arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1))); - arglist = build_tree_list (NULL_TREE, arg); - return build_function_call_expr (fndecl, arglist); + arg = negate_expr (CALL_EXPR_ARG (t, 0)); + return build_call_expr (fndecl, 1, arg); } break; @@ -1371,6 +1576,35 @@ associate_trees (tree t1, tree t2, enum tree_code code, tree type) fold_convert (type, t2)); } +/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable + for use in int_const_binop, size_binop and size_diffop. */ + +static bool +int_binop_types_match_p (enum tree_code code, tree type1, tree type2) +{ + if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1)) + return false; + if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2)) + return false; + + switch (code) + { + case LSHIFT_EXPR: + case RSHIFT_EXPR: + case LROTATE_EXPR: + case RROTATE_EXPR: + return true; + + default: + break; + } + + return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2) + && TYPE_PRECISION (type1) == TYPE_PRECISION (type2) + && TYPE_MODE (type1) == TYPE_MODE (type2); +} + + /* Combine two integer constants ARG1 and ARG2 under operation CODE to produce a new constant. Return NULL_TREE if we don't know how to evaluate CODE at compile-time. @@ -1448,8 +1682,8 @@ int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) case EXACT_DIV_EXPR: /* This is a shortcut for a common special case. */ if (int2h == 0 && (HOST_WIDE_INT) int2l > 0 - && ! TREE_CONSTANT_OVERFLOW (arg1) - && ! 
TREE_CONSTANT_OVERFLOW (arg2) + && !TREE_OVERFLOW (arg1) + && !TREE_OVERFLOW (arg2) && int1h == 0 && (HOST_WIDE_INT) int1l >= 0) { if (code == CEIL_DIV_EXPR) @@ -1483,8 +1717,8 @@ int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR: /* This is a shortcut for a common special case. */ if (int2h == 0 && (HOST_WIDE_INT) int2l > 0 - && ! TREE_CONSTANT_OVERFLOW (arg1) - && ! TREE_CONSTANT_OVERFLOW (arg2) + && !TREE_OVERFLOW (arg1) + && !TREE_OVERFLOW (arg2) && int1h == 0 && (HOST_WIDE_INT) int1l >= 0) { if (code == CEIL_MOD_EXPR) @@ -1525,30 +1759,22 @@ int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) return NULL_TREE; } - t = build_int_cst_wide (TREE_TYPE (arg1), low, hi); - if (notrunc) { + t = build_int_cst_wide (TREE_TYPE (arg1), low, hi); + /* Propagate overflow flags ourselves. */ if (((!uns || is_sizetype) && overflow) | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)) { t = copy_node (t); TREE_OVERFLOW (t) = 1; - TREE_CONSTANT_OVERFLOW (t) = 1; - } - else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2)) - { - t = copy_node (t); - TREE_CONSTANT_OVERFLOW (t) = 1; } } else - t = force_fit_type (t, 1, - ((!uns || is_sizetype) && overflow) - | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2), - TREE_CONSTANT_OVERFLOW (arg1) - | TREE_CONSTANT_OVERFLOW (arg2)); + t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1, + ((!uns || is_sizetype) && overflow) + | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)); return t; } @@ -1649,10 +1875,6 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) t = build_real (type, result); TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2); - TREE_CONSTANT_OVERFLOW (t) - = TREE_OVERFLOW (t) - | TREE_CONSTANT_OVERFLOW (arg1) - | TREE_CONSTANT_OVERFLOW (arg2); return t; } @@ -1732,7 +1954,7 @@ size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind) /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE is a tree code. The type of the result is taken from the operands. - Both must be the same type integer type and it must be a size type. + Both must be equivalent integer types, ala int_binop_types_match_p. If the operands are constant, so is the result. */ tree @@ -1743,20 +1965,30 @@ size_binop (enum tree_code code, tree arg0, tree arg1) if (arg0 == error_mark_node || arg1 == error_mark_node) return error_mark_node; - gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type) - && type == TREE_TYPE (arg1)); + gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0), + TREE_TYPE (arg1))); /* Handle the special case of two integer constants faster. */ if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) { /* And some specific cases even faster than that. */ - if (code == PLUS_EXPR && integer_zerop (arg0)) - return arg1; - else if ((code == MINUS_EXPR || code == PLUS_EXPR) - && integer_zerop (arg1)) - return arg0; - else if (code == MULT_EXPR && integer_onep (arg0)) - return arg1; + if (code == PLUS_EXPR) + { + if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0)) + return arg1; + if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1)) + return arg0; + } + else if (code == MINUS_EXPR) + { + if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1)) + return arg0; + } + else if (code == MULT_EXPR) + { + if (integer_onep (arg0) && !TREE_OVERFLOW (arg0)) + return arg1; + } /* Handle general case of two integer constants. 
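	 (the arithmetic itself, including overflow propagation, is
	 done by int_const_binop below).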
*/ return int_const_binop (code, arg0, arg1, 0); @@ -1775,14 +2007,19 @@ size_diffop (tree arg0, tree arg1) tree type = TREE_TYPE (arg0); tree ctype; - gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type) - && type == TREE_TYPE (arg1)); + gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0), + TREE_TYPE (arg1))); /* If the type is already signed, just do the simple thing. */ if (!TYPE_UNSIGNED (type)) return size_binop (MINUS_EXPR, arg0, arg1); - ctype = type == bitsizetype ? sbitsizetype : ssizetype; + if (type == sizetype) + ctype = ssizetype; + else if (type == bitsizetype) + ctype = sbitsizetype; + else + ctype = lang_hooks.types.signed_type (type); /* If either operand is not a constant, do the conversions to the signed type and subtract. The hardware will do the right thing with any @@ -1815,18 +2052,15 @@ fold_convert_const_int_from_int (tree type, tree arg1) /* Given an integer constant, make new constant with new type, appropriately sign-extended or truncated. */ - t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1), - TREE_INT_CST_HIGH (arg1)); - - t = force_fit_type (t, - /* Don't set the overflow when - converting a pointer */ - !POINTER_TYPE_P (TREE_TYPE (arg1)), - (TREE_INT_CST_HIGH (arg1) < 0 - && (TYPE_UNSIGNED (type) - < TYPE_UNSIGNED (TREE_TYPE (arg1)))) - | TREE_OVERFLOW (arg1), - TREE_CONSTANT_OVERFLOW (arg1)); + t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1), + TREE_INT_CST_HIGH (arg1), + /* Don't set the overflow when + converting a pointer */ + !POINTER_TYPE_P (TREE_TYPE (arg1)), + (TREE_INT_CST_HIGH (arg1) < 0 + && (TYPE_UNSIGNED (type) + < TYPE_UNSIGNED (TREE_TYPE (arg1)))) + | TREE_OVERFLOW (arg1)); return t; } @@ -1859,18 +2093,6 @@ fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1) real_trunc (&r, VOIDmode, &x); break; - case FIX_CEIL_EXPR: - real_ceil (&r, VOIDmode, &x); - break; - - case FIX_FLOOR_EXPR: - real_floor (&r, VOIDmode, &x); - break; - - case FIX_ROUND_EXPR: - real_round (&r, VOIDmode, &x); - break; - default: gcc_unreachable (); } @@ -1916,10 +2138,8 @@ fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1) if (! overflow) REAL_VALUE_TO_INT (&low, &high, r); - t = build_int_cst_wide (type, low, high); - - t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1), - TREE_CONSTANT_OVERFLOW (arg1)); + t = force_fit_type_double (type, low, high, -1, + overflow | TREE_OVERFLOW (arg1)); return t; } @@ -1936,8 +2156,6 @@ fold_convert_const_real_from_real (tree type, tree arg1) t = build_real (type, value); TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1); - TREE_CONSTANT_OVERFLOW (t) - = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1); return t; } @@ -2103,7 +2321,10 @@ fold_convert (tree type, tree arg) return fold_build1 (VIEW_CONVERT_EXPR, type, arg); case VOID_TYPE: - return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg)); + tem = fold_ignored_result (arg); + if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT) + return tem; + return fold_build1 (NOP_EXPR, type, tem); default: gcc_unreachable (); @@ -2144,6 +2365,7 @@ maybe_lvalue_p (tree x) case WITH_CLEANUP_EXPR: case COMPOUND_EXPR: case MODIFY_EXPR: + case GIMPLE_MODIFY_STMT: case TARGET_EXPR: case COND_EXPR: case BIND_EXPR: @@ -2544,24 +2766,27 @@ operand_equal_p (tree arg0, tree arg1, unsigned int flags) switch (TREE_CODE (arg0)) { case INTEGER_CST: - return (! TREE_CONSTANT_OVERFLOW (arg0) - && ! 
TREE_CONSTANT_OVERFLOW (arg1) - && tree_int_cst_equal (arg0, arg1)); + return tree_int_cst_equal (arg0, arg1); case REAL_CST: - return (! TREE_CONSTANT_OVERFLOW (arg0) - && ! TREE_CONSTANT_OVERFLOW (arg1) - && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0), - TREE_REAL_CST (arg1))); + if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0), + TREE_REAL_CST (arg1))) + return 1; + + + if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))) + { + /* If we do not distinguish between signed and unsigned zero, + consider them equal. */ + if (real_zerop (arg0) && real_zerop (arg1)) + return 1; + } + return 0; case VECTOR_CST: { tree v1, v2; - if (TREE_CONSTANT_OVERFLOW (arg0) - || TREE_CONSTANT_OVERFLOW (arg1)) - return 0; - v1 = TREE_VECTOR_CST_ELTS (arg0); v2 = TREE_VECTOR_CST_ELTS (arg1); while (v1 && v2) @@ -2617,10 +2842,7 @@ operand_equal_p (tree arg0, tree arg1, unsigned int flags) { case NOP_EXPR: case CONVERT_EXPR: - case FIX_CEIL_EXPR: case FIX_TRUNC_EXPR: - case FIX_FLOOR_EXPR: - case FIX_ROUND_EXPR: if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))) return 0; @@ -2705,10 +2927,18 @@ operand_equal_p (tree arg0, tree arg1, unsigned int flags) && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), flags)); + default: + return 0; + } + + case tcc_vl_exp: + switch (TREE_CODE (arg0)) + { case CALL_EXPR: /* If the CALL_EXPRs call different functions, then they clearly can not be equal. */ - if (!OP_SAME (0)) + if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1), + flags)) return 0; { @@ -2721,25 +2951,22 @@ operand_equal_p (tree arg0, tree arg1, unsigned int flags) return 0; } - /* Now see if all the arguments are the same. operand_equal_p - does not handle TREE_LIST, so we walk the operands here - feeding them to operand_equal_p. */ - arg0 = TREE_OPERAND (arg0, 1); - arg1 = TREE_OPERAND (arg1, 1); - while (arg0 && arg1) - { - if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), - flags)) + /* Now see if all the arguments are the same. */ + { + call_expr_arg_iterator iter0, iter1; + tree a0, a1; + for (a0 = first_call_expr_arg (arg0, &iter0), + a1 = first_call_expr_arg (arg1, &iter1); + a0 && a1; + a0 = next_call_expr_arg (&iter0), + a1 = next_call_expr_arg (&iter1)) + if (! operand_equal_p (a0, a1, flags)) return 0; - arg0 = TREE_CHAIN (arg0); - arg1 = TREE_CHAIN (arg1); - } - - /* If we get here and both argument lists are exhausted - then the CALL_EXPRs are equal. */ - return ! (arg0 || arg1); - + /* If we get here and both argument lists are exhausted + then the CALL_EXPRs are equal. */ + return ! (a0 || a1); + } default: return 0; } @@ -2805,7 +3032,7 @@ operand_equal_for_comparison_p (tree arg0, tree arg1, tree other) /* Make sure shorter operand is extended the right way to match the longer operand. */ - primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type + primarg1 = fold_convert (get_signed_or_unsigned_type (unsignedp1, TREE_TYPE (primarg1)), primarg1); if (operand_equal_p (arg0, fold_convert (type, primarg1), 0)) @@ -3405,9 +3632,7 @@ optimize_bit_field_compare (enum tree_code code, tree compare_type, lbitpos = nbitsize - lbitsize - lbitpos; /* Make the mask to be used against the extracted field. 
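      (an all-ones constant shifted left and then right so that exactly
      the lbitsize bits at position lbitpos remain set).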
*/ - mask = build_int_cst (unsigned_type, -1); - mask = force_fit_type (mask, 0, false, false); - mask = fold_convert (unsigned_type, mask); + mask = build_int_cst_type (unsigned_type, -1); mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0); mask = const_binop (RSHIFT_EXPR, mask, size_int (nbitsize - lbitsize - lbitpos), 0); @@ -3415,15 +3640,19 @@ optimize_bit_field_compare (enum tree_code code, tree compare_type, if (! const_p) /* If not comparing with constant, just rework the comparison and return. */ - return build2 (code, compare_type, - build2 (BIT_AND_EXPR, unsigned_type, - make_bit_field_ref (linner, unsigned_type, - nbitsize, nbitpos, 1), - mask), - build2 (BIT_AND_EXPR, unsigned_type, - make_bit_field_ref (rinner, unsigned_type, - nbitsize, nbitpos, 1), - mask)); + return fold_build2 (code, compare_type, + fold_build2 (BIT_AND_EXPR, unsigned_type, + make_bit_field_ref (linner, + unsigned_type, + nbitsize, nbitpos, + 1), + mask), + fold_build2 (BIT_AND_EXPR, unsigned_type, + make_bit_field_ref (rinner, + unsigned_type, + nbitsize, nbitpos, + 1), + mask)); /* Otherwise, we are handling the constant case. See if the constant is too big for the field. Warn and return a tree of for 0 (false) if so. We do @@ -3561,8 +3790,7 @@ decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize, unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1); precision = TYPE_PRECISION (unsigned_type); - mask = build_int_cst (unsigned_type, -1); - mask = force_fit_type (mask, 0, false, false); + mask = build_int_cst_type (unsigned_type, -1); mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0); mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0); @@ -3587,8 +3815,7 @@ all_ones_mask_p (tree mask, int size) unsigned int precision = TYPE_PRECISION (type); tree tmask; - tmask = build_int_cst (lang_hooks.types.signed_type (type), -1); - tmask = force_fit_type (tmask, 0, false, false); + tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1); return tree_int_cst_equal (mask, @@ -3620,7 +3847,7 @@ sign_bit_p (tree exp, tree val) /* Tree VAL must be an integer constant. */ if (TREE_CODE (val) != INTEGER_CST - || TREE_CONSTANT_OVERFLOW (val)) + || TREE_OVERFLOW (val)) return NULL_TREE; width = TYPE_PRECISION (t); @@ -3778,12 +4005,16 @@ range_binop (enum tree_code code, tree type, tree arg0, int upper0_p, /* Given EXP, a logical expression, set the range it is testing into variables denoted by PIN_P, PLOW, and PHIGH. Return the expression - actually being tested. *PLOW and *PHIGH will be made of the same type - as the returned expression. If EXP is not a comparison, we will most - likely not be returning a useful value and range. */ + actually being tested. *PLOW and *PHIGH will be made of the same + type as the returned expression. If EXP is not a comparison, we + will most likely not be returning a useful value and range. Set + *STRICT_OVERFLOW_P to true if the return value is only valid + because signed overflow is undefined; otherwise, do not change + *STRICT_OVERFLOW_P. 
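+   For example, for EXP `x == 4' we return `x' with *PIN_P set to 1
+   and both *PLOW and *PHIGH set to 4.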
*/ static tree -make_range (tree exp, int *pin_p, tree *plow, tree *phigh) +make_range (tree exp, int *pin_p, tree *plow, tree *phigh, + bool *strict_overflow_p) { enum tree_code code; tree arg0 = NULL_TREE, arg1 = NULL_TREE; @@ -3807,7 +4038,7 @@ make_range (tree exp, int *pin_p, tree *plow, tree *phigh) if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))) { - if (TREE_CODE_LENGTH (code) > 0) + if (TREE_OPERAND_LENGTH (exp) > 0) arg0 = TREE_OPERAND (exp, 0); if (TREE_CODE_CLASS (code) == tcc_comparison || TREE_CODE_CLASS (code) == tcc_unary @@ -3816,7 +4047,7 @@ make_range (tree exp, int *pin_p, tree *plow, tree *phigh) if (TREE_CODE_CLASS (code) == tcc_binary || TREE_CODE_CLASS (code) == tcc_comparison || (TREE_CODE_CLASS (code) == tcc_expression - && TREE_CODE_LENGTH (code) > 1)) + && TREE_OPERAND_LENGTH (exp) > 1)) arg1 = TREE_OPERAND (exp, 1); } @@ -3916,7 +4147,8 @@ make_range (tree exp, int *pin_p, tree *plow, tree *phigh) /* If flag_wrapv and ARG0_TYPE is signed, then we cannot move a constant to the other side. */ - if (flag_wrapv && !TYPE_UNSIGNED (arg0_type)) + if (!TYPE_UNSIGNED (arg0_type) + && !TYPE_OVERFLOW_UNDEFINED (arg0_type)) break; /* If EXP is signed, any overflow in the computation is undefined, @@ -3931,6 +4163,9 @@ make_range (tree exp, int *pin_p, tree *plow, tree *phigh) || (n_high != 0 && TREE_OVERFLOW (n_high))) break; + if (TYPE_OVERFLOW_UNDEFINED (arg0_type)) + *strict_overflow_p = true; + /* Check for an unsigned range which has wrapped around the maximum value thus making n_high < n_low, and normalize it. */ if (n_low && n_high && tree_int_cst_lt (n_high, n_low)) @@ -4000,8 +4235,7 @@ make_range (tree exp, int *pin_p, tree *plow, tree *phigh) high_positive = fold_build2 (RSHIFT_EXPR, arg0_type, fold_convert (arg0_type, high_positive), - fold_convert (arg0_type, - integer_one_node)); + build_int_cst (arg0_type, 1)); /* If the low bound is specified, "and" the range with the range for which the original unsigned value will be @@ -4167,7 +4401,7 @@ build_range_check (tree type, tree exp, int in_p, tree low, tree high) /* If we don't have wrap-around arithmetics upfront, try to force it. 
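      (the code below redoes the range check in the corresponding
      unsigned type, where the subtraction is defined to wrap).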
*/ if (TREE_CODE (etype) == INTEGER_TYPE - && !TYPE_UNSIGNED (etype) && !flag_wrapv) + && !TYPE_OVERFLOW_WRAPS (etype)) { tree utype, minv, maxv; @@ -4647,7 +4881,7 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) OEP_ONLY_CONST) && operand_equal_p (arg01, const_binop (PLUS_EXPR, arg2, - integer_one_node, 0), + build_int_cst (type, 1), 0), OEP_ONLY_CONST)) return pedantic_non_lvalue (fold_build2 (MIN_EXPR, type, arg1, arg2)); @@ -4659,7 +4893,7 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) OEP_ONLY_CONST) && operand_equal_p (arg01, const_binop (MINUS_EXPR, arg2, - integer_one_node, 0), + build_int_cst (type, 1), 0), OEP_ONLY_CONST)) return pedantic_non_lvalue (fold_build2 (MIN_EXPR, type, arg1, arg2)); @@ -4671,7 +4905,7 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) OEP_ONLY_CONST) && operand_equal_p (arg01, const_binop (MINUS_EXPR, arg2, - integer_one_node, 0), + build_int_cst (type, 1), 0), OEP_ONLY_CONST)) return pedantic_non_lvalue (fold_build2 (MAX_EXPR, type, arg1, arg2)); @@ -4683,7 +4917,7 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) OEP_ONLY_CONST) && operand_equal_p (arg01, const_binop (PLUS_EXPR, arg2, - integer_one_node, 0), + build_int_cst (type, 1), 0), OEP_ONLY_CONST)) return pedantic_non_lvalue (fold_build2 (MAX_EXPR, type, arg1, arg2)); @@ -4713,9 +4947,12 @@ fold_range_test (enum tree_code code, tree type, tree op0, tree op1) || code == TRUTH_OR_EXPR); int in0_p, in1_p, in_p; tree low0, low1, low, high0, high1, high; - tree lhs = make_range (op0, &in0_p, &low0, &high0); - tree rhs = make_range (op1, &in1_p, &low1, &high1); + bool strict_overflow_p = false; + tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p); + tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p); tree tem; + const char * const warnmsg = G_("assuming signed overflow does not occur " + "when simplifying range test"); /* If this is an OR operation, invert both sides; we will invert again at the end. */ @@ -4733,7 +4970,11 @@ fold_range_test (enum tree_code code, tree type, tree op0, tree op1) lhs != 0 ? lhs : rhs != 0 ? rhs : integer_zero_node, in_p, low, high)))) - return or_op ? invert_truthvalue (tem) : tem; + { + if (strict_overflow_p) + fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON); + return or_op ? invert_truthvalue (tem) : tem; + } /* On machines where the branch cost is expensive, if this is a short-circuited branch and the underlying object on both sides @@ -4763,9 +5004,14 @@ fold_range_test (enum tree_code code, tree type, tree op0, tree op1) && (0 != (rhs = build_range_check (type, common, or_op ? ! in1_p : in1_p, low1, high1)))) - return build2 (code == TRUTH_ANDIF_EXPR - ? TRUTH_AND_EXPR : TRUTH_OR_EXPR, - type, lhs, rhs); + { + if (strict_overflow_p) + fold_overflow_warning (warnmsg, + WARN_STRICT_OVERFLOW_COMPARISON); + return build2 (code == TRUTH_ANDIF_EXPR + ? TRUTH_AND_EXPR : TRUTH_OR_EXPR, + type, lhs, rhs); + } } } @@ -5042,11 +5288,6 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) return 0; } - /* After this point all optimizations will generate bit-field - references, which we might not want. */ - if (! lang_hooks.can_use_bit_fields_p ()) - return 0; - /* See if we can find a mode that contains both fields being compared on the left. If we can't, fail. Otherwise, update all constants and masks to be relative to a field of that size. 
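      (this merging is what turns two adjacent bit-field comparisons
      into a single masked comparison on the containing word).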
*/ @@ -5277,9 +5518,9 @@ optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1) /* If something does not permit us to optimize, return the original tree. */ if ((op_code != MIN_EXPR && op_code != MAX_EXPR) || TREE_CODE (comp_const) != INTEGER_CST - || TREE_CONSTANT_OVERFLOW (comp_const) + || TREE_OVERFLOW (comp_const) || TREE_CODE (minmax_const) != INTEGER_CST - || TREE_CONSTANT_OVERFLOW (minmax_const)) + || TREE_OVERFLOW (minmax_const)) return NULL_TREE; /* Now handle all the various comparison codes. We only handle EQ_EXPR @@ -5369,10 +5610,15 @@ optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1) addressing calculation. If we return a non-null expression, it is an equivalent form of the - original computation, but need not be in the original type. */ + original computation, but need not be in the original type. + + We set *STRICT_OVERFLOW_P to true if the return values depends on + signed overflow being undefined. Otherwise we do not change + *STRICT_OVERFLOW_P. */ static tree -extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type) +extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type, + bool *strict_overflow_p) { /* To avoid exponential search depth, refuse to allow recursion past three levels. Beyond that (1) it's highly unlikely that we'll find @@ -5386,14 +5632,15 @@ extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type) return NULL; depth++; - ret = extract_muldiv_1 (t, c, code, wide_type); + ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p); depth--; return ret; } static tree -extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type) +extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, + bool *strict_overflow_p) { tree type = TREE_TYPE (t); enum tree_code tcode = TREE_CODE (t); @@ -5403,6 +5650,7 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type) tree t1, t2; int same_p = tcode == code; tree op0 = NULL_TREE, op1 = NULL_TREE; + bool sub_strict_overflow_p; /* Don't deal with constants of zero here; they confuse the code below. */ if (integer_zerop (c)) @@ -5432,6 +5680,7 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type) if ((COMPARISON_CLASS_P (op0) || UNARY_CLASS_P (op0) || BINARY_CLASS_P (op0) + || VL_EXP_CLASS_P (op0) || EXPRESSION_CLASS_P (op0)) /* ... and is unsigned, and its type is smaller than ctype, then we cannot pass through as widening. */ @@ -5456,10 +5705,11 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type) possible later conversion to our or some other type. */ if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0 && TREE_CODE (t2) == INTEGER_CST - && ! TREE_CONSTANT_OVERFLOW (t2) + && !TREE_OVERFLOW (t2) && (0 != (t1 = extract_muldiv (op0, t2, code, code == MULT_EXPR - ? ctype : NULL_TREE)))) + ? 
ctype : NULL_TREE, + strict_overflow_p)))) return t1; break; @@ -5469,7 +5719,8 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type) if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type)) { tree cstype = (*lang_hooks.types.signed_type) (ctype); - if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0) + if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p)) + != 0) { t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1)); return fold_convert (ctype, t1); @@ -5478,7 +5729,8 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type) } /* FALLTHROUGH */ case NEGATE_EXPR: - if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0) + if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p)) + != 0) return fold_build1 (tcode, ctype, fold_convert (ctype, t1)); break; @@ -5489,12 +5741,16 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type) break; /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */ - if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0 - && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0) + sub_strict_overflow_p = false; + if ((t1 = extract_muldiv (op0, c, code, wide_type, + &sub_strict_overflow_p)) != 0 + && (t2 = extract_muldiv (op1, c, code, wide_type, + &sub_strict_overflow_p)) != 0) { if (tree_int_cst_sgn (c) < 0) tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR); - + if (sub_strict_overflow_p) + *strict_overflow_p = true; return fold_build2 (tcode, ctype, fold_convert (ctype, t1), fold_convert (ctype, t2)); } @@ -5517,11 +5773,11 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type) const_binop (LSHIFT_EXPR, size_one_node, op1, 0))) - && ! TREE_OVERFLOW (t1)) + && !TREE_OVERFLOW (t1)) return extract_muldiv (build2 (tcode == LSHIFT_EXPR ? MULT_EXPR : FLOOR_DIV_EXPR, ctype, fold_convert (ctype, op0), t1), - c, code, wide_type); + c, code, wide_type, strict_overflow_p); break; case PLUS_EXPR: case MINUS_EXPR: @@ -5529,16 +5785,21 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type) can return a new PLUS or MINUS. If we can't, the only remaining cases where we can do anything are if the second operand is a constant. */ - t1 = extract_muldiv (op0, c, code, wide_type); - t2 = extract_muldiv (op1, c, code, wide_type); + sub_strict_overflow_p = false; + t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p); + t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p); if (t1 != 0 && t2 != 0 && (code == MULT_EXPR /* If not multiplication, we can only do this if both operands are divisible by c. */ || (multiple_of_p (ctype, op0, c) && multiple_of_p (ctype, op1, c)))) - return fold_build2 (tcode, ctype, fold_convert (ctype, t1), - fold_convert (ctype, t2)); + { + if (sub_strict_overflow_p) + *strict_overflow_p = true; + return fold_build2 (tcode, ctype, fold_convert (ctype, t1), + fold_convert (ctype, t2)); + } /* If this was a subtraction, negate OP1 and set it to be an addition. This simplifies the logic below. */ @@ -5571,7 +5832,7 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type) fold_convert (ctype, c), 0); /* We allow the constant to overflow with wrapping semantics. */ if (op1 == 0 - || (TREE_OVERFLOW (op1) && ! flag_wrapv)) + || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype))) break; } else @@ -5619,11 +5880,13 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type) new operation. Likewise for the RHS from a MULT_EXPR. 
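	 (e.g., when signed overflow is undefined, (X * 4) / 2 can be
	 rewritten as X * 2 this way.)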
Otherwise, do something only if the second operand is a constant. */ if (same_p - && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0) + && (t1 = extract_muldiv (op0, c, code, wide_type, + strict_overflow_p)) != 0) return fold_build2 (tcode, ctype, fold_convert (ctype, t1), fold_convert (ctype, op1)); else if (tcode == MULT_EXPR && code == MULT_EXPR - && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0) + && (t1 = extract_muldiv (op1, c, code, wide_type, + strict_overflow_p)) != 0) return fold_build2 (tcode, ctype, fold_convert (ctype, op0), fold_convert (ctype, t1)); else if (TREE_CODE (op1) != INTEGER_CST) @@ -5634,7 +5897,7 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type) if (tcode == code && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1), fold_convert (ctype, c), 0)) - && ! TREE_OVERFLOW (t1)) + && !TREE_OVERFLOW (t1)) return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1); /* If these operations "cancel" each other, we have the main @@ -5645,24 +5908,31 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type) If we have an unsigned type that is not a sizetype, we cannot do this since it will change the result if the original computation overflowed. */ - if ((! TYPE_UNSIGNED (ctype) + if ((TYPE_OVERFLOW_UNDEFINED (ctype) || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))) - && ! flag_wrapv && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR) || (tcode == MULT_EXPR && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR))) { if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0))) - return fold_build2 (tcode, ctype, fold_convert (ctype, op0), - fold_convert (ctype, - const_binop (TRUNC_DIV_EXPR, - op1, c, 0))); + { + if (TYPE_OVERFLOW_UNDEFINED (ctype)) + *strict_overflow_p = true; + return fold_build2 (tcode, ctype, fold_convert (ctype, op0), + fold_convert (ctype, + const_binop (TRUNC_DIV_EXPR, + op1, c, 0))); + } else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0))) - return fold_build2 (code, ctype, fold_convert (ctype, op0), - fold_convert (ctype, - const_binop (TRUNC_DIV_EXPR, - c, op1, 0))); + { + if (TYPE_OVERFLOW_UNDEFINED (ctype)) + *strict_overflow_p = true; + return fold_build2 (code, ctype, fold_convert (ctype, op0), + fold_convert (ctype, + const_binop (TRUNC_DIV_EXPR, + c, op1, 0))); + } } break; @@ -5885,7 +6155,7 @@ fold_mathfn_compare (enum built_in_function fcode, enum tree_code code, if (BUILTIN_SQRT_P (fcode)) { - tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1)); + tree arg = CALL_EXPR_ARG (arg0, 0); enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0)); c = TREE_REAL_CST (arg1); @@ -6059,11 +6329,6 @@ fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1) return fold_build2 (neg ? GE_EXPR : LE_EXPR, type, arg0, build_real (TREE_TYPE (arg0), max)); - /* The transformation below creates non-gimple code and thus is - not appropriate if we are in gimple form. */ - if (in_gimple_form) - return NULL_TREE; - temp = fold_build2 (neg ? 
LT_EXPR : GT_EXPR, type, arg0, build_real (TREE_TYPE (arg0), max)); return fold_build1 (TRUTH_NOT_EXPR, type, temp); @@ -6105,13 +6370,14 @@ fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1) TREE_INT_CST_LOW (arg1), TREE_INT_CST_HIGH (arg1), &lpart, &hpart, unsigned_p); - prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart); - prod = force_fit_type (prod, -1, overflow, false); + prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart, + -1, overflow); neg_overflow = false; if (unsigned_p) { - tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0); + tmp = int_const_binop (MINUS_EXPR, arg01, + build_int_cst (TREE_TYPE (arg01), 1), 0); lo = prod; /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */ @@ -6120,13 +6386,13 @@ fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1) TREE_INT_CST_LOW (tmp), TREE_INT_CST_HIGH (tmp), &lpart, &hpart, unsigned_p); - hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart); - hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod), - TREE_CONSTANT_OVERFLOW (prod)); + hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart, + -1, overflow | TREE_OVERFLOW (prod)); } else if (tree_int_cst_sgn (arg01) >= 0) { - tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0); + tmp = int_const_binop (MINUS_EXPR, arg01, + build_int_cst (TREE_TYPE (arg01), 1), 0); switch (tree_int_cst_sgn (arg1)) { case -1: @@ -6154,7 +6420,8 @@ fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1) /* A negative divisor reverses the relational operators. */ code = swap_tree_comparison (code); - tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0); + tmp = int_const_binop (PLUS_EXPR, arg01, + build_int_cst (TREE_TYPE (arg01), 1), 0); switch (tree_int_cst_sgn (arg1)) { case -1: @@ -6292,7 +6559,7 @@ fold_single_bit_test (enum tree_code code, tree arg0, tree arg1, enum machine_mode operand_mode = TYPE_MODE (type); int ops_unsigned; tree signed_type, unsigned_type, intermediate_type; - tree tem; + tree tem, one; /* First, see if we can fold the single bit test into a sign-bit test. */ @@ -6337,13 +6604,13 @@ fold_single_bit_test (enum tree_code code, tree arg0, tree arg1, inner = build2 (RSHIFT_EXPR, intermediate_type, inner, size_int (bitnum)); + one = build_int_cst (intermediate_type, 1); + if (code == EQ_EXPR) - inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, - inner, integer_one_node); + inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one); /* Put the AND last so it can combine with more things. */ - inner = build2 (BIT_AND_EXPR, intermediate_type, - inner, integer_one_node); + inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one); /* Make sure to return the proper type. */ inner = fold_convert (result_type, inner); @@ -6405,11 +6672,6 @@ tree_swap_operands_p (tree arg0, tree arg1, bool reorder) && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))) return 0; - if (DECL_P (arg1)) - return 0; - if (DECL_P (arg0)) - return 1; - /* It is preferable to swap two SSA_NAME to ensure a canonical form for commutative and comparison operators. Ensuring a canonical form allows the optimizers to find additional redundancies without @@ -6419,6 +6681,18 @@ tree_swap_operands_p (tree arg0, tree arg1, bool reorder) && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1)) return 1; + /* Put SSA_NAMEs last. */ + if (TREE_CODE (arg1) == SSA_NAME) + return 0; + if (TREE_CODE (arg0) == SSA_NAME) + return 1; + + /* Put variables last. 
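+     (like the SSA_NAME rule above, this gives comparisons a canonical
+     operand order, with the bare decl second).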
*/ + if (DECL_P (arg1)) + return 0; + if (DECL_P (arg0)) + return 1; + return 0; } @@ -6519,7 +6793,7 @@ static tree fold_sign_changed_comparison (enum tree_code code, tree type, tree arg0, tree arg1) { - tree arg0_inner, tmp; + tree arg0_inner; tree inner_type, outer_type; if (TREE_CODE (arg0) != NOP_EXPR @@ -6554,14 +6828,9 @@ fold_sign_changed_comparison (enum tree_code code, tree type, return NULL_TREE; if (TREE_CODE (arg1) == INTEGER_CST) - { - tmp = build_int_cst_wide (inner_type, - TREE_INT_CST_LOW (arg1), - TREE_INT_CST_HIGH (arg1)); - arg1 = force_fit_type (tmp, 0, - TREE_OVERFLOW (arg1), - TREE_CONSTANT_OVERFLOW (arg1)); - } + arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1), + TREE_INT_CST_HIGH (arg1), 0, + TREE_OVERFLOW (arg1)); else arg1 = fold_convert (inner_type, arg1); @@ -6582,6 +6851,7 @@ try_move_mult_to_index (enum tree_code code, tree addr, tree op1) tree ref = TREE_OPERAND (addr, 0), pref; tree ret, pos; tree itype; + bool mdim = false; /* Canonicalize op1 into a possibly non-constant delta and an INTEGER_CST s. */ @@ -6621,6 +6891,10 @@ try_move_mult_to_index (enum tree_code code, tree addr, tree op1) { if (TREE_CODE (ref) == ARRAY_REF) { + /* Remember if this was a multi-dimensional array. */ + if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF) + mdim = true; + itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0))); if (! itype) continue; @@ -6643,8 +6917,32 @@ try_move_mult_to_index (enum tree_code code, tree addr, tree op1) delta = tmp; } + /* Only fold here if we can verify we do not overflow one + dimension of a multi-dimensional array. */ + if (mdim) + { + tree tmp; + + if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST + || !INTEGRAL_TYPE_P (itype) + || !TYPE_MAX_VALUE (itype) + || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST) + continue; + + tmp = fold_binary (code, itype, + fold_convert (itype, + TREE_OPERAND (ref, 1)), + fold_convert (itype, delta)); + if (!tmp + || TREE_CODE (tmp) != INTEGER_CST + || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp)) + continue; + } + break; } + else + mdim = false; if (!handled_component_p (ref)) return NULL_TREE; @@ -6787,7 +7085,7 @@ fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1) else maybe_same = arg11; - if (exact_log2 (int11) > 0 && int01 % int11 == 0) + if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0) { alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00, build_int_cst (TREE_TYPE (arg00), @@ -6864,7 +7162,7 @@ native_encode_real (tree expr, unsigned char *ptr, int len) { tree type = TREE_TYPE (expr); int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); - int byte, offset, word, words; + int byte, offset, word, words, bitpos; unsigned char value; /* There are always 32 bits in each long, no matter the size of @@ -6874,19 +7172,20 @@ native_encode_real (tree expr, unsigned char *ptr, int len) if (total_bytes > len) return 0; - words = total_bytes / UNITS_PER_WORD; + words = 32 / UNITS_PER_WORD; real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type)); - for (byte = 0; byte < total_bytes; byte++) + for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT; + bitpos += BITS_PER_UNIT) { - int bitpos = byte * BITS_PER_UNIT; + byte = (bitpos / BITS_PER_UNIT) & 3; value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31)); - if (total_bytes > UNITS_PER_WORD) + if (UNITS_PER_WORD < 4) { word = byte / UNITS_PER_WORD; - if (FLOAT_WORDS_BIG_ENDIAN) + if (WORDS_BIG_ENDIAN) word = (words - 1) - word; offset = word * UNITS_PER_WORD; if (BYTES_BIG_ENDIAN) @@ 
-6895,8 +7194,8 @@ native_encode_real (tree expr, unsigned char *ptr, int len) offset += byte % UNITS_PER_WORD; } else - offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte; - ptr[offset] = value; + offset = BYTES_BIG_ENDIAN ? 3 - byte : byte; + ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value; } return total_bytes; } @@ -7039,8 +7338,7 @@ native_interpret_int (tree type, unsigned char *ptr, int len) << (bitpos - HOST_BITS_PER_WIDE_INT); } - return force_fit_type (build_int_cst_wide (type, lo, hi), - 0, false, false); + return build_int_cst_wide_type (type, lo, hi); } @@ -7053,7 +7351,7 @@ native_interpret_real (tree type, unsigned char *ptr, int len) { enum machine_mode mode = TYPE_MODE (type); int total_bytes = GET_MODE_SIZE (mode); - int byte, offset, word, words; + int byte, offset, word, words, bitpos; unsigned char value; /* There are always 32 bits in each long, no matter the size of the hosts long. We handle floating point representations with @@ -7064,16 +7362,17 @@ native_interpret_real (tree type, unsigned char *ptr, int len) total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); if (total_bytes > len || total_bytes > 24) return NULL_TREE; - words = total_bytes / UNITS_PER_WORD; + words = 32 / UNITS_PER_WORD; memset (tmp, 0, sizeof (tmp)); - for (byte = 0; byte < total_bytes; byte++) + for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT; + bitpos += BITS_PER_UNIT) { - int bitpos = byte * BITS_PER_UNIT; - if (total_bytes > UNITS_PER_WORD) + byte = (bitpos / BITS_PER_UNIT) & 3; + if (UNITS_PER_WORD < 4) { word = byte / UNITS_PER_WORD; - if (FLOAT_WORDS_BIG_ENDIAN) + if (WORDS_BIG_ENDIAN) word = (words - 1) - word; offset = word * UNITS_PER_WORD; if (BYTES_BIG_ENDIAN) @@ -7082,8 +7381,8 @@ native_interpret_real (tree type, unsigned char *ptr, int len) offset += byte % UNITS_PER_WORD; } else - offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte; - value = ptr[offset]; + offset = BYTES_BIG_ENDIAN ? 3 - byte : byte; + value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)]; tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31); } @@ -7311,9 +7610,6 @@ fold_unary (enum tree_code code, tree type, tree op0) case FLOAT_EXPR: case CONVERT_EXPR: case FIX_TRUNC_EXPR: - case FIX_CEIL_EXPR: - case FIX_FLOOR_EXPR: - case FIX_ROUND_EXPR: if (TREE_TYPE (op0) == type) return op0; @@ -7439,15 +7735,17 @@ fold_unary (enum tree_code code, tree type, tree op0) return fold_convert (type, build_fold_addr_expr (base)); } - if (TREE_CODE (op0) == MODIFY_EXPR - && TREE_CONSTANT (TREE_OPERAND (op0, 1)) + if ((TREE_CODE (op0) == MODIFY_EXPR + || TREE_CODE (op0) == GIMPLE_MODIFY_STMT) + && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1)) /* Detect assigning a bitfield. */ - && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF - && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1)))) + && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF + && DECL_BIT_FIELD + (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1)))) { /* Don't leave an assignment inside a conversion unless assigning a bitfield. */ - tem = fold_build1 (code, type, TREE_OPERAND (op0, 1)); + tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1)); /* First do the assignment, then return converted constant. 
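	     (the COMPOUND_EXPR built below evaluates OP0 for its side
	     effect and yields the converted constant as its value).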
*/ tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem); TREE_NO_WARNING (tem) = 1; @@ -7495,10 +7793,9 @@ fold_unary (enum tree_code code, tree type, tree op0) } if (change) { - tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1), - TREE_INT_CST_HIGH (and1)); - tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1), - TREE_CONSTANT_OVERFLOW (and1)); + tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1), + TREE_INT_CST_HIGH (and1), 0, + TREE_OVERFLOW (and1)); return fold_build2 (BIT_AND_EXPR, type, fold_convert (type, and0), tem); } @@ -7544,6 +7841,8 @@ fold_unary (enum tree_code code, tree type, tree op0) return tem ? tem : NULL_TREE; case VIEW_CONVERT_EXPR: + if (TREE_TYPE (op0) == type) + return op0; if (TREE_CODE (op0) == VIEW_CONVERT_EXPR) return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0)); return fold_view_convert_expr (type, op0); @@ -7570,7 +7869,9 @@ fold_unary (enum tree_code code, tree type, tree op0) targ0)); } /* ABS_EXPR> = ABS_EXPR even if flag_wrapv is on. */ - else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR) + else if (TREE_CODE (arg0) == ABS_EXPR) + return arg0; + else if (tree_expr_nonnegative_p (arg0)) return arg0; /* Strip sign ops from argument. */ @@ -7673,6 +7974,22 @@ fold_unary (enum tree_code code, tree type, tree op0) tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0)); return fold_convert (type, tem); } + if (TREE_CODE (arg0) == CALL_EXPR) + { + tree fn = get_callee_fndecl (arg0); + if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL) + switch (DECL_FUNCTION_CODE (fn)) + { + CASE_FLT_FN (BUILT_IN_CEXPI): + fn = mathfn_built_in (type, BUILT_IN_COS); + if (fn) + return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0)); + break; + + default: + break; + } + } return NULL_TREE; case IMAGPART_EXPR: @@ -7699,10 +8016,26 @@ fold_unary (enum tree_code code, tree type, tree op0) tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0)); return fold_convert (type, negate_expr (tem)); } - return NULL_TREE; + if (TREE_CODE (arg0) == CALL_EXPR) + { + tree fn = get_callee_fndecl (arg0); + if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL) + switch (DECL_FUNCTION_CODE (fn)) + { + CASE_FLT_FN (BUILT_IN_CEXPI): + fn = mathfn_built_in (type, BUILT_IN_SIN); + if (fn) + return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0)); + break; - default: - return NULL_TREE; + default: + break; + } + } + return NULL_TREE; + + default: + return NULL_TREE; } /* switch (code) */ } @@ -7723,24 +8056,24 @@ fold_minmax (enum tree_code code, tree type, tree op0, tree op1) else gcc_unreachable (); - /* MIN (MAX (a, b), b) == b.  */ + /* MIN (MAX (a, b), b) == b. */ if (TREE_CODE (op0) == compl_code && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0)) return omit_one_operand (type, op1, TREE_OPERAND (op0, 0)); - /* MIN (MAX (b, a), b) == b.  */ + /* MIN (MAX (b, a), b) == b. */ if (TREE_CODE (op0) == compl_code && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0) && reorder_operands_p (TREE_OPERAND (op0, 1), op1)) return omit_one_operand (type, op1, TREE_OPERAND (op0, 1)); - /* MIN (a, MAX (a, b)) == a.  */ + /* MIN (a, MAX (a, b)) == a. */ if (TREE_CODE (op1) == compl_code && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0) && reorder_operands_p (op0, TREE_OPERAND (op1, 1))) return omit_one_operand (type, op0, TREE_OPERAND (op1, 1)); - /* MIN (a, MAX (b, a)) == a.  */ + /* MIN (a, MAX (b, a)) == a. 
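The four absorption identities annotated here are easy to sanity-check at the source level. A minimal sketch, not GCC code; min_ and max_ are illustrative helpers standing in for MIN_EXPR/MAX_EXPR:

/* fold_minmax collapses MIN (MAX (a, b), b) to b and
   MIN (a, MAX (a, b)) to a, plus the commuted forms.  */
static int min_ (int a, int b) { return a < b ? a : b; }
static int max_ (int a, int b) { return a > b ? a : b; }

int
minmax_demo (int a, int b)
{
  int x = min_ (max_ (a, b), b);   /* always equals b */
  int y = min_ (a, max_ (a, b));   /* always equals a */
  return x + y;
}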
*/ if (TREE_CODE (op1) == compl_code && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0) && reorder_operands_p (op0, TREE_OPERAND (op1, 0))) @@ -7749,6 +8082,143 @@ fold_minmax (enum tree_code code, tree type, tree op0, tree op1) return NULL_TREE; } +/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1 + by changing CODE to reduce the magnitude of constants involved in + ARG0 of the comparison. + Returns a canonicalized comparison tree if a simplification was + possible, otherwise returns NULL_TREE. + Set *STRICT_OVERFLOW_P to true if the canonicalization is only + valid if signed overflow is undefined. */ + +static tree +maybe_canonicalize_comparison_1 (enum tree_code code, tree type, + tree arg0, tree arg1, + bool *strict_overflow_p) +{ + enum tree_code code0 = TREE_CODE (arg0); + tree t, cst0 = NULL_TREE; + int sgn0; + bool swap = false; + + /* Match A +- CST code arg1 and CST code arg1. */ + if (!(((code0 == MINUS_EXPR + || code0 == PLUS_EXPR) + && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) + || code0 == INTEGER_CST)) + return NULL_TREE; + + /* Identify the constant in arg0 and its sign. */ + if (code0 == INTEGER_CST) + cst0 = arg0; + else + cst0 = TREE_OPERAND (arg0, 1); + sgn0 = tree_int_cst_sgn (cst0); + + /* Overflowed constants and zero will cause problems. */ + if (integer_zerop (cst0) + || TREE_OVERFLOW (cst0)) + return NULL_TREE; + + /* See if we can reduce the magnitude of the constant in + arg0 by changing the comparison code. */ + if (code0 == INTEGER_CST) + { + /* CST <= arg1 -> CST-1 < arg1. */ + if (code == LE_EXPR && sgn0 == 1) + code = LT_EXPR; + /* -CST < arg1 -> -CST-1 <= arg1. */ + else if (code == LT_EXPR && sgn0 == -1) + code = LE_EXPR; + /* CST > arg1 -> CST-1 >= arg1. */ + else if (code == GT_EXPR && sgn0 == 1) + code = GE_EXPR; + /* -CST >= arg1 -> -CST-1 > arg1. */ + else if (code == GE_EXPR && sgn0 == -1) + code = GT_EXPR; + else + return NULL_TREE; + /* arg1 code' CST' might be more canonical. */ + swap = true; + } + else + { + /* A - CST < arg1 -> A - CST-1 <= arg1. */ + if (code == LT_EXPR + && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR)) + code = LE_EXPR; + /* A + CST > arg1 -> A + CST-1 >= arg1. */ + else if (code == GT_EXPR + && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR)) + code = GE_EXPR; + /* A + CST <= arg1 -> A + CST-1 < arg1. */ + else if (code == LE_EXPR + && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR)) + code = LT_EXPR; + /* A - CST >= arg1 -> A - CST-1 > arg1. */ + else if (code == GE_EXPR + && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR)) + code = GT_EXPR; + else + return NULL_TREE; + *strict_overflow_p = true; + } + + /* Now build the constant reduced in magnitude. */ + t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR, + cst0, build_int_cst (TREE_TYPE (cst0), 1), 0); + if (code0 != INTEGER_CST) + t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t); + + /* If swapping might yield to a more canonical form, do so. */ + if (swap) + return fold_build2 (swap_tree_comparison (code), type, arg1, t); + else + return fold_build2 (code, type, t, arg1); +} + +/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined + overflow further. Try to decrease the magnitude of constants involved + by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa + and put sole constants at the second argument position. + Returns the canonicalized tree if changed, otherwise NULL_TREE. 
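The rewrite rules these two helpers implement are easiest to read as source-level transformations. A hedged sketch, not GCC code; every line relies on signed overflow being undefined, so none of them fire under -fwrapv:

int
canon_demo (int x, int y)
{
  int a = (x + 2 <= y);   /* becomes  x + 1 <  y  */
  int b = (x + 2 >  y);   /* becomes  x + 1 >= y  */
  int c = (x - 2 >= y);   /* becomes  x - 1 >  y  */
  int d = (2 <= y);       /* becomes  1 < y, then swaps to  y > 1  */
  return a + b + c + d;
}

In each case the magnitude of the constant shrinks by one while the comparison flips between its strict and non-strict form, which is what makes the result more canonical.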
*/ + +static tree +maybe_canonicalize_comparison (enum tree_code code, tree type, + tree arg0, tree arg1) +{ + tree t; + bool strict_overflow_p; + const char * const warnmsg = G_("assuming signed overflow does not occur " + "when reducing constant in comparison"); + + /* In principle pointers also have undefined overflow behavior, + but that causes problems elsewhere. */ + if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)) + || POINTER_TYPE_P (TREE_TYPE (arg0))) + return NULL_TREE; + + /* Try canonicalization by simplifying arg0. */ + strict_overflow_p = false; + t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1, + &strict_overflow_p); + if (t) + { + if (strict_overflow_p) + fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE); + return t; + } + + /* Try canonicalization by simplifying arg1 using the swapped + comparison. */ + code = swap_tree_comparison (code); + strict_overflow_p = false; + t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0, + &strict_overflow_p); + if (t && strict_overflow_p) + fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE); + return t; +} + /* Subroutine of fold_binary. This routine performs all of the transformations that are common to the equality/inequality operators (EQ_EXPR and NE_EXPR) and the ordering operators @@ -7780,8 +8250,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)) - && !TYPE_UNSIGNED (TREE_TYPE (arg1)) - && !(flag_wrapv || flag_trapv)) + && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) && (TREE_CODE (arg1) == INTEGER_CST && !TREE_OVERFLOW (arg1))) { @@ -7794,19 +8263,169 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR, TREE_TYPE (arg1), const2, const1); + + /* If the constant operation overflowed this can be + simplified as a comparison against INT_MAX/INT_MIN. */ + if (TREE_CODE (lhs) == INTEGER_CST + && TREE_OVERFLOW (lhs)) + { + int const1_sgn = tree_int_cst_sgn (const1); + enum tree_code code2 = code; + + /* Get the sign of the constant on the lhs if the + operation were VARIABLE + CONST1. */ + if (TREE_CODE (arg0) == MINUS_EXPR) + const1_sgn = -const1_sgn; + + /* The sign of the constant determines if we overflowed + INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1). + Canonicalize to the INT_MIN overflow by swapping the comparison + if necessary. */ + if (const1_sgn == -1) + code2 = swap_tree_comparison (code); + + /* We now can look at the canonicalized case + VARIABLE + 1 CODE2 INT_MIN + and decide on the result. */ + if (code2 == LT_EXPR + || code2 == LE_EXPR + || code2 == EQ_EXPR) + return omit_one_operand (type, boolean_false_node, variable); + else if (code2 == NE_EXPR + || code2 == GE_EXPR + || code2 == GT_EXPR) + return omit_one_operand (type, boolean_true_node, variable); + } + if (TREE_CODE (lhs) == TREE_CODE (arg1) && (TREE_CODE (lhs) != INTEGER_CST || !TREE_OVERFLOW (lhs))) - return fold_build2 (code, type, variable, lhs); + { + fold_overflow_warning (("assuming signed overflow does not occur " + "when changing X +- C1 cmp C2 to " + "X cmp C1 +- C2"), + WARN_STRICT_OVERFLOW_COMPARISON); + return fold_build2 (code, type, variable, lhs); + } + } + + /* For comparisons of pointers we can decompose it to a compile time + comparison of the base objects and the offsets into the object. 
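At the source level the decomposition described here lets constant address comparisons fold away entirely. A minimal illustration under stated assumptions (32-bit int; comparing addresses of distinct members is formally undefined in C and is shown only to picture bitpos0/bitpos1):

struct S { int a[4]; int b; };

int
addr_demo (void)
{
  struct S s;
  int x = (&s.a[1] == &s.a[2]);   /* same base, byte offsets 4 vs 8: folds to 0 */
  int y = (&s.a[1] <  &s.b);      /* same base, member b lies after a: folds to 1 */
  return x + y;
}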
+ This requires at least one operand being an ADDR_EXPR to do more + than the operand_equal_p test below. */ + if (POINTER_TYPE_P (TREE_TYPE (arg0)) + && (TREE_CODE (arg0) == ADDR_EXPR + || TREE_CODE (arg1) == ADDR_EXPR)) + { + tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE; + HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0; + enum machine_mode mode; + int volatilep, unsignedp; + bool indirect_base0 = false; + + /* Get base and offset for the access. Strip ADDR_EXPR for + get_inner_reference, but put it back by stripping INDIRECT_REF + off the base object if possible. */ + base0 = arg0; + if (TREE_CODE (arg0) == ADDR_EXPR) + { + base0 = get_inner_reference (TREE_OPERAND (arg0, 0), + &bitsize, &bitpos0, &offset0, &mode, + &unsignedp, &volatilep, false); + if (TREE_CODE (base0) == INDIRECT_REF) + base0 = TREE_OPERAND (base0, 0); + else + indirect_base0 = true; + } + + base1 = arg1; + if (TREE_CODE (arg1) == ADDR_EXPR) + { + base1 = get_inner_reference (TREE_OPERAND (arg1, 0), + &bitsize, &bitpos1, &offset1, &mode, + &unsignedp, &volatilep, false); + /* We have to make sure to have an indirect/non-indirect base1 + just the same as we did for base0. */ + if (TREE_CODE (base1) == INDIRECT_REF + && !indirect_base0) + base1 = TREE_OPERAND (base1, 0); + else if (!indirect_base0) + base1 = NULL_TREE; + } + else if (indirect_base0) + base1 = NULL_TREE; + + /* If we have equivalent bases we might be able to simplify. */ + if (base0 && base1 + && operand_equal_p (base0, base1, 0)) + { + /* We can fold this expression to a constant if the non-constant + offset parts are equal. */ + if (offset0 == offset1 + || (offset0 && offset1 + && operand_equal_p (offset0, offset1, 0))) + { + switch (code) + { + case EQ_EXPR: + return build_int_cst (boolean_type_node, bitpos0 == bitpos1); + case NE_EXPR: + return build_int_cst (boolean_type_node, bitpos0 != bitpos1); + case LT_EXPR: + return build_int_cst (boolean_type_node, bitpos0 < bitpos1); + case LE_EXPR: + return build_int_cst (boolean_type_node, bitpos0 <= bitpos1); + case GE_EXPR: + return build_int_cst (boolean_type_node, bitpos0 >= bitpos1); + case GT_EXPR: + return build_int_cst (boolean_type_node, bitpos0 > bitpos1); + default:; + } + } + /* We can simplify the comparison to a comparison of the variable + offset parts if the constant offset parts are equal. + Be careful to use signed size type here because otherwise we + mess with array offsets in the wrong way. This is possible + because pointer arithmetic is restricted to retain within an + object and overflow on pointer differences is undefined as of + 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */ + else if (bitpos0 == bitpos1) + { + tree signed_size_type_node; + signed_size_type_node = signed_type_for (size_type_node); + + /* By converting to signed size type we cover middle-end pointer + arithmetic which operates on unsigned pointer types of size + type size and ARRAY_REF offsets which are properly sign or + zero extended from their type in case it is narrower than + size type. 
*/ + if (offset0 == NULL_TREE) + offset0 = build_int_cst (signed_size_type_node, 0); + else + offset0 = fold_convert (signed_size_type_node, offset0); + if (offset1 == NULL_TREE) + offset1 = build_int_cst (signed_size_type_node, 0); + else + offset1 = fold_convert (signed_size_type_node, offset1); + + return fold_build2 (code, type, offset0, offset1); + } + } } /* If this is a comparison of two exprs that look like an ARRAY_REF of the same object, then we can fold this to a comparison of the two offsets in signed size type. This is possible because pointer arithmetic is restricted to retain within an object and overflow on pointer differences - is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */ + is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. + + We check flag_wrapv directly because pointers types are unsigned, + and therefore TYPE_OVERFLOW_WRAPS returns true for them. That is + normally what we want to avoid certain odd overflow cases, but + not here. */ if (POINTER_TYPE_P (TREE_TYPE (arg0)) - && !flag_wrapv && !flag_trapv) + && !flag_wrapv + && !TYPE_OVERFLOW_TRAPS (TREE_TYPE (arg0))) { tree base0, offset0, base1, offset1; @@ -7839,8 +8458,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) X CMP Y +- C2 +- C1 for signed X, Y. This is valid if the resulting offset is smaller in absolute value than the original one. */ - if (!(flag_wrapv || flag_trapv) - && !TYPE_UNSIGNED (TREE_TYPE (arg0)) + if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)) && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))) @@ -7853,6 +8471,9 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) tree variable1 = TREE_OPERAND (arg0, 0); tree variable2 = TREE_OPERAND (arg1, 0); tree cst; + const char * const warnmsg = G_("assuming signed overflow does not " + "occur when combining constants around " + "a comparison"); /* Put the constant on the side where it doesn't overflow and is of lower absolute value than before. */ @@ -7861,22 +8482,61 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) const2, const1, 0); if (!TREE_OVERFLOW (cst) && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)) - return fold_build2 (code, type, - variable1, - fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1), - variable2, cst)); + { + fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON); + return fold_build2 (code, type, + variable1, + fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1), + variable2, cst)); + } cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1) ? MINUS_EXPR : PLUS_EXPR, const1, const2, 0); if (!TREE_OVERFLOW (cst) && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)) - return fold_build2 (code, type, - fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0), - variable1, cst), - variable2); + { + fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON); + return fold_build2 (code, type, + fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0), + variable1, cst), + variable2); + } + } + + /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the + signed arithmetic case. That form is created by the compiler + often enough for folding it to be of value. One example is in + computing loop trip counts after Operator Strength Reduction. 
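A source-level sketch of the X * C1 CMP 0 rewrite described in this comment (illustrative only; it fires just for signed types with undefined overflow, and a -Wstrict-overflow note is emitted first):

int
mult_cmp_demo (int x)
{
  int a = (x * 4  > 0);   /* becomes  x > 0 */
  int b = (x * -4 > 0);   /* negative C1 swaps the sense:  x < 0 */
  return a + b;
}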
*/ + if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)) + && TREE_CODE (arg0) == MULT_EXPR + && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST + && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))) + && integer_zerop (arg1)) + { + tree const1 = TREE_OPERAND (arg0, 1); + tree const2 = arg1; /* zero */ + tree variable1 = TREE_OPERAND (arg0, 0); + enum tree_code cmp_code = code; + + gcc_assert (!integer_zerop (const1)); + + fold_overflow_warning (("assuming signed overflow does not occur when " + "eliminating multiplication in comparison " + "with zero"), + WARN_STRICT_OVERFLOW_COMPARISON); + + /* If const1 is negative we swap the sense of the comparison. */ + if (tree_int_cst_sgn (const1) < 0) + cmp_code = swap_tree_comparison (cmp_code); + + return fold_build2 (cmp_code, type, variable1, const2); } + tem = maybe_canonicalize_comparison (code, type, arg0, arg1); + if (tem) + return tem; + if (FLOAT_TYPE_P (TREE_TYPE (arg0))) { tree targ0 = strip_float_extensions (arg0); @@ -7944,7 +8604,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR, arg1, TREE_OPERAND (arg0, 1), 0)) - && ! TREE_CONSTANT_OVERFLOW (tem)) + && !TREE_OVERFLOW (tem)) return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem); /* Likewise, we can simplify a comparison of a real constant with @@ -7956,7 +8616,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0), arg1, 0)) - && ! TREE_CONSTANT_OVERFLOW (tem)) + && !TREE_OVERFLOW (tem)) return fold_build2 (swap_tree_comparison (code), type, TREE_OPERAND (arg0, 1), tem); @@ -8236,6 +8896,27 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) return tem; } + /* Fold ~X op ~Y as Y op X. */ + if (TREE_CODE (arg0) == BIT_NOT_EXPR + && TREE_CODE (arg1) == BIT_NOT_EXPR) + { + tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0)); + return fold_build2 (code, type, + fold_convert (cmp_type, TREE_OPERAND (arg1, 0)), + TREE_OPERAND (arg0, 0)); + } + + /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */ + if (TREE_CODE (arg0) == BIT_NOT_EXPR + && TREE_CODE (arg1) == INTEGER_CST) + { + tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0)); + return fold_build2 (swap_tree_comparison (code), type, + TREE_OPERAND (arg0, 0), + fold_build1 (BIT_NOT_EXPR, cmp_type, + fold_convert (cmp_type, arg1))); + } + return NULL_TREE; } @@ -8287,8 +8968,10 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) enum tree_code_class kind = TREE_CODE_CLASS (code); tree arg0, arg1, tem; tree t1 = NULL_TREE; + bool strict_overflow_p; - gcc_assert (IS_EXPR_CODE_CLASS (kind) + gcc_assert ((IS_EXPR_CODE_CLASS (kind) + || IS_GIMPLE_STMT_CODE_CLASS (kind)) && TREE_CODE_LENGTH (code) == 2 && op0 != NULL_TREE && op1 != NULL_TREE); @@ -8455,6 +9138,24 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (integer_zerop (arg1)) return non_lvalue (fold_convert (type, arg0)); + /* ~X + X is -1. */ + if (TREE_CODE (arg0) == BIT_NOT_EXPR + && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) + && !TYPE_OVERFLOW_TRAPS (type)) + { + t1 = build_int_cst_type (type, -1); + return omit_one_operand (type, t1, arg1); + } + + /* X + ~X is -1. 
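The identity behind these two folds is plain two's-complement arithmetic: ~x == -x - 1, so x + ~x is always -1. A minimal sketch (the folder skips this when the type's overflow traps, e.g. under -ftrapv):

int
bitnot_plus_demo (int x)
{
  int a = ~x + x;   /* folds to -1 */
  int b = x + ~x;   /* folds to -1 as well */
  return a == b;    /* always 1 */
}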
*/ + if (TREE_CODE (arg1) == BIT_NOT_EXPR + && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0) + && !TYPE_OVERFLOW_TRAPS (type)) + { + t1 = build_int_cst_type (type, -1); + return omit_one_operand (type, t1, arg0); + } + /* If we are adding two BIT_AND_EXPR's, both of which are and'ing with a constant, and the two constants have no bits in common, we should treat this as a BIT_IOR_EXPR since this may produce more @@ -8548,6 +9249,41 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) fold_convert (type, tem)); } + /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y ) + to __complex__ ( x, y ). This is not the same for SNaNs or + if signed zeros are involved. */ + if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) + && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))) + && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))) + { + tree rtype = TREE_TYPE (TREE_TYPE (arg0)); + tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0); + tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0); + bool arg0rz = false, arg0iz = false; + if ((arg0r && (arg0rz = real_zerop (arg0r))) + || (arg0i && (arg0iz = real_zerop (arg0i)))) + { + tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1); + tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1); + if (arg0rz && arg1i && real_zerop (arg1i)) + { + tree rp = arg1r ? arg1r + : build1 (REALPART_EXPR, rtype, arg1); + tree ip = arg0i ? arg0i + : build1 (IMAGPART_EXPR, rtype, arg0); + return fold_build2 (COMPLEX_EXPR, type, rp, ip); + } + else if (arg0iz && arg1r && real_zerop (arg1r)) + { + tree rp = arg0r ? arg0r + : build1 (REALPART_EXPR, rtype, arg0); + tree ip = arg1i ? arg1i + : build1 (IMAGPART_EXPR, rtype, arg1); + return fold_build2 (COMPLEX_EXPR, type, rp, ip); + } + } + } + if (flag_unsafe_math_optimizations && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR) && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR) @@ -8673,6 +9409,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) { tree var0, con0, lit0, minus_lit0; tree var1, con1, lit1, minus_lit1; + bool ok = true; /* Split both trees into variables, constants, and literals. Then associate each group together, the constants with literals, @@ -8683,12 +9420,32 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1, code == MINUS_EXPR); + /* With undefined overflow we can only associate constants + with one variable. */ + if ((POINTER_TYPE_P (type) + || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type))) + && var0 && var1) + { + tree tmp0 = var0; + tree tmp1 = var1; + + if (TREE_CODE (tmp0) == NEGATE_EXPR) + tmp0 = TREE_OPERAND (tmp0, 0); + if (TREE_CODE (tmp1) == NEGATE_EXPR) + tmp1 = TREE_OPERAND (tmp1, 0); + /* The only case we can still associate with two variables + is if they are the same, modulo negation. */ + if (!operand_equal_p (tmp0, tmp1, 0)) + ok = false; + } + /* Only do something if we found more than two objects. Otherwise, nothing has changed and we risk infinite recursion. */ - if (2 < ((var0 != 0) + (var1 != 0) - + (con0 != 0) + (con1 != 0) - + (lit0 != 0) + (lit1 != 0) - + (minus_lit0 != 0) + (minus_lit1 != 0))) + if (ok + && (2 < ((var0 != 0) + (var1 != 0) + + (con0 != 0) + (con1 != 0) + + (lit0 != 0) + (lit1 != 0) + + (minus_lit0 != 0) + (minus_lit1 != 0)))) { /* Recombine MINUS_EXPR operands by using PLUS_EXPR. 
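The effect of the new `ok` guard on reassociation is easiest to see in source form. A hedged sketch, not GCC code: literals still combine freely, but with undefined overflow constants may no longer migrate between two distinct variables:

int
assoc_demo (int x, int y)
{
  int a = (x + 5) + 3;         /* still folds to  x + 8 */
  int b = (x + 5) - (x + 3);   /* same variable modulo negation: folds to 2 */
  int c = (x + 5) + (y - 5);   /* two distinct variables: not reassociated */
  return a + b + c;
}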
*/ if (code == MINUS_EXPR) @@ -8752,7 +9509,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */ if (TREE_CODE (arg0) == NEGATE_EXPR && (FLOAT_TYPE_P (type) - || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)) + || INTEGRAL_TYPE_P (type)) && negate_expr_p (arg1) && reorder_operands_p (arg0, arg1)) return fold_build2 (MINUS_EXPR, type, negate_expr (arg1), @@ -8760,14 +9517,15 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) /* Convert -A - 1 to ~A. */ if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR - && integer_onep (arg1)) + && integer_onep (arg1) + && !TYPE_OVERFLOW_TRAPS (type)) return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, TREE_OPERAND (arg0, 0))); /* Convert -1 - A to ~A. */ if (INTEGRAL_TYPE_P (type) && integer_all_onesp (arg0)) - return fold_build1 (BIT_NOT_EXPR, type, arg1); + return fold_build1 (BIT_NOT_EXPR, type, op1); if (! FLOAT_TYPE_P (type)) { @@ -8822,6 +9580,43 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0)) return negate_expr (fold_convert (type, arg1)); + /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to + __complex__ ( x, -y ). This is not the same for SNaNs or if + signed zeros are involved. */ + if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) + && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))) + && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))) + { + tree rtype = TREE_TYPE (TREE_TYPE (arg0)); + tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0); + tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0); + bool arg0rz = false, arg0iz = false; + if ((arg0r && (arg0rz = real_zerop (arg0r))) + || (arg0i && (arg0iz = real_zerop (arg0i)))) + { + tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1); + tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1); + if (arg0rz && arg1i && real_zerop (arg1i)) + { + tree rp = fold_build1 (NEGATE_EXPR, rtype, + arg1r ? arg1r + : build1 (REALPART_EXPR, rtype, arg1)); + tree ip = arg0i ? arg0i + : build1 (IMAGPART_EXPR, rtype, arg0); + return fold_build2 (COMPLEX_EXPR, type, rp, ip); + } + else if (arg0iz && arg1r && real_zerop (arg1r)) + { + tree rp = arg0r ? arg0r + : build1 (REALPART_EXPR, rtype, arg0); + tree ip = fold_build1 (NEGATE_EXPR, rtype, + arg1i ? arg1i + : build1 (IMAGPART_EXPR, rtype, arg1)); + return fold_build2 (COMPLEX_EXPR, type, rp, ip); + } + } + } + /* Fold &x - &x. This can happen from &x.foo - &x. This is unsafe for certain floats even in non-IEEE formats. In IEEE, it is unsafe because it does wrong for NaNs. @@ -8838,7 +9633,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) /* Avoid this transformation if B is a positive REAL_CST. */ && (TREE_CODE (arg1) != REAL_CST || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))) - || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))) + || INTEGRAL_TYPE_P (type))) return fold_build2 (PLUS_EXPR, type, fold_convert (type, arg0), fold_convert (type, negate_expr (arg1))); @@ -8923,6 +9718,14 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) /* Transform x * -1 into -x. */ if (integer_all_onesp (arg1)) return fold_convert (type, negate_expr (arg0)); + /* Transform x * -C into -x * C if x is easily negatable. 
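In source terms this transformation moves the sign off the constant whenever the other operand is cheap to negate (negate_expr_p). A minimal sketch:

int
negc_demo (int y, int a, int b)
{
  int p = (-y) * -3;      /* becomes  y * 3 */
  int q = (a - b) * -5;   /* becomes  (b - a) * 5 */
  return p + q;
}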
*/ + if (TREE_CODE (arg1) == INTEGER_CST + && tree_int_cst_sgn (arg1) == -1 + && negate_expr_p (arg0) + && (tem = negate_expr (arg1)) != arg1 + && !TREE_OVERFLOW (tem)) + return fold_build2 (MULT_EXPR, type, + negate_expr (arg0), tem); /* (a * (1 << b)) is (a << b) */ if (TREE_CODE (arg1) == LSHIFT_EXPR @@ -8934,11 +9737,20 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) return fold_build2 (LSHIFT_EXPR, type, arg1, TREE_OPERAND (arg0, 1)); + strict_overflow_p = false; if (TREE_CODE (arg1) == INTEGER_CST && 0 != (tem = extract_muldiv (op0, fold_convert (type, arg1), - code, NULL_TREE))) - return fold_convert (type, tem); + code, NULL_TREE, + &strict_overflow_p))) + { + if (strict_overflow_p) + fold_overflow_warning (("assuming signed overflow does not " + "occur when simplifying " + "multiplication"), + WARN_STRICT_OVERFLOW_MISC); + return fold_convert (type, tem); + } /* Optimize z * conj(z) for integer complex numbers. */ if (TREE_CODE (arg0) == CONJ_EXPR @@ -8992,6 +9804,28 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) } } + /* Fold z * +-I to __complex__ (-+__imag z, +-__real z). + This is not the same for NaNs or if signed zeros are + involved. */ + if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) + && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))) + && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)) + && TREE_CODE (arg1) == COMPLEX_CST + && real_zerop (TREE_REALPART (arg1))) + { + tree rtype = TREE_TYPE (TREE_TYPE (arg0)); + if (real_onep (TREE_IMAGPART (arg1))) + return fold_build2 (COMPLEX_EXPR, type, + negate_expr (fold_build1 (IMAGPART_EXPR, + rtype, arg0)), + fold_build1 (REALPART_EXPR, rtype, arg0)); + else if (real_minus_onep (TREE_IMAGPART (arg1))) + return fold_build2 (COMPLEX_EXPR, type, + fold_build1 (IMAGPART_EXPR, rtype, arg0), + negate_expr (fold_build1 (REALPART_EXPR, + rtype, arg0))); + } + /* Optimize z * conj(z) for floating point complex numbers. Guarded by flag_unsafe_math_optimizations as non-finite imaginary components don't produce scalar results. */ @@ -9012,9 +9846,9 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) /* Optimizations of root(...)*root(...). */ if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0)) { - tree rootfn, arg, arglist; - tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1)); - tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1)); + tree rootfn, arg; + tree arg00 = CALL_EXPR_ARG (arg0, 0); + tree arg10 = CALL_EXPR_ARG (arg1, 0); /* Optimize sqrt(x)*sqrt(x) as x. */ if (BUILTIN_SQRT_P (fcode0) @@ -9023,21 +9857,19 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) return arg00; /* Optimize root(x)*root(y) as root(x*y). */ - rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); + rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); arg = fold_build2 (MULT_EXPR, type, arg00, arg10); - arglist = build_tree_list (NULL_TREE, arg); - return build_function_call_expr (rootfn, arglist); + return build_call_expr (rootfn, 1, arg); } /* Optimize expN(x)*expN(y) as expN(x+y). */ if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0)) { - tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); + tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); tree arg = fold_build2 (PLUS_EXPR, type, - TREE_VALUE (TREE_OPERAND (arg0, 1)), - TREE_VALUE (TREE_OPERAND (arg1, 1))); - tree arglist = build_tree_list (NULL_TREE, arg); - return build_function_call_expr (expfn, arglist); + CALL_EXPR_ARG (arg0, 0), + CALL_EXPR_ARG (arg1, 0)); + return build_call_expr (expfn, 1, arg); } /* Optimizations of pow(...)*pow(...). 
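The pow()*pow() combinations handled in this block, written at the source level. A sketch under the assumption that -funsafe-math-optimizations is in effect, which guards this whole region:

#include <math.h>

double
pow_demo (double x, double y, double z)
{
  double a = pow (x, y) * pow (z, y);   /* becomes  pow (x * z, y) */
  double b = pow (x, y) * pow (x, z);   /* becomes  pow (x, y + z) */
  double c = x * pow (x, 2.0);          /* becomes  pow (x, 3.0)   */
  return a + b + c;
}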
*/ @@ -9045,33 +9877,25 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF) || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL)) { - tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1)); - tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, - 1))); - tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1)); - tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, - 1))); + tree arg00 = CALL_EXPR_ARG (arg0, 0); + tree arg01 = CALL_EXPR_ARG (arg0, 1); + tree arg10 = CALL_EXPR_ARG (arg1, 0); + tree arg11 = CALL_EXPR_ARG (arg1, 1); /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */ if (operand_equal_p (arg01, arg11, 0)) { - tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); + tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10); - tree arglist = tree_cons (NULL_TREE, arg, - build_tree_list (NULL_TREE, - arg01)); - return build_function_call_expr (powfn, arglist); + return build_call_expr (powfn, 2, arg, arg01); } /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */ if (operand_equal_p (arg00, arg10, 0)) { - tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); + tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11); - tree arglist = tree_cons (NULL_TREE, arg00, - build_tree_list (NULL_TREE, - arg)); - return build_function_call_expr (powfn, arglist); + return build_call_expr (powfn, 2, arg00, arg); } } @@ -9082,14 +9906,13 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN) || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF) || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL)) - && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)), - TREE_VALUE (TREE_OPERAND (arg1, 1)), 0)) + && operand_equal_p (CALL_EXPR_ARG (arg0, 0), + CALL_EXPR_ARG (arg1, 0), 0)) { tree sinfn = mathfn_built_in (type, BUILT_IN_SIN); if (sinfn != NULL_TREE) - return build_function_call_expr (sinfn, - TREE_OPERAND (arg0, 1)); + return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0)); } /* Optimize x*pow(x,c) as pow(x,c+1). */ @@ -9097,23 +9920,20 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) || fcode1 == BUILT_IN_POWF || fcode1 == BUILT_IN_POWL) { - tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1)); - tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, - 1))); + tree arg10 = CALL_EXPR_ARG (arg1, 0); + tree arg11 = CALL_EXPR_ARG (arg1, 1); if (TREE_CODE (arg11) == REAL_CST - && ! TREE_CONSTANT_OVERFLOW (arg11) + && !TREE_OVERFLOW (arg11) && operand_equal_p (arg0, arg10, 0)) { - tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0); + tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); REAL_VALUE_TYPE c; - tree arg, arglist; + tree arg; c = TREE_REAL_CST (arg11); real_arithmetic (&c, PLUS_EXPR, &c, &dconst1); arg = build_real (type, c); - arglist = build_tree_list (NULL_TREE, arg); - arglist = tree_cons (NULL_TREE, arg0, arglist); - return build_function_call_expr (powfn, arglist); + return build_call_expr (powfn, 2, arg0, arg); } } @@ -9122,23 +9942,20 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) || fcode0 == BUILT_IN_POWF || fcode0 == BUILT_IN_POWL) { - tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1)); - tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, - 1))); + tree arg00 = CALL_EXPR_ARG (arg0, 0); + tree arg01 = CALL_EXPR_ARG (arg0, 1); if (TREE_CODE (arg01) == REAL_CST - && ! 
TREE_CONSTANT_OVERFLOW (arg01) + && !TREE_OVERFLOW (arg01) && operand_equal_p (arg1, arg00, 0)) { - tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); + tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); REAL_VALUE_TYPE c; - tree arg, arglist; + tree arg; c = TREE_REAL_CST (arg01); real_arithmetic (&c, PLUS_EXPR, &c, &dconst1); arg = build_real (type, c); - arglist = build_tree_list (NULL_TREE, arg); - arglist = tree_cons (NULL_TREE, arg1, arglist); - return build_function_call_expr (powfn, arglist); + return build_call_expr (powfn, 2, arg1, arg); } } @@ -9151,9 +9968,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (powfn) { tree arg = build_real (type, dconst2); - tree arglist = build_tree_list (NULL_TREE, arg); - arglist = tree_cons (NULL_TREE, arg0, arglist); - return build_function_call_expr (powfn, arglist); + return build_call_expr (powfn, 2, arg0, arg); } } } @@ -9173,8 +9988,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (TREE_CODE (arg0) == BIT_NOT_EXPR && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) { - t1 = build_int_cst (type, -1); - t1 = force_fit_type (t1, 0, false, false); + t1 = build_int_cst_type (type, -1); return omit_one_operand (type, t1, arg1); } @@ -9182,8 +9996,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (TREE_CODE (arg1) == BIT_NOT_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) { - t1 = build_int_cst (type, -1); - t1 = force_fit_type (t1, 0, false, false); + t1 = build_int_cst_type (type, -1); return omit_one_operand (type, t1, arg0); } @@ -9289,8 +10102,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (TREE_CODE (arg0) == BIT_NOT_EXPR && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) { - t1 = build_int_cst (type, -1); - t1 = force_fit_type (t1, 0, false, false); + t1 = build_int_cst_type (type, -1); return omit_one_operand (type, t1, arg1); } @@ -9298,8 +10110,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (TREE_CODE (arg1) == BIT_NOT_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) { - t1 = build_int_cst (type, -1); - t1 = force_fit_type (t1, 0, false, false); + t1 = build_int_cst_type (type, -1); return omit_one_operand (type, t1, arg0); } @@ -9374,6 +10185,13 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) fold_convert (type, TREE_OPERAND (arg0, 0)), fold_convert (type, TREE_OPERAND (arg1, 0))); + /* Convert ~X ^ C to X ^ ~C. */ + if (TREE_CODE (arg0) == BIT_NOT_EXPR + && TREE_CODE (arg1) == INTEGER_CST) + return fold_build2 (code, type, + fold_convert (type, TREE_OPERAND (arg0, 0)), + fold_build1 (BIT_NOT_EXPR, type, arg1)); + /* Fold (X & 1) ^ 1 as (X & 1) == 0. */ if (TREE_CODE (arg0) == BIT_AND_EXPR && integer_onep (TREE_OPERAND (arg0, 1)) @@ -9682,29 +10500,27 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS) || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF) || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL)) - && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)), - TREE_VALUE (TREE_OPERAND (arg1, 1)), 0)) + && operand_equal_p (CALL_EXPR_ARG (arg0, 0), + CALL_EXPR_ARG (arg1, 0), 0)) { tree tanfn = mathfn_built_in (type, BUILT_IN_TAN); if (tanfn != NULL_TREE) - return build_function_call_expr (tanfn, - TREE_OPERAND (arg0, 1)); + return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0)); } /* Optimize cos(x)/sin(x) as 1.0/tan(x). 
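The trigonometric quotients this region folds, in source form. A hedged sketch; all of these require -funsafe-math-optimizations, and the sin/tan and tan/sin cases additionally require that NaNs and infinities need not be honored:

#include <math.h>

double
trig_demo (double x)
{
  double a = sin (x) / cos (x);   /* becomes  tan (x)       */
  double b = cos (x) / sin (x);   /* becomes  1.0 / tan (x) */
  double c = sin (x) / tan (x);   /* becomes  cos (x)       */
  return a + b + c;
}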
*/ if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN) || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF) || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL)) - && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)), - TREE_VALUE (TREE_OPERAND (arg1, 1)), 0)) + && operand_equal_p (CALL_EXPR_ARG (arg0, 0), + CALL_EXPR_ARG (arg1, 0), 0)) { tree tanfn = mathfn_built_in (type, BUILT_IN_TAN); if (tanfn != NULL_TREE) { - tree tmp = TREE_OPERAND (arg0, 1); - tmp = build_function_call_expr (tanfn, tmp); + tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0)); return fold_build2 (RDIV_EXPR, type, build_real (type, dconst1), tmp); } @@ -9716,8 +10532,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF) || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL))) { - tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1)); - tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1)); + tree arg00 = CALL_EXPR_ARG (arg0, 0); + tree arg01 = CALL_EXPR_ARG (arg1, 0); if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00))) && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00))) @@ -9726,8 +10542,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) tree cosfn = mathfn_built_in (type, BUILT_IN_COS); if (cosfn != NULL_TREE) - return build_function_call_expr (cosfn, - TREE_OPERAND (arg0, 1)); + return build_call_expr (cosfn, 1, arg00); } } @@ -9737,8 +10552,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF) || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL))) { - tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1)); - tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1)); + tree arg00 = CALL_EXPR_ARG (arg0, 0); + tree arg01 = CALL_EXPR_ARG (arg1, 0); if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00))) && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00))) @@ -9748,8 +10563,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (cosfn != NULL_TREE) { - tree tmp = TREE_OPERAND (arg0, 1); - tmp = build_function_call_expr (cosfn, tmp); + tree tmp = build_call_expr (cosfn, 1, arg00); return fold_build2 (RDIV_EXPR, type, build_real (type, dconst1), tmp); @@ -9762,33 +10576,29 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) || fcode0 == BUILT_IN_POWF || fcode0 == BUILT_IN_POWL) { - tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1)); - tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1))); + tree arg00 = CALL_EXPR_ARG (arg0, 0); + tree arg01 = CALL_EXPR_ARG (arg0, 1); if (TREE_CODE (arg01) == REAL_CST - && ! TREE_CONSTANT_OVERFLOW (arg01) + && !TREE_OVERFLOW (arg01) && operand_equal_p (arg1, arg00, 0)) { - tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); + tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); REAL_VALUE_TYPE c; - tree arg, arglist; + tree arg; c = TREE_REAL_CST (arg01); real_arithmetic (&c, MINUS_EXPR, &c, &dconst1); arg = build_real (type, c); - arglist = build_tree_list (NULL_TREE, arg); - arglist = tree_cons (NULL_TREE, arg1, arglist); - return build_function_call_expr (powfn, arglist); + return build_call_expr (powfn, 2, arg1, arg); } } /* Optimize x/expN(y) into x*expN(-y). 
*/ if (BUILTIN_EXPONENT_P (fcode1)) { - tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0); - tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1))); - tree arglist = build_tree_list (NULL_TREE, - fold_convert (type, arg)); - arg1 = build_function_call_expr (expfn, arglist); + tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); + tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0)); + arg1 = build_call_expr (expfn, 1, fold_convert (type, arg)); return fold_build2 (MULT_EXPR, type, arg0, arg1); } @@ -9797,13 +10607,11 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) || fcode1 == BUILT_IN_POWF || fcode1 == BUILT_IN_POWL) { - tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0); - tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1)); - tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1))); + tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); + tree arg10 = CALL_EXPR_ARG (arg1, 0); + tree arg11 = CALL_EXPR_ARG (arg1, 1); tree neg11 = fold_convert (type, negate_expr (arg11)); - tree arglist = tree_cons(NULL_TREE, arg10, - build_tree_list (NULL_TREE, neg11)); - arg1 = build_function_call_expr (powfn, arglist); + arg1 = build_call_expr (powfn, 2, arg10, neg11); return fold_build2 (MULT_EXPR, type, arg0, arg1); } } @@ -9813,8 +10621,10 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) case FLOOR_DIV_EXPR: /* Simplify A / (B << N) where A and B are positive and B is a power of 2, to A >> (N + log2(B)). */ + strict_overflow_p = false; if (TREE_CODE (arg1) == LSHIFT_EXPR - && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))) + && (TYPE_UNSIGNED (type) + || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))) { tree sval = TREE_OPERAND (arg1, 0); if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0) @@ -9822,6 +10632,11 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) tree sh_cnt = TREE_OPERAND (arg1, 1); unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval)); + if (strict_overflow_p) + fold_overflow_warning (("assuming signed overflow does not " + "occur when simplifying A / (B << N)"), + WARN_STRICT_OVERFLOW_MISC); + sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt), sh_cnt, build_int_cst (NULL_TREE, pow2)); return fold_build2 (RSHIFT_EXPR, type, @@ -9846,16 +10661,30 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) /* Convert -A / -B to A / B when the type is signed and overflow is undefined. 
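A source-level sketch of the negation-distribution folds described here (for signed integers they apply only when overflow is undefined, since negating INT_MIN would otherwise misbehave; the folder now also emits a -Wstrict-overflow note):

int
div_neg_demo (int a, int b, int c)
{
  int p = (-a) / (-b);      /* becomes  a / b       */
  int q = (-a) / (b - c);   /* becomes  a / (c - b) */
  return p + q;
}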
*/ - if (!TYPE_UNSIGNED (type) && !flag_wrapv + if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type)) && TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1)) - return fold_build2 (code, type, TREE_OPERAND (arg0, 0), - negate_expr (arg1)); - if (!TYPE_UNSIGNED (type) && !flag_wrapv + { + if (INTEGRAL_TYPE_P (type)) + fold_overflow_warning (("assuming signed overflow does not occur " + "when distributing negation across " + "division"), + WARN_STRICT_OVERFLOW_MISC); + return fold_build2 (code, type, TREE_OPERAND (arg0, 0), + negate_expr (arg1)); + } + if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type)) && TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0)) - return fold_build2 (code, type, negate_expr (arg0), - TREE_OPERAND (arg1, 0)); + { + if (INTEGRAL_TYPE_P (type)) + fold_overflow_warning (("assuming signed overflow does not occur " + "when distributing negation across " + "division"), + WARN_STRICT_OVERFLOW_MISC); + return fold_build2 (code, type, negate_expr (arg0), + TREE_OPERAND (arg1, 0)); + } /* If arg0 is a multiple of arg1, then rewrite to the fastest div operation, EXACT_DIV_EXPR. @@ -9867,9 +10696,17 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && multiple_of_p (type, arg0, arg1)) return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1); + strict_overflow_p = false; if (TREE_CODE (arg1) == INTEGER_CST - && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE))) - return fold_convert (type, tem); + && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, + &strict_overflow_p))) + { + if (strict_overflow_p) + fold_overflow_warning (("assuming signed overflow does not occur " + "when simplifying division"), + WARN_STRICT_OVERFLOW_MISC); + return fold_convert (type, tem); + } return NULL_TREE; @@ -9901,8 +10738,10 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR, i.e. "X % C" into "X & (C - 1)", if X and C are positive. */ + strict_overflow_p = false; if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR) - && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))) + && (TYPE_UNSIGNED (type) + || tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))) { tree c = arg1; /* Also optimize A % (C << N) where C is a power of 2, @@ -9912,8 +10751,13 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0) { - tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), - arg1, integer_one_node); + tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1, + build_int_cst (TREE_TYPE (arg1), 1)); + if (strict_overflow_p) + fold_overflow_warning (("assuming signed overflow does not " + "occur when simplifying " + "X % (power of two)"), + WARN_STRICT_OVERFLOW_MISC); return fold_build2 (BIT_AND_EXPR, type, fold_convert (type, arg0), fold_convert (type, mask)); @@ -9924,9 +10768,9 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (code == TRUNC_MOD_EXPR && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST - && !TREE_CONSTANT_OVERFLOW (arg1) + && !TREE_OVERFLOW (arg1) && TREE_INT_CST_HIGH (arg1) < 0 - && !flag_trapv + && !TYPE_OVERFLOW_TRAPS (type) /* Avoid this transformation if C is INT_MIN, i.e. C == -C. 
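Source-level view of the two modulus folds in this block (a sketch; the bitmask form requires the dividend to be unsigned or provably nonnegative, and the sign-stripping form is skipped for C == INT_MIN, where C == -C):

unsigned
mod_demo (unsigned x, int s)
{
  unsigned a = x % 8;   /* becomes  x & 7 */
  int b = s % -5;       /* becomes  s % 5 */
  return a + (unsigned) b;
}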
*/ && !sign_bit_p (arg1, arg1)) return fold_build2 (code, type, fold_convert (type, arg0), @@ -9936,13 +10780,20 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (code == TRUNC_MOD_EXPR && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == NEGATE_EXPR - && !flag_trapv) + && !TYPE_OVERFLOW_TRAPS (type)) return fold_build2 (code, type, fold_convert (type, arg0), fold_convert (type, TREE_OPERAND (arg1, 0))); if (TREE_CODE (arg1) == INTEGER_CST - && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE))) - return fold_convert (type, tem); + && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, + &strict_overflow_p))) + { + if (strict_overflow_p) + fold_overflow_warning (("assuming signed overflow does not occur " + "when simplifying modulos"), + WARN_STRICT_OVERFLOW_MISC); + return fold_convert (type, tem); + } return NULL_TREE; @@ -10025,9 +10876,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) RROTATE_EXPR by a new constant. */ if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST) { - tree tem = build_int_cst (NULL_TREE, + tree tem = build_int_cst (TREE_TYPE (arg1), GET_MODE_BITSIZE (TYPE_MODE (type))); - tem = fold_convert (TREE_TYPE (arg1), tem); tem = const_binop (MINUS_EXPR, tem, arg1, 0); return fold_build2 (RROTATE_EXPR, type, arg0, tem); } @@ -10287,21 +11137,6 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && code == EQ_EXPR) return fold_build1 (TRUTH_NOT_EXPR, type, arg0); - /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */ - if (TREE_CODE (arg0) == BIT_NOT_EXPR - && TREE_CODE (arg1) == INTEGER_CST) - return fold_build2 (code, type, TREE_OPERAND (arg0, 0), - fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), - arg1)); - - /* If this is an equality comparison of the address of a non-weak - object against zero, then we know the result. */ - if (TREE_CODE (arg0) == ADDR_EXPR - && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0)) - && ! DECL_WEAK (TREE_OPERAND (arg0, 0)) - && integer_zerop (arg1)) - return constant_boolean_node (code != EQ_EXPR, type); - /* If this is an equality comparison of the address of two non-weak, unaliased symbols neither of which are extern (since we do not have access to attributes for externs), then we know the result. */ @@ -10346,7 +11181,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) ? MINUS_EXPR : PLUS_EXPR, fold_convert (TREE_TYPE (arg0), arg1), TREE_OPERAND (arg0, 1), 0)) - && ! TREE_CONSTANT_OVERFLOW (tem)) + && !TREE_OVERFLOW (tem)) return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem); /* Similarly for a NEGATE_EXPR. */ @@ -10354,9 +11189,36 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && TREE_CODE (arg1) == INTEGER_CST && 0 != (tem = negate_expr (arg1)) && TREE_CODE (tem) == INTEGER_CST - && ! TREE_CONSTANT_OVERFLOW (tem)) + && !TREE_OVERFLOW (tem)) return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem); + /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */ + if (TREE_CODE (arg0) == BIT_XOR_EXPR + && TREE_CODE (arg1) == INTEGER_CST + && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) + return fold_build2 (code, type, TREE_OPERAND (arg0, 0), + fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0), + fold_convert (TREE_TYPE (arg0), arg1), + TREE_OPERAND (arg0, 1))); + + /* Transform comparisons of the form X +- C CMP X. 
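A sketch of the equality folds added here, illustrative only. The X + C == X case needs no overflow assumption: adding a nonzero constant can never reproduce X, so the result is constant for EQ and NE:

int
eqcmp_demo (int x)
{
  int a = ((x ^ 3) == 5);   /* becomes  x == (3 ^ 5), i.e. x == 6 */
  int b = (x + 4 == x);     /* folds to 0 */
  int c = (x + 4 != x);     /* folds to 1 */
  return a + b + c;
}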
*/ + if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) + && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) + && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST + && (INTEGRAL_TYPE_P (TREE_TYPE (arg0)) + || POINTER_TYPE_P (TREE_TYPE (arg0)))) + { + tree cst = TREE_OPERAND (arg0, 1); + + if (code == EQ_EXPR + && !integer_zerop (cst)) + return omit_two_operands (type, boolean_false_node, + TREE_OPERAND (arg0, 0), arg1); + else + return omit_two_operands (type, boolean_true_node, + TREE_OPERAND (arg0, 0), arg1); + } + /* If we have X - Y == 0, we can convert that to X == Y and similarly for !=. Don't do this for ordered comparisons due to overflow. */ if (TREE_CODE (arg0) == MINUS_EXPR @@ -10446,7 +11308,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) unsigned HOST_WIDE_INT log2 = tree_log2 (arg01); /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0 can be rewritten as (X & (C2 << C1)) != 0. */ - if ((log2 + TREE_INT_CST_LOW (arg01)) < prec) + if ((log2 + TREE_INT_CST_LOW (arg001)) < prec) { tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001); tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem); @@ -10523,8 +11385,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) } /* If this is a comparison of a field, we may be able to simplify it. */ - if (((TREE_CODE (arg0) == COMPONENT_REF - && lang_hooks.can_use_bit_fields_p ()) + if ((TREE_CODE (arg0) == COMPONENT_REF || TREE_CODE (arg0) == BIT_FIELD_REF) /* Handle the constant case even without -O to make sure the warnings are given. */ @@ -10545,16 +11406,14 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && integer_zerop (arg1)) { tree fndecl = get_callee_fndecl (arg0); - tree arglist; if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN - && (arglist = TREE_OPERAND (arg0, 1)) - && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE - && ! TREE_CHAIN (arglist)) + && call_expr_nargs (arg0) == 1 + && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE) { - tree iref = build_fold_indirect_ref (TREE_VALUE (arglist)); + tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0)); return fold_build2 (code, type, iref, build_int_cst (TREE_TYPE (iref), 0)); } @@ -10659,6 +11518,160 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) tree res = constant_boolean_node (code==NE_EXPR, type); return omit_one_operand (type, res, arg0); } + + /* Fold -X op -Y as X op Y, where op is eq/ne. */ + if (TREE_CODE (arg0) == NEGATE_EXPR + && TREE_CODE (arg1) == NEGATE_EXPR) + return fold_build2 (code, type, + TREE_OPERAND (arg0, 0), + TREE_OPERAND (arg1, 0)); + + /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. 
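The masked and xor-ed equality folds implemented below, written as source rewrites (a minimal sketch, not GCC code):

int
mask_demo (int x, int y, int m)
{
  int a = ((x & m) == (y & m));   /* becomes  ((x ^ y) & m) == 0    */
  int b = ((x ^ m) == (y ^ m));   /* shared operand cancels: x == y */
  int c = ((x ^ 3) != (y ^ 5));   /* becomes  (x ^ (3 ^ 5)) != y    */
  return a + b + c;
}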
*/ + if (TREE_CODE (arg0) == BIT_AND_EXPR + && TREE_CODE (arg1) == BIT_AND_EXPR) + { + tree arg00 = TREE_OPERAND (arg0, 0); + tree arg01 = TREE_OPERAND (arg0, 1); + tree arg10 = TREE_OPERAND (arg1, 0); + tree arg11 = TREE_OPERAND (arg1, 1); + tree itype = TREE_TYPE (arg0); + + if (operand_equal_p (arg01, arg11, 0)) + return fold_build2 (code, type, + fold_build2 (BIT_AND_EXPR, itype, + fold_build2 (BIT_XOR_EXPR, itype, + arg00, arg10), + arg01), + build_int_cst (itype, 0)); + + if (operand_equal_p (arg01, arg10, 0)) + return fold_build2 (code, type, + fold_build2 (BIT_AND_EXPR, itype, + fold_build2 (BIT_XOR_EXPR, itype, + arg00, arg11), + arg01), + build_int_cst (itype, 0)); + + if (operand_equal_p (arg00, arg11, 0)) + return fold_build2 (code, type, + fold_build2 (BIT_AND_EXPR, itype, + fold_build2 (BIT_XOR_EXPR, itype, + arg01, arg10), + arg00), + build_int_cst (itype, 0)); + + if (operand_equal_p (arg00, arg10, 0)) + return fold_build2 (code, type, + fold_build2 (BIT_AND_EXPR, itype, + fold_build2 (BIT_XOR_EXPR, itype, + arg01, arg11), + arg00), + build_int_cst (itype, 0)); + } + + if (TREE_CODE (arg0) == BIT_XOR_EXPR + && TREE_CODE (arg1) == BIT_XOR_EXPR) + { + tree arg00 = TREE_OPERAND (arg0, 0); + tree arg01 = TREE_OPERAND (arg0, 1); + tree arg10 = TREE_OPERAND (arg1, 0); + tree arg11 = TREE_OPERAND (arg1, 1); + tree itype = TREE_TYPE (arg0); + + /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries. + operand_equal_p guarantees no side-effects so we don't need + to use omit_one_operand on Z. */ + if (operand_equal_p (arg01, arg11, 0)) + return fold_build2 (code, type, arg00, arg10); + if (operand_equal_p (arg01, arg10, 0)) + return fold_build2 (code, type, arg00, arg11); + if (operand_equal_p (arg00, arg11, 0)) + return fold_build2 (code, type, arg01, arg10); + if (operand_equal_p (arg00, arg10, 0)) + return fold_build2 (code, type, arg01, arg11); + + /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */ + if (TREE_CODE (arg01) == INTEGER_CST + && TREE_CODE (arg11) == INTEGER_CST) + return fold_build2 (code, type, + fold_build2 (BIT_XOR_EXPR, itype, arg00, + fold_build2 (BIT_XOR_EXPR, itype, + arg01, arg11)), + arg10); + } + + /* Attempt to simplify equality/inequality comparisons of complex + values. Only lower the comparison if the result is known or + can be simplified to a single scalar comparison. 
*/ + if ((TREE_CODE (arg0) == COMPLEX_EXPR + || TREE_CODE (arg0) == COMPLEX_CST) + && (TREE_CODE (arg1) == COMPLEX_EXPR + || TREE_CODE (arg1) == COMPLEX_CST)) + { + tree real0, imag0, real1, imag1; + tree rcond, icond; + + if (TREE_CODE (arg0) == COMPLEX_EXPR) + { + real0 = TREE_OPERAND (arg0, 0); + imag0 = TREE_OPERAND (arg0, 1); + } + else + { + real0 = TREE_REALPART (arg0); + imag0 = TREE_IMAGPART (arg0); + } + + if (TREE_CODE (arg1) == COMPLEX_EXPR) + { + real1 = TREE_OPERAND (arg1, 0); + imag1 = TREE_OPERAND (arg1, 1); + } + else + { + real1 = TREE_REALPART (arg1); + imag1 = TREE_IMAGPART (arg1); + } + + rcond = fold_binary (code, type, real0, real1); + if (rcond && TREE_CODE (rcond) == INTEGER_CST) + { + if (integer_zerop (rcond)) + { + if (code == EQ_EXPR) + return omit_two_operands (type, boolean_false_node, + imag0, imag1); + return fold_build2 (NE_EXPR, type, imag0, imag1); + } + else + { + if (code == NE_EXPR) + return omit_two_operands (type, boolean_true_node, + imag0, imag1); + return fold_build2 (EQ_EXPR, type, imag0, imag1); + } + } + + icond = fold_binary (code, type, imag0, imag1); + if (icond && TREE_CODE (icond) == INTEGER_CST) + { + if (integer_zerop (icond)) + { + if (code == EQ_EXPR) + return omit_two_operands (type, boolean_false_node, + real0, real1); + return fold_build2 (NE_EXPR, type, real0, real1); + } + else + { + if (code == NE_EXPR) + return omit_two_operands (type, boolean_true_node, + real0, real1); + return fold_build2 (EQ_EXPR, type, real0, real1); + } + } + } + return NULL_TREE; case LT_EXPR: @@ -10675,8 +11688,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))) || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST - && !TYPE_UNSIGNED (TREE_TYPE (arg1)) - && !(flag_wrapv || flag_trapv)))) + && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))))) { tree arg01 = TREE_OPERAND (arg0, 1); enum tree_code code0 = TREE_CODE (arg0); @@ -10691,27 +11703,59 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (code == GT_EXPR && ((code0 == MINUS_EXPR && is_positive >= 0) || (code0 == PLUS_EXPR && is_positive <= 0))) - return constant_boolean_node (0, type); + { + if (TREE_CODE (arg01) == INTEGER_CST + && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) + fold_overflow_warning (("assuming signed overflow does not " + "occur when assuming that (X - c) > X " + "is always false"), + WARN_STRICT_OVERFLOW_ALL); + return constant_boolean_node (0, type); + } /* Likewise (X + c) < X becomes false. */ if (code == LT_EXPR && ((code0 == PLUS_EXPR && is_positive >= 0) || (code0 == MINUS_EXPR && is_positive <= 0))) - return constant_boolean_node (0, type); + { + if (TREE_CODE (arg01) == INTEGER_CST + && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) + fold_overflow_warning (("assuming signed overflow does not " + "occur when assuming that " + "(X + c) < X is always false"), + WARN_STRICT_OVERFLOW_ALL); + return constant_boolean_node (0, type); + } /* Convert (X - c) <= X to true. 
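Source-level form of the X +- c CMP X folds in this region. A sketch; for signed operands they assume undefined overflow, which is why each branch now emits a -Wstrict-overflow note when c is a constant:

int
xpc_demo (int x)
{
  int a = (x - 1 <= x);   /* folds to 1 */
  int b = (x + 1 >= x);   /* folds to 1 */
  int c = (x + 1 <= x);   /* folds to 0 */
  int d = (x - 1 >  x);   /* folds to 0 */
  return a + b + c + d;
}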
*/ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))) && code == LE_EXPR && ((code0 == MINUS_EXPR && is_positive >= 0) || (code0 == PLUS_EXPR && is_positive <= 0))) - return constant_boolean_node (1, type); + { + if (TREE_CODE (arg01) == INTEGER_CST + && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) + fold_overflow_warning (("assuming signed overflow does not " + "occur when assuming that " + "(X - c) <= X is always true"), + WARN_STRICT_OVERFLOW_ALL); + return constant_boolean_node (1, type); + } /* Convert (X + c) >= X to true. */ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))) && code == GE_EXPR && ((code0 == PLUS_EXPR && is_positive >= 0) || (code0 == MINUS_EXPR && is_positive <= 0))) - return constant_boolean_node (1, type); + { + if (TREE_CODE (arg01) == INTEGER_CST + && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) + fold_overflow_warning (("assuming signed overflow does not " + "occur when assuming that " + "(X + c) >= X is always true"), + WARN_STRICT_OVERFLOW_ALL); + return constant_boolean_node (1, type); + } if (TREE_CODE (arg01) == INTEGER_CST) { @@ -10719,23 +11763,51 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (code == GT_EXPR && ((code0 == PLUS_EXPR && is_positive > 0) || (code0 == MINUS_EXPR && is_positive < 0))) - return constant_boolean_node (1, type); + { + if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) + fold_overflow_warning (("assuming signed overflow does " + "not occur when assuming that " + "(X + c) > X is always true"), + WARN_STRICT_OVERFLOW_ALL); + return constant_boolean_node (1, type); + } if (code == LT_EXPR && ((code0 == MINUS_EXPR && is_positive > 0) || (code0 == PLUS_EXPR && is_positive < 0))) - return constant_boolean_node (1, type); + { + if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) + fold_overflow_warning (("assuming signed overflow does " + "not occur when assuming that " + "(X - c) < X is always true"), + WARN_STRICT_OVERFLOW_ALL); + return constant_boolean_node (1, type); + } /* Convert X + c <= X and X - c >= X to false for integers. */ if (code == LE_EXPR && ((code0 == PLUS_EXPR && is_positive > 0) || (code0 == MINUS_EXPR && is_positive < 0))) - return constant_boolean_node (0, type); + { + if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) + fold_overflow_warning (("assuming signed overflow does " + "not occur when assuming that " + "(X + c) <= X is always false"), + WARN_STRICT_OVERFLOW_ALL); + return constant_boolean_node (0, type); + } if (code == GE_EXPR && ((code0 == MINUS_EXPR && is_positive > 0) || (code0 == PLUS_EXPR && is_positive < 0))) - return constant_boolean_node (0, type); + { + if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1))) + fold_overflow_warning (("assuming signed overflow does " + "not occur when assuming that " + "(X - c) >= X is always false"), + WARN_STRICT_OVERFLOW_ALL); + return constant_boolean_node (0, type); + } } } @@ -10763,15 +11835,15 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) } /* Comparisons with the highest or lowest possible integer of - the specified size will have known values. */ + the specified precision will have known values. */ { - int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1))); + tree arg1_type = TREE_TYPE (arg1); + unsigned int width = TYPE_PRECISION (arg1_type); if (TREE_CODE (arg1) == INTEGER_CST - && ! 
TREE_CONSTANT_OVERFLOW (arg1) + && !TREE_OVERFLOW (arg1) && width <= 2 * HOST_BITS_PER_WIDE_INT - && (INTEGRAL_TYPE_P (TREE_TYPE (arg1)) - || POINTER_TYPE_P (TREE_TYPE (arg1)))) + && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type))) { HOST_WIDE_INT signed_max_hi; unsigned HOST_WIDE_INT signed_max_lo; @@ -10784,7 +11856,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) signed_max_hi = 0; max_hi = 0; - if (TYPE_UNSIGNED (TREE_TYPE (arg1))) + if (TYPE_UNSIGNED (arg1_type)) { max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1; min_lo = 0; @@ -10806,7 +11878,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) max_lo = -1; min_lo = 0; - if (TYPE_UNSIGNED (TREE_TYPE (arg1))) + if (TYPE_UNSIGNED (arg1_type)) { max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1; min_hi = 0; @@ -10846,10 +11918,12 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) switch (code) { case GT_EXPR: - arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0); + arg1 = const_binop (PLUS_EXPR, arg1, + build_int_cst (TREE_TYPE (arg1), 1), 0); return fold_build2 (EQ_EXPR, type, arg0, arg1); case LE_EXPR: - arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0); + arg1 = const_binop (PLUS_EXPR, arg1, + build_int_cst (TREE_TYPE (arg1), 1), 0); return fold_build2 (NE_EXPR, type, arg0, arg1); default: break; @@ -10889,12 +11963,16 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) break; } - else if (!in_gimple_form - && TREE_INT_CST_HIGH (arg1) == signed_max_hi + else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi && TREE_INT_CST_LOW (arg1) == signed_max_lo - && TYPE_UNSIGNED (TREE_TYPE (arg1)) + && TYPE_UNSIGNED (arg1_type) + /* We will flip the signedness of the comparison operator + associated with the mode of arg1, so the sign bit is + specified by this mode. Check that arg1 is the signed + max associated with this sign bit. */ + && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type)) /* signed_type does not work on pointer types. */ - && INTEGRAL_TYPE_P (TREE_TYPE (arg1))) + && INTEGRAL_TYPE_P (arg1_type)) { /* The following case also applies to X < signed_max+1 and X >= signed_max+1 because previous transformations. */ @@ -10904,8 +11982,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0)); st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1)); return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR, - type, fold_convert (st0, arg0), - build_int_cst (st1, 0)); + type, fold_convert (st0, arg0), + build_int_cst (st1, 0)); } } } @@ -10922,7 +12000,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && ! TREE_SIDE_EFFECTS (arg0) && (0 != (tem = negate_expr (arg1))) && TREE_CODE (tem) == INTEGER_CST - && ! TREE_CONSTANT_OVERFLOW (tem)) + && !TREE_OVERFLOW (tem)) return fold_build2 (TRUTH_ANDIF_EXPR, type, build2 (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem), @@ -10930,18 +12008,34 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) TREE_OPERAND (arg0, 0), arg1)); /* Convert ABS_EXPR >= 0 to true. */ + strict_overflow_p = false; if (code == GE_EXPR - && tree_expr_nonnegative_p (arg0) && (integer_zerop (arg1) || (! 
HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) - && real_zerop (arg1)))) - return omit_one_operand (type, integer_one_node, arg0); + && real_zerop (arg1))) + && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)) + { + if (strict_overflow_p) + fold_overflow_warning (("assuming signed overflow does not occur " + "when simplifying comparison of " + "absolute value and zero"), + WARN_STRICT_OVERFLOW_CONDITIONAL); + return omit_one_operand (type, integer_one_node, arg0); + } /* Convert ABS_EXPR < 0 to false. */ + strict_overflow_p = false; if (code == LT_EXPR - && tree_expr_nonnegative_p (arg0) - && (integer_zerop (arg1) || real_zerop (arg1))) - return omit_one_operand (type, integer_zero_node, arg0); + && (integer_zerop (arg1) || real_zerop (arg1)) + && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p)) + { + if (strict_overflow_p) + fold_overflow_warning (("assuming signed overflow does not occur " + "when simplifying comparison of " + "absolute value and zero"), + WARN_STRICT_OVERFLOW_CONDITIONAL); + return omit_one_operand (type, integer_zero_node, arg0); + } /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0 and similarly for >= into !=. */ @@ -11226,13 +12320,76 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) /* A < 0 ? : 0 is simply (A & ). */ if (TREE_CODE (arg0) == LT_EXPR - && integer_zerop (TREE_OPERAND (arg0, 1)) - && integer_zerop (op2) - && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1))) - return fold_convert (type, - fold_build2 (BIT_AND_EXPR, - TREE_TYPE (tem), tem, - fold_convert (TREE_TYPE (tem), arg1))); + && integer_zerop (TREE_OPERAND (arg0, 1)) + && integer_zerop (op2) + && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1))) + { + /* sign_bit_p only checks ARG1 bits within A's precision. + If has wider type than A, bits outside + of A's precision in need to be checked. + If they are all 0, this optimization needs to be done + in unsigned A's type, if they are all 1 in signed A's type, + otherwise this can't be done. 
*/ + if (TYPE_PRECISION (TREE_TYPE (tem)) + < TYPE_PRECISION (TREE_TYPE (arg1)) + && TYPE_PRECISION (TREE_TYPE (tem)) + < TYPE_PRECISION (type)) + { + unsigned HOST_WIDE_INT mask_lo; + HOST_WIDE_INT mask_hi; + int inner_width, outer_width; + tree tem_type; + + inner_width = TYPE_PRECISION (TREE_TYPE (tem)); + outer_width = TYPE_PRECISION (TREE_TYPE (arg1)); + if (outer_width > TYPE_PRECISION (type)) + outer_width = TYPE_PRECISION (type); + + if (outer_width > HOST_BITS_PER_WIDE_INT) + { + mask_hi = ((unsigned HOST_WIDE_INT) -1 + >> (2 * HOST_BITS_PER_WIDE_INT - outer_width)); + mask_lo = -1; + } + else + { + mask_hi = 0; + mask_lo = ((unsigned HOST_WIDE_INT) -1 + >> (HOST_BITS_PER_WIDE_INT - outer_width)); + } + if (inner_width > HOST_BITS_PER_WIDE_INT) + { + mask_hi &= ~((unsigned HOST_WIDE_INT) -1 + >> (HOST_BITS_PER_WIDE_INT - inner_width)); + mask_lo = 0; + } + else + mask_lo &= ~((unsigned HOST_WIDE_INT) -1 + >> (HOST_BITS_PER_WIDE_INT - inner_width)); + + if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi + && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo) + { + tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem)); + tem = fold_convert (tem_type, tem); + } + else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0 + && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0) + { + tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem)); + tem = fold_convert (tem_type, tem); + } + else + tem = NULL; + } + + if (tem) + return fold_convert (type, + fold_build2 (BIT_AND_EXPR, + TREE_TYPE (tem), tem, + fold_convert (TREE_TYPE (tem), + arg1))); + } /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was already handled above. */ @@ -11310,15 +12467,13 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) return NULL_TREE; case CALL_EXPR: - /* Check for a built-in function. */ - if (TREE_CODE (op0) == ADDR_EXPR - && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL - && DECL_BUILT_IN (TREE_OPERAND (op0, 0))) - return fold_builtin (TREE_OPERAND (op0, 0), op1, false); - return NULL_TREE; + /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses + of fold_ternary on them. */ + gcc_unreachable (); case BIT_FIELD_REF: - if (TREE_CODE (arg0) == VECTOR_CST + if ((TREE_CODE (arg0) == VECTOR_CST + || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0))) && type == TREE_TYPE (TREE_TYPE (arg0)) && host_integerp (arg1, 1) && host_integerp (op2, 1)) @@ -11332,7 +12487,18 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) && (idx = idx / width) < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))) { - tree elements = TREE_VECTOR_CST_ELTS (arg0); + tree elements = NULL_TREE; + + if (TREE_CODE (arg0) == VECTOR_CST) + elements = TREE_VECTOR_CST_ELTS (arg0); + else + { + unsigned HOST_WIDE_INT idx; + tree value; + + FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value) + elements = tree_cons (NULL_TREE, value, elements); + } while (idx-- > 0 && elements) elements = TREE_CHAIN (elements); if (elements) @@ -11373,7 +12539,20 @@ fold (tree expr) if (kind == tcc_constant) return t; - if (IS_EXPR_CODE_CLASS (kind)) + /* CALL_EXPR-like objects with variable numbers of operands are + treated specially. */ + if (kind == tcc_vl_exp) + { + if (code == CALL_EXPR) + { + tem = fold_call_expr (expr, false); + return tem ? 
tem : expr; + } + return expr; + } + + if (IS_EXPR_CODE_CLASS (kind) + || IS_GIMPLE_STMT_CODE_CLASS (kind)) { tree type = TREE_TYPE (t); tree op0, op1, op2; @@ -11565,7 +12744,8 @@ recursive_label: case tcc_unary: case tcc_binary: case tcc_statement: - len = TREE_CODE_LENGTH (code); + case tcc_vl_exp: + len = TREE_OPERAND_LENGTH (expr); for (i = 0; i < len; ++i) fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht); break; @@ -11744,7 +12924,8 @@ fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2 md5_finish_ctx (&ctx, checksum_before_op2); htab_empty (ht); #endif - + + gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp); tem = fold_ternary (code, type, op0, op1, op2); if (!tem) tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT); @@ -11777,6 +12958,60 @@ fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2 return tem; } +/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS + arguments in ARGARRAY, and a null static chain. + Return a folded expression if successful. Otherwise, return a CALL_EXPR + of type TYPE from the given operands as constructed by build_call_array. */ + +tree +fold_build_call_array (tree type, tree fn, int nargs, tree *argarray) +{ + tree tem; +#ifdef ENABLE_FOLD_CHECKING + unsigned char checksum_before_fn[16], + checksum_before_arglist[16], + checksum_after_fn[16], + checksum_after_arglist[16]; + struct md5_ctx ctx; + htab_t ht; + int i; + + ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); + md5_init_ctx (&ctx); + fold_checksum_tree (fn, &ctx, ht); + md5_finish_ctx (&ctx, checksum_before_fn); + htab_empty (ht); + + md5_init_ctx (&ctx); + for (i = 0; i < nargs; i++) + fold_checksum_tree (argarray[i], &ctx, ht); + md5_finish_ctx (&ctx, checksum_before_arglist); + htab_empty (ht); +#endif + + tem = fold_builtin_call_array (type, fn, nargs, argarray); + +#ifdef ENABLE_FOLD_CHECKING + md5_init_ctx (&ctx); + fold_checksum_tree (fn, &ctx, ht); + md5_finish_ctx (&ctx, checksum_after_fn); + htab_empty (ht); + + if (memcmp (checksum_before_fn, checksum_after_fn, 16)) + fold_check_failed (fn, tem); + + md5_init_ctx (&ctx); + for (i = 0; i < nargs; i++) + fold_checksum_tree (argarray[i], &ctx, ht); + md5_finish_ctx (&ctx, checksum_after_arglist); + htab_delete (ht); + + if (memcmp (checksum_before_arglist, checksum_after_arglist, 16)) + fold_check_failed (NULL_TREE, tem); +#endif + return tem; +} + /* Perform constant folding and related simplification of initializer expression EXPR. These behave identically to "fold_buildN" but ignore potential run-time traps and exceptions that fold must preserve. */ @@ -11837,6 +13072,19 @@ fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1, return result; } +tree +fold_build_call_array_initializer (tree type, tree fn, + int nargs, tree *argarray) +{ + tree result; + START_FOLD_INIT; + + result = fold_build_call_array (type, fn, nargs, argarray); + + END_FOLD_INIT; + return result; +} + #undef START_FOLD_INIT #undef END_FOLD_INIT @@ -11880,7 +13128,7 @@ fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1, (where the same SAVE_EXPR (J) is used in the original and the transformed version). */ -static int +int multiple_of_p (tree type, tree top, tree bottom) { if (operand_equal_p (top, bottom, 0)) @@ -11922,7 +13170,7 @@ multiple_of_p (tree type, tree top, tree bottom) const_binop (LSHIFT_EXPR, size_one_node, op1, 0))) - && ! 
TREE_OVERFLOW (t1)) + && !TREE_OVERFLOW (t1)) return multiple_of_p (type, t1, bottom); } return 0; @@ -11945,24 +13193,27 @@ multiple_of_p (tree type, tree top, tree bottom) && (tree_int_cst_sgn (top) < 0 || tree_int_cst_sgn (bottom) < 0))) return 0; - return integer_zerop (const_binop (TRUNC_MOD_EXPR, - top, bottom, 0)); + return integer_zerop (int_const_binop (TRUNC_MOD_EXPR, + top, bottom, 0)); default: return 0; } } -/* Return true if `t' is known to be non-negative. */ +/* Return true if `t' is known to be non-negative. If the return + value is based on the assumption that signed overflow is undefined, + set *STRICT_OVERFLOW_P to true; otherwise, don't change + *STRICT_OVERFLOW_P. */ -int -tree_expr_nonnegative_p (tree t) +bool +tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p) { if (t == error_mark_node) - return 0; + return false; if (TYPE_UNSIGNED (TREE_TYPE (t))) - return 1; + return true; switch (TREE_CODE (t)) { @@ -11974,8 +13225,13 @@ tree_expr_nonnegative_p (tree t) case ABS_EXPR: /* We can't return 1 if flag_wrapv is set because ABS_EXPR = INT_MIN. */ - if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t)))) - return 1; + if (!INTEGRAL_TYPE_P (TREE_TYPE (t))) + return true; + if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))) + { + *strict_overflow_p = true; + return true; + } break; case INTEGER_CST: @@ -11986,8 +13242,10 @@ tree_expr_nonnegative_p (tree t) case PLUS_EXPR: if (FLOAT_TYPE_P (TREE_TYPE (t))) - return tree_expr_nonnegative_p (TREE_OPERAND (t, 0)) - && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)); + return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), + strict_overflow_p) + && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), + strict_overflow_p)); /* zero_extend(x) + zero_extend(y) is non-negative if x and y are both unsigned and at least 2 bits shorter than the result. */ @@ -12012,9 +13270,11 @@ tree_expr_nonnegative_p (tree t) { /* x * x for floating point x is always non-negative. 
*/ if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0)) - return 1; - return tree_expr_nonnegative_p (TREE_OPERAND (t, 0)) - && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)); + return true; + return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), + strict_overflow_p) + && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), + strict_overflow_p)); } /* zero_extend(x) * zero_extend(y) is non-negative if x and y are @@ -12030,12 +13290,14 @@ tree_expr_nonnegative_p (tree t) return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2) < TYPE_PRECISION (TREE_TYPE (t)); } - return 0; + return false; case BIT_AND_EXPR: case MAX_EXPR: - return tree_expr_nonnegative_p (TREE_OPERAND (t, 0)) - || tree_expr_nonnegative_p (TREE_OPERAND (t, 1)); + return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), + strict_overflow_p) + || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), + strict_overflow_p)); case BIT_IOR_EXPR: case BIT_XOR_EXPR: @@ -12045,8 +13307,10 @@ tree_expr_nonnegative_p (tree t) case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR: case ROUND_DIV_EXPR: - return tree_expr_nonnegative_p (TREE_OPERAND (t, 0)) - && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)); + return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), + strict_overflow_p) + && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), + strict_overflow_p)); case TRUNC_MOD_EXPR: case CEIL_MOD_EXPR: @@ -12055,18 +13319,25 @@ tree_expr_nonnegative_p (tree t) case SAVE_EXPR: case NON_LVALUE_EXPR: case FLOAT_EXPR: - return tree_expr_nonnegative_p (TREE_OPERAND (t, 0)); + case FIX_TRUNC_EXPR: + return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), + strict_overflow_p); case COMPOUND_EXPR: case MODIFY_EXPR: - return tree_expr_nonnegative_p (TREE_OPERAND (t, 1)); + case GIMPLE_MODIFY_STMT: + return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1), + strict_overflow_p); case BIND_EXPR: - return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1))); + return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)), + strict_overflow_p); case COND_EXPR: - return tree_expr_nonnegative_p (TREE_OPERAND (t, 1)) - && tree_expr_nonnegative_p (TREE_OPERAND (t, 2)); + return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), + strict_overflow_p) + && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2), + strict_overflow_p)); case NOP_EXPR: { @@ -12076,18 +13347,21 @@ tree_expr_nonnegative_p (tree t) if (TREE_CODE (outer_type) == REAL_TYPE) { if (TREE_CODE (inner_type) == REAL_TYPE) - return tree_expr_nonnegative_p (TREE_OPERAND (t, 0)); + return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), + strict_overflow_p); if (TREE_CODE (inner_type) == INTEGER_TYPE) { if (TYPE_UNSIGNED (inner_type)) - return 1; - return tree_expr_nonnegative_p (TREE_OPERAND (t, 0)); + return true; + return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), + strict_overflow_p); } } else if (TREE_CODE (outer_type) == INTEGER_TYPE) { if (TREE_CODE (inner_type) == REAL_TYPE) - return tree_expr_nonnegative_p (TREE_OPERAND (t,0)); + return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0), + strict_overflow_p); if (TREE_CODE (inner_type) == INTEGER_TYPE) return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type) && TYPE_UNSIGNED (inner_type); @@ -12103,7 +13377,7 @@ tree_expr_nonnegative_p (tree t) /* If the initializer is non-void, then it's a normal expression that will be assigned to the slot. 
*/ if (!VOID_TYPE_P (t)) - return tree_expr_nonnegative_p (t); + return tree_expr_nonnegative_warnv_p (t, strict_overflow_p); /* Otherwise, the initializer sets the slot in some way. One common way is an assignment statement at the end of the initializer. */ @@ -12119,17 +13393,18 @@ tree_expr_nonnegative_p (tree t) else break; } - if (TREE_CODE (t) == MODIFY_EXPR - && TREE_OPERAND (t, 0) == temp) - return tree_expr_nonnegative_p (TREE_OPERAND (t, 1)); + if ((TREE_CODE (t) == MODIFY_EXPR + || TREE_CODE (t) == GIMPLE_MODIFY_STMT) + && GENERIC_TREE_OPERAND (t, 0) == temp) + return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1), + strict_overflow_p); - return 0; + return false; } case CALL_EXPR: { tree fndecl = get_callee_fndecl (t); - tree arglist = TREE_OPERAND (t, 1); if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) switch (DECL_FUNCTION_CODE (fndecl)) { @@ -12148,14 +13423,17 @@ tree_expr_nonnegative_p (tree t) CASE_INT_FN (BUILT_IN_FFS): CASE_INT_FN (BUILT_IN_PARITY): CASE_INT_FN (BUILT_IN_POPCOUNT): + case BUILT_IN_BSWAP32: + case BUILT_IN_BSWAP64: /* Always true. */ - return 1; + return true; CASE_FLT_FN (BUILT_IN_SQRT): /* sqrt(-0.0) is -0.0. */ if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t)))) - return 1; - return tree_expr_nonnegative_p (TREE_VALUE (arglist)); + return true; + return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0), + strict_overflow_p); CASE_FLT_FN (BUILT_IN_ASINH): CASE_FLT_FN (BUILT_IN_ATAN): @@ -12178,29 +13456,72 @@ tree_expr_nonnegative_p (tree t) CASE_FLT_FN (BUILT_IN_LROUND): CASE_FLT_FN (BUILT_IN_MODF): CASE_FLT_FN (BUILT_IN_NEARBYINT): - CASE_FLT_FN (BUILT_IN_POW): CASE_FLT_FN (BUILT_IN_RINT): CASE_FLT_FN (BUILT_IN_ROUND): + CASE_FLT_FN (BUILT_IN_SCALB): + CASE_FLT_FN (BUILT_IN_SCALBLN): + CASE_FLT_FN (BUILT_IN_SCALBN): CASE_FLT_FN (BUILT_IN_SIGNBIT): + CASE_FLT_FN (BUILT_IN_SIGNIFICAND): CASE_FLT_FN (BUILT_IN_SINH): CASE_FLT_FN (BUILT_IN_TANH): CASE_FLT_FN (BUILT_IN_TRUNC): /* True if the 1st argument is nonnegative. */ - return tree_expr_nonnegative_p (TREE_VALUE (arglist)); + return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0), + strict_overflow_p); CASE_FLT_FN (BUILT_IN_FMAX): /* True if the 1st OR 2nd arguments are nonnegative. */ - return tree_expr_nonnegative_p (TREE_VALUE (arglist)) - || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist))); + return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0), + strict_overflow_p) + || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1), + strict_overflow_p))); CASE_FLT_FN (BUILT_IN_FMIN): /* True if the 1st AND 2nd arguments are nonnegative. */ - return tree_expr_nonnegative_p (TREE_VALUE (arglist)) - && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist))); + return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0), + strict_overflow_p) + && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1), + strict_overflow_p))); CASE_FLT_FN (BUILT_IN_COPYSIGN): /* True if the 2nd argument is nonnegative. */ - return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist))); + return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1), + strict_overflow_p); + + CASE_FLT_FN (BUILT_IN_POWI): + /* True if the 1st argument is nonnegative or the second + argument is an even integer. 
*/ + if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST) + { + tree arg1 = CALL_EXPR_ARG (t, 1); + if ((TREE_INT_CST_LOW (arg1) & 1) == 0) + return true; + } + return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0), + strict_overflow_p); + + CASE_FLT_FN (BUILT_IN_POW): + /* True if the 1st argument is nonnegative or the second + argument is an even integer valued real. */ + if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST) + { + REAL_VALUE_TYPE c; + HOST_WIDE_INT n; + + c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1)); + n = real_to_integer (&c); + if ((n & 1) == 0) + { + REAL_VALUE_TYPE cint; + real_from_integer (&cint, VOIDmode, n, + n < 0 ? -1 : 0, 0); + if (real_identical (&c, &cint)) + return true; + } + } + return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0), + strict_overflow_p); default: break; @@ -12212,21 +13533,44 @@ tree_expr_nonnegative_p (tree t) default: if (truth_value_p (TREE_CODE (t))) /* Truth values evaluate to 0 or 1, which is nonnegative. */ - return 1; + return true; } /* We don't know sign of `t', so be conservative and return false. */ - return 0; + return false; +} + +/* Return true if `t' is known to be non-negative. Handle warnings + about undefined signed overflow. */ + +bool +tree_expr_nonnegative_p (tree t) +{ + bool ret, strict_overflow_p; + + strict_overflow_p = false; + ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p); + if (strict_overflow_p) + fold_overflow_warning (("assuming signed overflow does not occur when " + "determining that expression is always " + "non-negative"), + WARN_STRICT_OVERFLOW_MISC); + return ret; } /* Return true when T is an address and is known to be nonzero. For floating point we further ensure that T is not denormal. - Similar logic is present in nonzero_address in rtlanal.h. */ + Similar logic is present in nonzero_address in rtlanal.h. + + If the return value is based on the assumption that signed overflow + is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't + change *STRICT_OVERFLOW_P. */ bool -tree_expr_nonzero_p (tree t) +tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p) { tree type = TREE_TYPE (t); + bool sub_strict_overflow_p; /* Doing something useful for floating point would need more work. */ if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type)) @@ -12240,33 +13584,45 @@ tree_expr_nonzero_p (tree t) return ssa_name_nonzero_p (t); case ABS_EXPR: - return tree_expr_nonzero_p (TREE_OPERAND (t, 0)); + return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), + strict_overflow_p); case INTEGER_CST: - /* We used to test for !integer_zerop here. This does not work correctly - if TREE_CONSTANT_OVERFLOW (t). */ - return (TREE_INT_CST_LOW (t) != 0 - || TREE_INT_CST_HIGH (t) != 0); + return !integer_zerop (t); case PLUS_EXPR: - if (!TYPE_UNSIGNED (type) && !flag_wrapv) + if (TYPE_OVERFLOW_UNDEFINED (type)) { /* With the presence of negative values it is hard to say something. */ - if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0)) - || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1))) + sub_strict_overflow_p = false; + if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), + &sub_strict_overflow_p) + || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), + &sub_strict_overflow_p)) return false; /* One of operands must be positive and the other non-negative. 
*/ - return (tree_expr_nonzero_p (TREE_OPERAND (t, 0)) - || tree_expr_nonzero_p (TREE_OPERAND (t, 1))); + /* We don't set *STRICT_OVERFLOW_P here: even if this value + overflows, on a twos-complement machine the sum of two + nonnegative numbers can never be zero. */ + return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), + strict_overflow_p) + || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), + strict_overflow_p)); } break; case MULT_EXPR: - if (!TYPE_UNSIGNED (type) && !flag_wrapv) + if (TYPE_OVERFLOW_UNDEFINED (type)) { - return (tree_expr_nonzero_p (TREE_OPERAND (t, 0)) - && tree_expr_nonzero_p (TREE_OPERAND (t, 1))); + if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), + strict_overflow_p) + && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), + strict_overflow_p)) + { + *strict_overflow_p = true; + return true; + } } break; @@ -12276,7 +13632,8 @@ tree_expr_nonzero_p (tree t) tree outer_type = TREE_TYPE (t); return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type) - && tree_expr_nonzero_p (TREE_OPERAND (t, 0))); + && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), + strict_overflow_p)); } break; @@ -12299,41 +13656,76 @@ tree_expr_nonzero_p (tree t) } case COND_EXPR: - return (tree_expr_nonzero_p (TREE_OPERAND (t, 1)) - && tree_expr_nonzero_p (TREE_OPERAND (t, 2))); + sub_strict_overflow_p = false; + if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), + &sub_strict_overflow_p) + && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2), + &sub_strict_overflow_p)) + { + if (sub_strict_overflow_p) + *strict_overflow_p = true; + return true; + } + break; case MIN_EXPR: - return (tree_expr_nonzero_p (TREE_OPERAND (t, 0)) - && tree_expr_nonzero_p (TREE_OPERAND (t, 1))); + sub_strict_overflow_p = false; + if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), + &sub_strict_overflow_p) + && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), + &sub_strict_overflow_p)) + { + if (sub_strict_overflow_p) + *strict_overflow_p = true; + } + break; case MAX_EXPR: - if (tree_expr_nonzero_p (TREE_OPERAND (t, 0))) + sub_strict_overflow_p = false; + if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), + &sub_strict_overflow_p)) { + if (sub_strict_overflow_p) + *strict_overflow_p = true; + /* When both operands are nonzero, then MAX must be too. */ - if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))) + if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), + strict_overflow_p)) return true; /* MAX where operand 0 is positive is positive. */ - return tree_expr_nonnegative_p (TREE_OPERAND (t, 0)); + return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0), + strict_overflow_p); } /* MAX where operand 1 is positive is positive. 
*/ - else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)) - && tree_expr_nonnegative_p (TREE_OPERAND (t, 1))) - return true; + else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), + &sub_strict_overflow_p) + && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1), + &sub_strict_overflow_p)) + { + if (sub_strict_overflow_p) + *strict_overflow_p = true; + return true; + } break; case COMPOUND_EXPR: case MODIFY_EXPR: + case GIMPLE_MODIFY_STMT: case BIND_EXPR: - return tree_expr_nonzero_p (TREE_OPERAND (t, 1)); + return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1), + strict_overflow_p); case SAVE_EXPR: case NON_LVALUE_EXPR: - return tree_expr_nonzero_p (TREE_OPERAND (t, 0)); + return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), + strict_overflow_p); case BIT_IOR_EXPR: - return tree_expr_nonzero_p (TREE_OPERAND (t, 1)) - || tree_expr_nonzero_p (TREE_OPERAND (t, 0)); + return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1), + strict_overflow_p) + || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0), + strict_overflow_p)); case CALL_EXPR: return alloca_call_p (t); @@ -12344,6 +13736,24 @@ tree_expr_nonzero_p (tree t) return false; } +/* Return true when T is an address and is known to be nonzero. + Handle warnings about undefined signed overflow. */ + +bool +tree_expr_nonzero_p (tree t) +{ + bool ret, strict_overflow_p; + + strict_overflow_p = false; + ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p); + if (strict_overflow_p) + fold_overflow_warning (("assuming signed overflow does not occur when " + "determining that expression is always " + "non-zero"), + WARN_STRICT_OVERFLOW_MISC); + return ret; +} + /* Given the components of a binary expression CODE, TYPE, OP0 and OP1, attempt to fold the expression to a constant without modifying TYPE, OP0 or OP1. 
@@ -12381,7 +13791,9 @@ fold_unary_to_constant (enum tree_code code, tree type, tree op0) tree fold_read_from_constant_string (tree exp) { - if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF) + if ((TREE_CODE (exp) == INDIRECT_REF + || TREE_CODE (exp) == ARRAY_REF) + && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE) { tree exp1 = TREE_OPERAND (exp, 0); tree index; @@ -12442,11 +13854,9 @@ fold_negate_const (tree arg0, tree type) int overflow = neg_double (TREE_INT_CST_LOW (arg0), TREE_INT_CST_HIGH (arg0), &low, &high); - t = build_int_cst_wide (type, low, high); - t = force_fit_type (t, 1, - (overflow | TREE_OVERFLOW (arg0)) - && !TYPE_UNSIGNED (type), - TREE_CONSTANT_OVERFLOW (arg0)); + t = force_fit_type_double (type, low, high, 1, + (overflow | TREE_OVERFLOW (arg0)) + && !TYPE_UNSIGNED (type)); break; } @@ -12490,9 +13900,8 @@ fold_abs_const (tree arg0, tree type) int overflow = neg_double (TREE_INT_CST_LOW (arg0), TREE_INT_CST_HIGH (arg0), &low, &high); - t = build_int_cst_wide (type, low, high); - t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0), - TREE_CONSTANT_OVERFLOW (arg0)); + t = force_fit_type_double (type, low, high, -1, + overflow | TREE_OVERFLOW (arg0)); } break; @@ -12520,11 +13929,9 @@ fold_not_const (tree arg0, tree type) gcc_assert (TREE_CODE (arg0) == INTEGER_CST); - t = build_int_cst_wide (type, - ~ TREE_INT_CST_LOW (arg0), - ~ TREE_INT_CST_HIGH (arg0)); - t = force_fit_type (t, 0, TREE_OVERFLOW (arg0), - TREE_CONSTANT_OVERFLOW (arg0)); + t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0), + ~TREE_INT_CST_HIGH (arg0), 0, + TREE_OVERFLOW (arg0)); return t; } @@ -12587,6 +13994,23 @@ fold_relational_const (enum tree_code code, tree type, tree op0, tree op1) return constant_boolean_node (real_compare (code, c0, c1), type); } + /* Handle equality/inequality of complex constants. */ + if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST) + { + tree rcond = fold_relational_const (code, type, + TREE_REALPART (op0), + TREE_REALPART (op1)); + tree icond = fold_relational_const (code, type, + TREE_IMAGPART (op0), + TREE_IMAGPART (op1)); + if (code == EQ_EXPR) + return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond); + else if (code == NE_EXPR) + return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond); + else + return NULL_TREE; + } + /* From here on we only handle LT, LE, GT, GE, EQ and NE. To compute GT, swap the arguments and do LT. @@ -12721,6 +14145,9 @@ fold_indirect_ref_1 (tree type, tree op0) { tree op = TREE_OPERAND (sub, 0); tree optype = TREE_TYPE (op); + /* *&CONST_DECL -> to the value of the const decl. */ + if (TREE_CODE (op) == CONST_DECL) + return DECL_INITIAL (op); /* *&p => p; make sure to handle *&"str"[cst] here. */ if (type == optype) { @@ -12744,6 +14171,14 @@ fold_indirect_ref_1 (tree type, tree op0) else if (TREE_CODE (optype) == COMPLEX_TYPE && type == TREE_TYPE (optype)) return fold_build1 (REALPART_EXPR, type, op); + /* *(foo *)&vectorfoo => BIT_FIELD_REF */ + else if (TREE_CODE (optype) == VECTOR_TYPE + && type == TREE_TYPE (optype)) + { + tree part_width = TYPE_SIZE (type); + tree index = bitsize_int (0); + return fold_build3 (BIT_FIELD_REF, type, op, part_width, index); + } } /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */ @@ -12890,12 +14325,38 @@ round_up (tree value, int divisor) /* If divisor is a power of two, simplify this to bit manipulation. 
*/ if (divisor == (divisor & -divisor)) { - tree t; + if (TREE_CODE (value) == INTEGER_CST) + { + unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value); + unsigned HOST_WIDE_INT high; + bool overflow_p; - t = build_int_cst (TREE_TYPE (value), divisor - 1); - value = size_binop (PLUS_EXPR, value, t); - t = build_int_cst (TREE_TYPE (value), -divisor); - value = size_binop (BIT_AND_EXPR, value, t); + if ((low & (divisor - 1)) == 0) + return value; + + overflow_p = TREE_OVERFLOW (value); + high = TREE_INT_CST_HIGH (value); + low &= ~(divisor - 1); + low += divisor; + if (low == 0) + { + high++; + if (high == 0) + overflow_p = true; + } + + return force_fit_type_double (TREE_TYPE (value), low, high, + -1, overflow_p); + } + else + { + tree t; + + t = build_int_cst (TREE_TYPE (value), divisor - 1); + value = size_binop (PLUS_EXPR, value, t); + t = build_int_cst (TREE_TYPE (value), -divisor); + value = size_binop (BIT_AND_EXPR, value, t); + } } else { @@ -13051,9 +14512,48 @@ fold_strip_sign_ops (tree exp) arg1 ? arg1 : TREE_OPERAND (exp, 1)); break; + case COMPOUND_EXPR: + arg0 = TREE_OPERAND (exp, 0); + arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1)); + if (arg1) + return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1); + break; + + case COND_EXPR: + arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1)); + arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2)); + if (arg0 || arg1) + return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0), + arg0 ? arg0 : TREE_OPERAND (exp, 1), + arg1 ? arg1 : TREE_OPERAND (exp, 2)); + break; + + case CALL_EXPR: + { + const enum built_in_function fcode = builtin_mathfn_code (exp); + switch (fcode) + { + CASE_FLT_FN (BUILT_IN_COPYSIGN): + /* Strip copysign function call, return the 1st argument. */ + arg0 = CALL_EXPR_ARG (exp, 0); + arg1 = CALL_EXPR_ARG (exp, 1); + return omit_one_operand (TREE_TYPE (exp), arg0, arg1); + + default: + /* Strip sign ops from the argument of "odd" math functions. */ + if (negate_mathfn_p (fcode)) + { + arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0)); + if (arg0) + return build_call_expr (get_callee_fndecl (exp), 1, arg0); + } + break; + } + } + break; + default: break; } return NULL_TREE; } -