X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Ffold-const.c;h=4015f62e5cd0425089c6d2b6afee81e3dedd9730;hb=3992540609daf52db67593a2f4d2c95a537b991d;hp=6e5d9403e706ccee69af56e63bdd1c6761b28c3e;hpb=54bd17de2417e43de3c780efddd0665067dc7760;p=pf3gnuchains%2Fgcc-fork.git

diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 6e5d9403e70..4015f62e5cd 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -1012,7 +1012,6 @@ fold_deferring_overflow_warnings_p (void)
 static void
 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
 {
-  gcc_assert (!flag_wrapv && !flag_trapv);
   if (fold_deferring_overflow_warnings > 0)
     {
       if (fold_deferred_overflow_warning == NULL
@@ -3027,6 +3026,11 @@ operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
     return 0;
 
+  /* Check equality of integer constants before bailing out due to
+     precision differences.  */
+  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
+    return tree_int_cst_equal (arg0, arg1);
+
   /* If both types don't have the same signedness, then we can't consider
      them equal.  We must check this before the STRIP_NOPS calls
      because they may change the signedness of the arguments.  */
@@ -5547,7 +5551,7 @@ optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
 {
   tree arg0 = op0;
   enum tree_code op_code;
-  tree comp_const = op1;
+  tree comp_const;
   tree minmax_const;
   int consts_equal, consts_lt;
   tree inner;
@@ -5556,6 +5560,7 @@
 
   op_code = TREE_CODE (arg0);
   minmax_const = TREE_OPERAND (arg0, 1);
+  comp_const = fold_convert (TREE_TYPE (arg0), op1);
   consts_equal = tree_int_cst_equal (minmax_const, comp_const);
   consts_lt = tree_int_cst_lt (minmax_const, comp_const);
   inner = TREE_OPERAND (arg0, 0);
@@ -6720,7 +6725,11 @@ fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
          || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
       && (TREE_TYPE (arg1_unw) == shorter_type
          || (TYPE_PRECISION (shorter_type)
-             >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
+             > TYPE_PRECISION (TREE_TYPE (arg1_unw)))
+         || ((TYPE_PRECISION (shorter_type)
+              == TYPE_PRECISION (TREE_TYPE (arg1_unw)))
+             && (TYPE_UNSIGNED (shorter_type)
+                 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
          || (TREE_CODE (arg1_unw) == INTEGER_CST
              && (TREE_CODE (shorter_type) == INTEGER_TYPE
                  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
@@ -8385,6 +8394,62 @@ maybe_canonicalize_comparison (enum tree_code code, tree type,
   return t;
 }
 
+/* Return whether BASE + OFFSET + BITPOS may wrap around the address
+   space.  This is used to avoid issuing overflow warnings for
+   expressions like &p->x which can not wrap.  */
+
+static bool
+pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
+{
+  unsigned HOST_WIDE_INT offset_low, total_low;
+  HOST_WIDE_INT size, offset_high, total_high;
+
+  if (!POINTER_TYPE_P (TREE_TYPE (base)))
+    return true;
+
+  if (bitpos < 0)
+    return true;
+
+  if (offset == NULL_TREE)
+    {
+      offset_low = 0;
+      offset_high = 0;
+    }
+  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
+    return true;
+  else
+    {
+      offset_low = TREE_INT_CST_LOW (offset);
+      offset_high = TREE_INT_CST_HIGH (offset);
+    }
+
+  if (add_double_with_sign (offset_low, offset_high,
+                            bitpos / BITS_PER_UNIT, 0,
+                            &total_low, &total_high,
+                            true))
+    return true;
+
+  if (total_high != 0)
+    return true;
+
+  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
+  if (size <= 0)
+    return true;
+
+  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
+     array.  */
+  if (TREE_CODE (base) == ADDR_EXPR)
+    {
+      HOST_WIDE_INT base_size;
+
+      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
+      if (base_size > 0 && size < base_size)
+        size = base_size;
+    }
+
+  return total_low > (unsigned HOST_WIDE_INT) size;
+}
+
 /* Subroutine of fold_binary.  This routine performs all of the
    transformations that are common to the equality/inequality
    operators (EQ_EXPR and NE_EXPR) and the ordering operators
@@ -8535,10 +8600,24 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
         {
           /* We can fold this expression to a constant if the non-constant
              offset parts are equal.  */
-          if (offset0 == offset1
-              || (offset0 && offset1
-                  && operand_equal_p (offset0, offset1, 0)))
+          if ((offset0 == offset1
+               || (offset0 && offset1
+                   && operand_equal_p (offset0, offset1, 0)))
+              && (code == EQ_EXPR
+                  || code == NE_EXPR
+                  || POINTER_TYPE_OVERFLOW_UNDEFINED))
+            {
+              if (code != EQ_EXPR
+                  && code != NE_EXPR
+                  && bitpos0 != bitpos1
+                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
+                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
+                fold_overflow_warning (("assuming pointer wraparound does not "
+                                        "occur when comparing P +- C1 with "
+                                        "P +- C2"),
+                                       WARN_STRICT_OVERFLOW_CONDITIONAL);
+
             switch (code)
               {
               case EQ_EXPR:
@@ -8563,7 +8642,9 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
              because pointer arithmetic is restricted to retain within an
              object and overflow on pointer differences is undefined as of
              6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
-          else if (bitpos0 == bitpos1)
+          else if (bitpos0 == bitpos1
+                   && ((code == EQ_EXPR || code == NE_EXPR)
+                       || POINTER_TYPE_OVERFLOW_UNDEFINED))
             {
               tree signed_size_type_node;
               signed_size_type_node = signed_type_for (size_type_node);
@@ -8582,6 +8663,15 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
               else
                 offset1 = fold_convert (signed_size_type_node, offset1);
 
+              if (code != EQ_EXPR
+                  && code != NE_EXPR
+                  && (pointer_may_wrap_p (base0, offset0, bitpos0)
+                      || pointer_may_wrap_p (base1, offset1, bitpos1)))
+                fold_overflow_warning (("assuming pointer wraparound does not "
+                                        "occur when comparing P +- C1 with "
+                                        "P +- C2"),
+                                       WARN_STRICT_OVERFLOW_COMPARISON);
+
               return fold_build2 (code, type, offset0, offset1);
             }
         }
@@ -9090,7 +9180,7 @@ get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
             }
         }
 
-      if (DECL_P (expr))
+      if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
         return DECL_ALIGN_UNIT (expr);
     }
   else if (code == POINTER_PLUS_EXPR)
@@ -9706,7 +9796,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1)
 
           /* With undefined overflow we can only associate constants
              with one variable.  */
-          if ((POINTER_TYPE_P (type)
+          if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
                || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
               && var0 && var1)
             {
@@ -13085,6 +13175,45 @@ fold (tree expr)
 
   switch (code)
     {
+    case ARRAY_REF:
+      {
+        tree op0 = TREE_OPERAND (t, 0);
+        tree op1 = TREE_OPERAND (t, 1);
+
+        if (TREE_CODE (op1) == INTEGER_CST
+            && TREE_CODE (op0) == CONSTRUCTOR
+            && ! type_contains_placeholder_p (TREE_TYPE (op0)))
+          {
+            VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
+            unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
+            unsigned HOST_WIDE_INT begin = 0;
+
+            /* Find a matching index by means of a binary search.  */
+            while (begin != end)
+              {
+                unsigned HOST_WIDE_INT middle = (begin + end) / 2;
+                tree index = VEC_index (constructor_elt, elts, middle)->index;
+
+                if (TREE_CODE (index) == INTEGER_CST
+                    && tree_int_cst_lt (index, op1))
+                  begin = middle + 1;
+                else if (TREE_CODE (index) == INTEGER_CST
+                         && tree_int_cst_lt (op1, index))
+                  end = middle;
+                else if (TREE_CODE (index) == RANGE_EXPR
+                         && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
+                  begin = middle + 1;
+                else if (TREE_CODE (index) == RANGE_EXPR
+                         && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
+                  end = middle;
+                else
+                  return VEC_index (constructor_elt, elts, middle)->value;
+              }
+          }
+
+        return t;
+      }
+
     case CONST_DECL:
       return fold (DECL_INITIAL (t));
 
@@ -13962,6 +14091,137 @@ tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
    *STRICT_OVERFLOW_P.  */
 
 bool
+tree_call_nonnegative_warnv_p (enum tree_code code, tree type, tree fndecl,
+                               tree arg0, tree arg1, bool *strict_overflow_p)
+{
+  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+    switch (DECL_FUNCTION_CODE (fndecl))
+      {
+        CASE_FLT_FN (BUILT_IN_ACOS):
+        CASE_FLT_FN (BUILT_IN_ACOSH):
+        CASE_FLT_FN (BUILT_IN_CABS):
+        CASE_FLT_FN (BUILT_IN_COSH):
+        CASE_FLT_FN (BUILT_IN_ERFC):
+        CASE_FLT_FN (BUILT_IN_EXP):
+        CASE_FLT_FN (BUILT_IN_EXP10):
+        CASE_FLT_FN (BUILT_IN_EXP2):
+        CASE_FLT_FN (BUILT_IN_FABS):
+        CASE_FLT_FN (BUILT_IN_FDIM):
+        CASE_FLT_FN (BUILT_IN_HYPOT):
+        CASE_FLT_FN (BUILT_IN_POW10):
+        CASE_INT_FN (BUILT_IN_FFS):
+        CASE_INT_FN (BUILT_IN_PARITY):
+        CASE_INT_FN (BUILT_IN_POPCOUNT):
+      case BUILT_IN_BSWAP32:
+      case BUILT_IN_BSWAP64:
+        /* Always true.  */
+        return true;
+
+        CASE_FLT_FN (BUILT_IN_SQRT):
+        /* sqrt(-0.0) is -0.0.  */
+        if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
+          return true;
+        return tree_expr_nonnegative_warnv_p (arg0,
+                                              strict_overflow_p);
+
+        CASE_FLT_FN (BUILT_IN_ASINH):
+        CASE_FLT_FN (BUILT_IN_ATAN):
+        CASE_FLT_FN (BUILT_IN_ATANH):
+        CASE_FLT_FN (BUILT_IN_CBRT):
+        CASE_FLT_FN (BUILT_IN_CEIL):
+        CASE_FLT_FN (BUILT_IN_ERF):
+        CASE_FLT_FN (BUILT_IN_EXPM1):
+        CASE_FLT_FN (BUILT_IN_FLOOR):
+        CASE_FLT_FN (BUILT_IN_FMOD):
+        CASE_FLT_FN (BUILT_IN_FREXP):
+        CASE_FLT_FN (BUILT_IN_LCEIL):
+        CASE_FLT_FN (BUILT_IN_LDEXP):
+        CASE_FLT_FN (BUILT_IN_LFLOOR):
+        CASE_FLT_FN (BUILT_IN_LLCEIL):
+        CASE_FLT_FN (BUILT_IN_LLFLOOR):
+        CASE_FLT_FN (BUILT_IN_LLRINT):
+        CASE_FLT_FN (BUILT_IN_LLROUND):
+        CASE_FLT_FN (BUILT_IN_LRINT):
+        CASE_FLT_FN (BUILT_IN_LROUND):
+        CASE_FLT_FN (BUILT_IN_MODF):
+        CASE_FLT_FN (BUILT_IN_NEARBYINT):
+        CASE_FLT_FN (BUILT_IN_RINT):
+        CASE_FLT_FN (BUILT_IN_ROUND):
+        CASE_FLT_FN (BUILT_IN_SCALB):
+        CASE_FLT_FN (BUILT_IN_SCALBLN):
+        CASE_FLT_FN (BUILT_IN_SCALBN):
+        CASE_FLT_FN (BUILT_IN_SIGNBIT):
+        CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
+        CASE_FLT_FN (BUILT_IN_SINH):
+        CASE_FLT_FN (BUILT_IN_TANH):
+        CASE_FLT_FN (BUILT_IN_TRUNC):
+        /* True if the 1st argument is nonnegative.  */
+        return tree_expr_nonnegative_warnv_p (arg0,
+                                              strict_overflow_p);
+
+        CASE_FLT_FN (BUILT_IN_FMAX):
+        /* True if the 1st OR 2nd arguments are nonnegative.  */
+        return (tree_expr_nonnegative_warnv_p (arg0,
+                                               strict_overflow_p)
+                || (tree_expr_nonnegative_warnv_p (arg1,
+                                                   strict_overflow_p)));
+
+        CASE_FLT_FN (BUILT_IN_FMIN):
+        /* True if the 1st AND 2nd arguments are nonnegative.  */
+        return (tree_expr_nonnegative_warnv_p (arg0,
+                                               strict_overflow_p)
+                && (tree_expr_nonnegative_warnv_p (arg1,
+                                                   strict_overflow_p)));
+
+        CASE_FLT_FN (BUILT_IN_COPYSIGN):
+        /* True if the 2nd argument is nonnegative.  */
+        return tree_expr_nonnegative_warnv_p (arg1,
+                                              strict_overflow_p);
+
+        CASE_FLT_FN (BUILT_IN_POWI):
+        /* True if the 1st argument is nonnegative or the second
+           argument is an even integer.  */
+        if (TREE_CODE (arg1) == INTEGER_CST
+            && (TREE_INT_CST_LOW (arg1) & 1) == 0)
+          return true;
+        return tree_expr_nonnegative_warnv_p (arg0,
+                                              strict_overflow_p);
+
+        CASE_FLT_FN (BUILT_IN_POW):
+        /* True if the 1st argument is nonnegative or the second
+           argument is an even integer valued real.  */
+        if (TREE_CODE (arg1) == REAL_CST)
+          {
+            REAL_VALUE_TYPE c;
+            HOST_WIDE_INT n;
+
+            c = TREE_REAL_CST (arg1);
+            n = real_to_integer (&c);
+            if ((n & 1) == 0)
+              {
+                REAL_VALUE_TYPE cint;
+                real_from_integer (&cint, VOIDmode, n,
+                                   n < 0 ? -1 : 0, 0);
+                if (real_identical (&c, &cint))
+                  return true;
+              }
+          }
+        return tree_expr_nonnegative_warnv_p (arg0,
+                                              strict_overflow_p);
+
+      default:
+        break;
+      }
+  return tree_simple_nonnegative_warnv_p (code,
+                                          type);
+}
+
+/* Return true if T is known to be non-negative.  If the return
+   value is based on the assumption that signed overflow is undefined,
+   set *STRICT_OVERFLOW_P to true; otherwise, don't change
+   *STRICT_OVERFLOW_P.  */
+
+bool
 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
 {
   enum tree_code code = TREE_CODE (t);
@@ -14005,133 +14265,16 @@ tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
 
     case CALL_EXPR:
       {
-        tree fndecl = get_callee_fndecl (t);
-        if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
-          switch (DECL_FUNCTION_CODE (fndecl))
-            {
-              CASE_FLT_FN (BUILT_IN_ACOS):
-              CASE_FLT_FN (BUILT_IN_ACOSH):
-              CASE_FLT_FN (BUILT_IN_CABS):
-              CASE_FLT_FN (BUILT_IN_COSH):
-              CASE_FLT_FN (BUILT_IN_ERFC):
-              CASE_FLT_FN (BUILT_IN_EXP):
-              CASE_FLT_FN (BUILT_IN_EXP10):
-              CASE_FLT_FN (BUILT_IN_EXP2):
-              CASE_FLT_FN (BUILT_IN_FABS):
-              CASE_FLT_FN (BUILT_IN_FDIM):
-              CASE_FLT_FN (BUILT_IN_HYPOT):
-              CASE_FLT_FN (BUILT_IN_POW10):
-              CASE_INT_FN (BUILT_IN_FFS):
-              CASE_INT_FN (BUILT_IN_PARITY):
-              CASE_INT_FN (BUILT_IN_POPCOUNT):
-            case BUILT_IN_BSWAP32:
-            case BUILT_IN_BSWAP64:
-              /* Always true.  */
-              return true;
-
-              CASE_FLT_FN (BUILT_IN_SQRT):
-              /* sqrt(-0.0) is -0.0.  */
-              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
-                return true;
-              return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
-                                                    strict_overflow_p);
-
-              CASE_FLT_FN (BUILT_IN_ASINH):
-              CASE_FLT_FN (BUILT_IN_ATAN):
-              CASE_FLT_FN (BUILT_IN_ATANH):
-              CASE_FLT_FN (BUILT_IN_CBRT):
-              CASE_FLT_FN (BUILT_IN_CEIL):
-              CASE_FLT_FN (BUILT_IN_ERF):
-              CASE_FLT_FN (BUILT_IN_EXPM1):
-              CASE_FLT_FN (BUILT_IN_FLOOR):
-              CASE_FLT_FN (BUILT_IN_FMOD):
-              CASE_FLT_FN (BUILT_IN_FREXP):
-              CASE_FLT_FN (BUILT_IN_LCEIL):
-              CASE_FLT_FN (BUILT_IN_LDEXP):
-              CASE_FLT_FN (BUILT_IN_LFLOOR):
-              CASE_FLT_FN (BUILT_IN_LLCEIL):
-              CASE_FLT_FN (BUILT_IN_LLFLOOR):
-              CASE_FLT_FN (BUILT_IN_LLRINT):
-              CASE_FLT_FN (BUILT_IN_LLROUND):
-              CASE_FLT_FN (BUILT_IN_LRINT):
-              CASE_FLT_FN (BUILT_IN_LROUND):
-              CASE_FLT_FN (BUILT_IN_MODF):
-              CASE_FLT_FN (BUILT_IN_NEARBYINT):
-              CASE_FLT_FN (BUILT_IN_RINT):
-              CASE_FLT_FN (BUILT_IN_ROUND):
-              CASE_FLT_FN (BUILT_IN_SCALB):
-              CASE_FLT_FN (BUILT_IN_SCALBLN):
-              CASE_FLT_FN (BUILT_IN_SCALBN):
-              CASE_FLT_FN (BUILT_IN_SIGNBIT):
-              CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
-              CASE_FLT_FN (BUILT_IN_SINH):
-              CASE_FLT_FN (BUILT_IN_TANH):
-              CASE_FLT_FN (BUILT_IN_TRUNC):
-              /* True if the 1st argument is nonnegative.  */
-              return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
-                                                    strict_overflow_p);
-
-              CASE_FLT_FN (BUILT_IN_FMAX):
-              /* True if the 1st OR 2nd arguments are nonnegative.  */
-              return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
-                                                     strict_overflow_p)
-                      || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
-                                                         strict_overflow_p)));
-
-              CASE_FLT_FN (BUILT_IN_FMIN):
-              /* True if the 1st AND 2nd arguments are nonnegative.  */
-              return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
-                                                     strict_overflow_p)
-                      && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
-                                                         strict_overflow_p)));
-
-              CASE_FLT_FN (BUILT_IN_COPYSIGN):
-              /* True if the 2nd argument is nonnegative.  */
-              return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
-                                                    strict_overflow_p);
-
-              CASE_FLT_FN (BUILT_IN_POWI):
-              /* True if the 1st argument is nonnegative or the second
-                 argument is an even integer.  */
-              if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
-                {
-                  tree arg1 = CALL_EXPR_ARG (t, 1);
-                  if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
-                    return true;
-                }
-              return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
-                                                    strict_overflow_p);
-
-              CASE_FLT_FN (BUILT_IN_POW):
-              /* True if the 1st argument is nonnegative or the second
-                 argument is an even integer valued real.
*/ - if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST) - { - REAL_VALUE_TYPE c; - HOST_WIDE_INT n; - - c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1)); - n = real_to_integer (&c); - if ((n & 1) == 0) - { - REAL_VALUE_TYPE cint; - real_from_integer (&cint, VOIDmode, n, - n < 0 ? -1 : 0, 0); - if (real_identical (&c, &cint)) - return true; - } - } - return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0), - strict_overflow_p); + tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE; + tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE; - default: - break; - } - return tree_simple_nonnegative_warnv_p (TREE_CODE (t), - TREE_TYPE (t)); + return tree_call_nonnegative_warnv_p (TREE_CODE (t), + TREE_TYPE (t), + get_callee_fndecl (t), + arg0, + arg1, + strict_overflow_p); } - break; - case COMPOUND_EXPR: case MODIFY_EXPR: case GIMPLE_MODIFY_STMT: @@ -14963,6 +15106,34 @@ fold_indirect_ref_1 (tree type, tree op0) } } + /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF */ + if (TREE_CODE (sub) == POINTER_PLUS_EXPR + && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST) + { + tree op00 = TREE_OPERAND (sub, 0); + tree op01 = TREE_OPERAND (sub, 1); + tree op00type; + + STRIP_NOPS (op00); + op00type = TREE_TYPE (op00); + if (TREE_CODE (op00) == ADDR_EXPR + && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE + && type == TREE_TYPE (TREE_TYPE (op00type))) + { + HOST_WIDE_INT offset = tree_low_cst (op01, 0); + tree part_width = TYPE_SIZE (type); + unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT; + unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT; + tree index = bitsize_int (indexi); + + if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type))) + return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0), + part_width, index); + + } + } + + /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */ if (TREE_CODE (sub) == POINTER_PLUS_EXPR && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)