X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Ffold-const.c;h=84dad8b7fb7625135f7d144157d5d97806e9bde9;hb=d067185e213f6a0fa48055ee34ad63a0682b974b;hp=21250f6c4b2d0afee141f97db590f915221fc18e;hpb=ad1f9c12743b531bfe3077bc366ce0a5d73f94ff;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 21250f6c4b2..84dad8b7fb7 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -16,8 +16,8 @@ for more details. You should have received a copy of the GNU General Public License along with GCC; see the file COPYING. If not, write to the Free -Software Foundation, 59 Temple Place - Suite 330, Boston, MA -02111-1307, USA. */ +Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA +02110-1301, USA. */ /*@@ This file should be rewritten to use an arbitrary precision @@ representation for "struct tree_int_cst" and "struct tree_real_cst". @@ -89,7 +89,6 @@ static tree negate_expr (tree); static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int); static tree associate_trees (tree, tree, enum tree_code, tree); static tree const_binop (enum tree_code, tree, tree, int); -static enum tree_code invert_tree_comparison (enum tree_code, bool); static enum comparison_code comparison_to_compcode (enum tree_code); static enum tree_code compcode_to_comparison (enum comparison_code); static tree combine_comparisons (enum tree_code, enum tree_code, @@ -133,7 +132,6 @@ static bool reorder_operands_p (tree, tree); static tree fold_negate_const (tree, tree); static tree fold_not_const (tree, tree); static tree fold_relational_const (enum tree_code, tree, tree, tree); -static bool tree_expr_nonzero_p (tree); /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring overflow. Suppose A, B and SUM have the same respective signs as A1, B1, @@ -831,6 +829,33 @@ div_and_round_double (enum tree_code code, int uns, add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem); return overflow; } + +/* If ARG2 divides ARG1 with zero remainder, carries out the division + of type CODE and returns the quotient. + Otherwise returns NULL_TREE. */ + +static tree +div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2) +{ + unsigned HOST_WIDE_INT int1l, int2l; + HOST_WIDE_INT int1h, int2h; + unsigned HOST_WIDE_INT quol, reml; + HOST_WIDE_INT quoh, remh; + tree type = TREE_TYPE (arg1); + int uns = TYPE_UNSIGNED (type); + + int1l = TREE_INT_CST_LOW (arg1); + int1h = TREE_INT_CST_HIGH (arg1); + int2l = TREE_INT_CST_LOW (arg2); + int2h = TREE_INT_CST_HIGH (arg2); + + div_and_round_double (code, uns, int1l, int1h, int2l, int2h, + &quol, &quoh, &reml, &remh); + if (remh != 0 || reml != 0) + return NULL_TREE; + + return build_int_cst_wide (type, quol, quoh); +} /* Return true if built-in mathematical function specified by CODE preserves the sign of it argument, i.e. -f(x) == f(-x). */ @@ -1573,33 +1598,36 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) case RDIV_EXPR: { + tree t1, t2, real, imag; tree magsquared = const_binop (PLUS_EXPR, const_binop (MULT_EXPR, r2, r2, notrunc), const_binop (MULT_EXPR, i2, i2, notrunc), notrunc); - t = build_complex (type, - const_binop - (INTEGRAL_TYPE_P (TREE_TYPE (r1)) - ? TRUNC_DIV_EXPR : RDIV_EXPR, - const_binop (PLUS_EXPR, - const_binop (MULT_EXPR, r1, r2, - notrunc), - const_binop (MULT_EXPR, i1, i2, - notrunc), - notrunc), - magsquared, notrunc), - const_binop - (INTEGRAL_TYPE_P (TREE_TYPE (r1)) - ? 
TRUNC_DIV_EXPR : RDIV_EXPR, - const_binop (MINUS_EXPR, - const_binop (MULT_EXPR, i1, r2, - notrunc), - const_binop (MULT_EXPR, r1, i2, - notrunc), - notrunc), - magsquared, notrunc)); + t1 = const_binop (PLUS_EXPR, + const_binop (MULT_EXPR, r1, r2, notrunc), + const_binop (MULT_EXPR, i1, i2, notrunc), + notrunc); + t2 = const_binop (MINUS_EXPR, + const_binop (MULT_EXPR, i1, r2, notrunc), + const_binop (MULT_EXPR, r1, i2, notrunc), + notrunc); + + if (INTEGRAL_TYPE_P (TREE_TYPE (r1))) + { + real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc); + imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc); + } + else + { + real = const_binop (RDIV_EXPR, t1, magsquared, notrunc); + imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc); + if (!real || !imag) + return NULL_TREE; + } + + t = build_complex (type, real, imag); } break; @@ -2001,7 +2029,7 @@ fold_convert (tree type, tree arg) } } -/* Return false if expr can be assumed not to be an value, true +/* Return false if expr can be assumed not to be an lvalue, true otherwise. */ static bool @@ -2089,7 +2117,7 @@ pedantic_non_lvalue (tree x) comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode as well: if reversing the comparison is unsafe, return ERROR_MARK. */ -static enum tree_code +enum tree_code invert_tree_comparison (enum tree_code code, bool honor_nans) { if (honor_nans && flag_trapping_math) @@ -2140,6 +2168,10 @@ swap_tree_comparison (enum tree_code code) { case EQ_EXPR: case NE_EXPR: + case ORDERED_EXPR: + case UNORDERED_EXPR: + case LTGT_EXPR: + case UNEQ_EXPR: return code; case GT_EXPR: return LT_EXPR; @@ -2149,6 +2181,14 @@ swap_tree_comparison (enum tree_code code) return GT_EXPR; case LE_EXPR: return GE_EXPR; + case UNGT_EXPR: + return UNLT_EXPR; + case UNGE_EXPR: + return UNLE_EXPR; + case UNLT_EXPR: + return UNGT_EXPR; + case UNLE_EXPR: + return UNGE_EXPR; default: gcc_unreachable (); } @@ -2431,7 +2471,7 @@ operand_equal_p (tree arg0, tree arg1, unsigned int flags) v2 = TREE_CHAIN (v2); } - return 1; + return v1 == v2; } case COMPLEX_CST: @@ -3076,6 +3116,46 @@ distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1) return fold_build2 (TREE_CODE (arg0), type, common, fold_build2 (code, type, left, right)); } + +/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation + with code CODE. This optimization is unsafe. */ +static tree +distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1) +{ + bool mul0 = TREE_CODE (arg0) == MULT_EXPR; + bool mul1 = TREE_CODE (arg1) == MULT_EXPR; + + /* (A / C) +- (B / C) -> (A +- B) / C. */ + if (mul0 == mul1 + && operand_equal_p (TREE_OPERAND (arg0, 1), + TREE_OPERAND (arg1, 1), 0)) + return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type, + fold_build2 (code, type, + TREE_OPERAND (arg0, 0), + TREE_OPERAND (arg1, 0)), + TREE_OPERAND (arg0, 1)); + + /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). 
*/ + if (operand_equal_p (TREE_OPERAND (arg0, 0), + TREE_OPERAND (arg1, 0), 0) + && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST + && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST) + { + REAL_VALUE_TYPE r0, r1; + r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1)); + r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1)); + if (!mul0) + real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0); + if (!mul1) + real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1); + real_arithmetic (&r0, code, &r0, &r1); + return fold_build2 (MULT_EXPR, type, + TREE_OPERAND (arg0, 0), + build_real (type, r0)); + } + + return NULL_TREE; +} /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */ @@ -3849,6 +3929,15 @@ build_range_check (tree type, tree exp, int in_p, tree low, tree high) tree etype = TREE_TYPE (exp); tree value; +#ifdef HAVE_canonicalize_funcptr_for_compare + /* Disable this optimization for function pointer expressions + on targets that require function pointer canonicalization. */ + if (HAVE_canonicalize_funcptr_for_compare + && TREE_CODE (etype) == POINTER_TYPE + && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE) + return NULL_TREE; +#endif + if (! in_p) { value = build_range_check (type, exp, 1, low, high); @@ -3862,13 +3951,16 @@ build_range_check (tree type, tree exp, int in_p, tree low, tree high) return fold_convert (type, integer_one_node); if (low == 0) - return fold_build2 (LE_EXPR, type, exp, high); + return fold_build2 (LE_EXPR, type, exp, + fold_convert (etype, high)); if (high == 0) - return fold_build2 (GE_EXPR, type, exp, low); + return fold_build2 (GE_EXPR, type, exp, + fold_convert (etype, low)); if (operand_equal_p (low, high, 0)) - return fold_build2 (EQ_EXPR, type, exp, low); + return fold_build2 (EQ_EXPR, type, exp, + fold_convert (etype, low)); if (integer_zerop (low)) { @@ -5418,26 +5510,31 @@ constant_boolean_node (int value, tree type) /* Return true if expr looks like an ARRAY_REF and set base and offset to the appropriate trees. If there is no offset, - offset is set to NULL_TREE. */ + offset is set to NULL_TREE. Base will be canonicalized to + something you can get the element type from using + TREE_TYPE (TREE_TYPE (base)). */ static bool extract_array_ref (tree expr, tree *base, tree *offset) { - /* We have to be careful with stripping nops as with the - base type the meaning of the offset can change. */ - tree inner_expr = expr; - STRIP_NOPS (inner_expr); /* One canonical form is a PLUS_EXPR with the first argument being an ADDR_EXPR with a possible NOP_EXPR attached. */ if (TREE_CODE (expr) == PLUS_EXPR) { tree op0 = TREE_OPERAND (expr, 0); + tree inner_base, dummy1; + /* Strip NOP_EXPRs here because the C frontends and/or + folders present us (int *)&x.a + 4B possibly. */ STRIP_NOPS (op0); - if (TREE_CODE (op0) == ADDR_EXPR) + if (extract_array_ref (op0, &inner_base, &dummy1)) { - *base = TREE_OPERAND (expr, 0); - *offset = TREE_OPERAND (expr, 1); + *base = inner_base; + if (dummy1 == NULL_TREE) + *offset = TREE_OPERAND (expr, 1); + else + *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr), + dummy1, TREE_OPERAND (expr, 1)); return true; } } @@ -5446,21 +5543,33 @@ extract_array_ref (tree expr, tree *base, tree *offset) offset. For other arguments to the ADDR_EXPR we assume zero offset and as such do not care about the ADDR_EXPR type and strip possible nops from it. 
*/ - else if (TREE_CODE (inner_expr) == ADDR_EXPR) + else if (TREE_CODE (expr) == ADDR_EXPR) { - tree op0 = TREE_OPERAND (inner_expr, 0); + tree op0 = TREE_OPERAND (expr, 0); if (TREE_CODE (op0) == ARRAY_REF) { - *base = build_fold_addr_expr (TREE_OPERAND (op0, 0)); + *base = TREE_OPERAND (op0, 0); *offset = TREE_OPERAND (op0, 1); } else { - *base = inner_expr; + /* Handle array-to-pointer decay as &a. */ + if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE) + *base = TREE_OPERAND (expr, 0); + else + *base = expr; *offset = NULL_TREE; } return true; } + /* The next canonical form is a VAR_DECL with POINTER_TYPE. */ + else if (SSA_VAR_P (expr) + && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE) + { + *base = expr; + *offset = NULL_TREE; + return true; + } return false; } @@ -6311,6 +6420,10 @@ try_move_mult_to_index (enum tree_code code, tree addr, tree op1) { if (TREE_CODE (ref) == ARRAY_REF) { + itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0))); + if (! itype) + continue; + step = array_ref_element_size (ref); if (TREE_CODE (step) != INTEGER_CST) continue; @@ -6323,17 +6436,12 @@ try_move_mult_to_index (enum tree_code code, tree addr, tree op1) else { /* Try if delta is a multiple of step. */ - tree mod = int_const_binop (TRUNC_MOD_EXPR, delta, step, 0); - if (!integer_zerop (mod)) + tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step); + if (! tmp) continue; - - delta = int_const_binop (EXACT_DIV_EXPR, delta, step, 0); + delta = tmp; } - itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0))); - if (! itype) - continue; - break; } @@ -6408,300 +6516,6 @@ fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound) return fold_build2 (GE_EXPR, type, a, y); } -/* Fold complex addition when both components are accessible by parts. - Return non-null if successful. CODE should be PLUS_EXPR for addition, - or MINUS_EXPR for subtraction. */ - -static tree -fold_complex_add (tree type, tree ac, tree bc, enum tree_code code) -{ - tree ar, ai, br, bi, rr, ri, inner_type; - - if (TREE_CODE (ac) == COMPLEX_EXPR) - ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1); - else if (TREE_CODE (ac) == COMPLEX_CST) - ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac); - else - return NULL; - - if (TREE_CODE (bc) == COMPLEX_EXPR) - br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1); - else if (TREE_CODE (bc) == COMPLEX_CST) - br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc); - else - return NULL; - - inner_type = TREE_TYPE (type); - - rr = fold_build2 (code, inner_type, ar, br); - ri = fold_build2 (code, inner_type, ai, bi); - - return fold_build2 (COMPLEX_EXPR, type, rr, ri); -} - -/* Perform some simplifications of complex multiplication when one or more - of the components are constants or zeros. Return non-null if successful. */ - -tree -fold_complex_mult_parts (tree type, tree ar, tree ai, tree br, tree bi) -{ - tree rr, ri, inner_type, zero; - bool ar0, ai0, br0, bi0, bi1; - - inner_type = TREE_TYPE (type); - zero = NULL; - - if (SCALAR_FLOAT_TYPE_P (inner_type)) - { - ar0 = ai0 = br0 = bi0 = bi1 = false; - - /* We're only interested in +0.0 here, thus we don't use real_zerop. 
*/ - - if (TREE_CODE (ar) == REAL_CST - && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0)) - ar0 = true, zero = ar; - - if (TREE_CODE (ai) == REAL_CST - && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0)) - ai0 = true, zero = ai; - - if (TREE_CODE (br) == REAL_CST - && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0)) - br0 = true, zero = br; - - if (TREE_CODE (bi) == REAL_CST) - { - if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0)) - bi0 = true, zero = bi; - else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1)) - bi1 = true; - } - } - else - { - ar0 = integer_zerop (ar); - if (ar0) - zero = ar; - ai0 = integer_zerop (ai); - if (ai0) - zero = ai; - br0 = integer_zerop (br); - if (br0) - zero = br; - bi0 = integer_zerop (bi); - if (bi0) - { - zero = bi; - bi1 = false; - } - else - bi1 = integer_onep (bi); - } - - /* We won't optimize anything below unless something is zero. */ - if (zero == NULL) - return NULL; - - if (ai0 && br0 && bi1) - { - rr = zero; - ri = ar; - } - else if (ai0 && bi0) - { - rr = fold_build2 (MULT_EXPR, inner_type, ar, br); - ri = zero; - } - else if (ai0 && br0) - { - rr = zero; - ri = fold_build2 (MULT_EXPR, inner_type, ar, bi); - } - else if (ar0 && bi0) - { - rr = zero; - ri = fold_build2 (MULT_EXPR, inner_type, ai, br); - } - else if (ar0 && br0) - { - rr = fold_build2 (MULT_EXPR, inner_type, ai, bi); - rr = fold_build1 (NEGATE_EXPR, inner_type, rr); - ri = zero; - } - else if (bi0) - { - rr = fold_build2 (MULT_EXPR, inner_type, ar, br); - ri = fold_build2 (MULT_EXPR, inner_type, ai, br); - } - else if (ai0) - { - rr = fold_build2 (MULT_EXPR, inner_type, ar, br); - ri = fold_build2 (MULT_EXPR, inner_type, ar, bi); - } - else if (br0) - { - rr = fold_build2 (MULT_EXPR, inner_type, ai, bi); - rr = fold_build1 (NEGATE_EXPR, inner_type, rr); - ri = fold_build2 (MULT_EXPR, inner_type, ar, bi); - } - else if (ar0) - { - rr = fold_build2 (MULT_EXPR, inner_type, ai, bi); - rr = fold_build1 (NEGATE_EXPR, inner_type, rr); - ri = fold_build2 (MULT_EXPR, inner_type, ai, br); - } - else - return NULL; - - return fold_build2 (COMPLEX_EXPR, type, rr, ri); -} - -static tree -fold_complex_mult (tree type, tree ac, tree bc) -{ - tree ar, ai, br, bi; - - if (TREE_CODE (ac) == COMPLEX_EXPR) - ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1); - else if (TREE_CODE (ac) == COMPLEX_CST) - ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac); - else - return NULL; - - if (TREE_CODE (bc) == COMPLEX_EXPR) - br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1); - else if (TREE_CODE (bc) == COMPLEX_CST) - br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc); - else - return NULL; - - return fold_complex_mult_parts (type, ar, ai, br, bi); -} - -/* Perform some simplifications of complex division when one or more of - the components are constants or zeros. Return non-null if successful. */ - -tree -fold_complex_div_parts (tree type, tree ar, tree ai, tree br, tree bi, - enum tree_code code) -{ - tree rr, ri, inner_type, zero; - bool ar0, ai0, br0, bi0, bi1; - - inner_type = TREE_TYPE (type); - zero = NULL; - - if (SCALAR_FLOAT_TYPE_P (inner_type)) - { - ar0 = ai0 = br0 = bi0 = bi1 = false; - - /* We're only interested in +0.0 here, thus we don't use real_zerop. 
*/ - - if (TREE_CODE (ar) == REAL_CST - && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ar), dconst0)) - ar0 = true, zero = ar; - - if (TREE_CODE (ai) == REAL_CST - && REAL_VALUES_IDENTICAL (TREE_REAL_CST (ai), dconst0)) - ai0 = true, zero = ai; - - if (TREE_CODE (br) == REAL_CST - && REAL_VALUES_IDENTICAL (TREE_REAL_CST (br), dconst0)) - br0 = true, zero = br; - - if (TREE_CODE (bi) == REAL_CST) - { - if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst0)) - bi0 = true, zero = bi; - else if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (bi), dconst1)) - bi1 = true; - } - } - else - { - ar0 = integer_zerop (ar); - if (ar0) - zero = ar; - ai0 = integer_zerop (ai); - if (ai0) - zero = ai; - br0 = integer_zerop (br); - if (br0) - zero = br; - bi0 = integer_zerop (bi); - if (bi0) - { - zero = bi; - bi1 = false; - } - else - bi1 = integer_onep (bi); - } - - /* We won't optimize anything below unless something is zero. */ - if (zero == NULL) - return NULL; - - if (ai0 && bi0) - { - rr = fold_build2 (code, inner_type, ar, br); - ri = zero; - } - else if (ai0 && br0) - { - rr = zero; - ri = fold_build2 (code, inner_type, ar, bi); - ri = fold_build1 (NEGATE_EXPR, inner_type, ri); - } - else if (ar0 && bi0) - { - rr = zero; - ri = fold_build2 (code, inner_type, ai, br); - } - else if (ar0 && br0) - { - rr = fold_build2 (code, inner_type, ai, bi); - ri = zero; - } - else if (bi0) - { - rr = fold_build2 (code, inner_type, ar, br); - ri = fold_build2 (code, inner_type, ai, br); - } - else if (br0) - { - rr = fold_build2 (code, inner_type, ai, bi); - ri = fold_build2 (code, inner_type, ar, bi); - ri = fold_build1 (NEGATE_EXPR, inner_type, ri); - } - else - return NULL; - - return fold_build2 (COMPLEX_EXPR, type, rr, ri); -} - -static tree -fold_complex_div (tree type, tree ac, tree bc, enum tree_code code) -{ - tree ar, ai, br, bi; - - if (TREE_CODE (ac) == COMPLEX_EXPR) - ar = TREE_OPERAND (ac, 0), ai = TREE_OPERAND (ac, 1); - else if (TREE_CODE (ac) == COMPLEX_CST) - ar = TREE_REALPART (ac), ai = TREE_IMAGPART (ac); - else - return NULL; - - if (TREE_CODE (bc) == COMPLEX_EXPR) - br = TREE_OPERAND (bc, 0), bi = TREE_OPERAND (bc, 1); - else if (TREE_CODE (bc) == COMPLEX_CST) - br = TREE_REALPART (bc), bi = TREE_IMAGPART (bc); - else - return NULL; - - return fold_complex_div_parts (type, ar, ai, br, bi, code); -} - /* Fold a unary expression of code CODE and type TYPE with operand OP0. Return the folded expression if folding is successful. Otherwise, return NULL_TREE. */ @@ -6719,9 +6533,11 @@ fold_unary (enum tree_code code, tree type, tree op0) arg0 = op0; if (arg0) { - if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR) + if (code == NOP_EXPR || code == CONVERT_EXPR + || code == FLOAT_EXPR || code == ABS_EXPR) { - /* Don't use STRIP_NOPS, because signedness of argument type matters. */ + /* Don't use STRIP_NOPS, because signedness of argument type + matters. */ STRIP_SIGN_NOPS (arg0); } else @@ -6904,6 +6720,29 @@ fold_unary (enum tree_code code, tree type, tree op0) return fold_build1 (code, type, TREE_OPERAND (op0, 0)); } + /* Handle (T *)&A.B.C for A being of type T and B and C + living at offset zero. This occurs frequently in + C++ upcasting and then accessing the base. 
*/ + if (TREE_CODE (op0) == ADDR_EXPR + && POINTER_TYPE_P (type) + && handled_component_p (TREE_OPERAND (op0, 0))) + { + HOST_WIDE_INT bitsize, bitpos; + tree offset; + enum machine_mode mode; + int unsignedp, volatilep; + tree base = TREE_OPERAND (op0, 0); + base = get_inner_reference (base, &bitsize, &bitpos, &offset, + &mode, &unsignedp, &volatilep, false); + /* If the reference was to a (constant) zero offset, we can use + the address of the base if it has the same base type + as the result type. */ + if (! offset && bitpos == 0 + && TYPE_MAIN_VARIANT (TREE_TYPE (type)) + == TYPE_MAIN_VARIANT (TREE_TYPE (base))) + return fold_convert (type, build_fold_addr_expr (base)); + } + if (TREE_CODE (op0) == MODIFY_EXPR && TREE_CONSTANT (TREE_OPERAND (op0, 1)) /* Detect assigning a bitfield. */ @@ -7021,7 +6860,8 @@ fold_unary (enum tree_code code, tree type, tree op0) TREE_TYPE (targ0), targ0)); } - else if (tree_expr_nonnegative_p (arg0)) + /* ABS_EXPR> = ABS_EXPR even if flag_wrapv is on. */ + else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR) return arg0; /* Strip sign ops from argument. */ @@ -7333,13 +7173,6 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && integer_onep (arg1)) return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0)); - if (TREE_CODE (type) == COMPLEX_TYPE) - { - tem = fold_complex_add (type, arg0, arg1, PLUS_EXPR); - if (tem) - return tem; - } - if (! FLOAT_TYPE_P (type)) { if (integer_zerop (arg1)) @@ -7502,6 +7335,12 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) fold_convert (type, tem)); } + if (flag_unsafe_math_optimizations + && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR) + && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR) + && (tem = distribute_real_division (code, type, arg0, arg1))) + return tem; + /* Convert x+x into x*2.0. */ if (operand_equal_p (arg0, arg1, 0) && SCALAR_FLOAT_TYPE_P (type)) @@ -7779,13 +7618,6 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && integer_all_onesp (arg0)) return fold_build1 (BIT_NOT_EXPR, type, arg1); - if (TREE_CODE (type) == COMPLEX_TYPE) - { - tem = fold_complex_add (type, arg0, arg1, MINUS_EXPR); - if (tem) - return tem; - } - if (! FLOAT_TYPE_P (type)) { if (! wins && integer_zerop (arg0)) @@ -7856,7 +7688,9 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && (TREE_CODE (arg1) != REAL_CST || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))) || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))) - return fold_build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)); + return fold_build2 (PLUS_EXPR, type, + fold_convert (type, arg0), + fold_convert (type, negate_expr (arg1))); /* Try folding difference of addresses. */ { @@ -7899,6 +7733,12 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) return fold_convert (type, fold (tem)); } + if (flag_unsafe_math_optimizations + && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR) + && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR) + && (tem = distribute_real_division (code, type, arg0, arg1))) + return tem; + if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)) @@ -7934,13 +7774,6 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) negate_expr (arg0), TREE_OPERAND (arg1, 0)); - if (TREE_CODE (type) == COMPLEX_TYPE) - { - tem = fold_complex_mult (type, arg0, arg1); - if (tem) - return tem; - } - if (! 
FLOAT_TYPE_P (type)) { if (integer_zerop (arg1)) @@ -8260,6 +8093,54 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) goto bit_ior; } + /* (X | Y) ^ X -> Y & ~ X*/ + if (TREE_CODE (arg0) == BIT_IOR_EXPR + && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) + { + tree t2 = TREE_OPERAND (arg0, 1); + t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), + arg1); + t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2), + fold_convert (type, t1)); + return t1; + } + + /* (Y | X) ^ X -> Y & ~ X*/ + if (TREE_CODE (arg0) == BIT_IOR_EXPR + && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) + { + tree t2 = TREE_OPERAND (arg0, 0); + t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), + arg1); + t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2), + fold_convert (type, t1)); + return t1; + } + + /* X ^ (X | Y) -> Y & ~ X*/ + if (TREE_CODE (arg1) == BIT_IOR_EXPR + && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0)) + { + tree t2 = TREE_OPERAND (arg1, 1); + t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0), + arg0); + t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2), + fold_convert (type, t1)); + return t1; + } + + /* X ^ (Y | X) -> Y & ~ X*/ + if (TREE_CODE (arg1) == BIT_IOR_EXPR + && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0)) + { + tree t2 = TREE_OPERAND (arg1, 0); + t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0), + arg0); + t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2), + fold_convert (type, t1)); + return t1; + } + /* Convert ~X ^ ~Y to X ^ Y. */ if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == BIT_NOT_EXPR) @@ -8369,7 +8250,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r)) { tem = build_real (type, r); - return fold_build2 (MULT_EXPR, type, arg0, tem); + return fold_build2 (MULT_EXPR, type, + fold_convert (type, arg0), tem); } } } @@ -8401,13 +8283,6 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) TREE_OPERAND (arg1, 0)); } - if (TREE_CODE (type) == COMPLEX_TYPE) - { - tem = fold_complex_div (type, arg0, arg1, code); - if (tem) - return tem; - } - if (flag_unsafe_math_optimizations) { enum built_in_function fcode = builtin_mathfn_code (arg1); @@ -8531,12 +8406,6 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE))) return fold_convert (type, tem); - if (TREE_CODE (type) == COMPLEX_TYPE) - { - tem = fold_complex_div (type, arg0, arg1, code); - if (tem) - return tem; - } goto binary; case CEIL_MOD_EXPR: @@ -8565,11 +8434,11 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && TREE_INT_CST_HIGH (arg1) == -1) return omit_one_operand (type, integer_zero_node, arg0); - /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a - BIT_AND_EXPR, i.e. "X % C" into "X & C2". */ - if (code == TRUNC_MOD_EXPR - && TYPE_UNSIGNED (type) - && integer_pow2p (arg1)) + /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR, + i.e. "X % C" into "X & C2", if X and C are positive. */ + if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR) + && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)) + && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0) { unsigned HOST_WIDE_INT high, low; tree mask; @@ -8642,6 +8511,58 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) don't try to compute it in the compiler. 
*/ if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0) return NULL_TREE; + + /* Turn (a OP c1) OP c2 into a OP (c1+c2). */ + if (TREE_CODE (arg0) == code && host_integerp (arg1, false) + && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type) + && host_integerp (TREE_OPERAND (arg0, 1), false) + && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type)) + { + HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) + + TREE_INT_CST_LOW (arg1)); + + /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2 + being well defined. */ + if (low >= TYPE_PRECISION (type)) + { + if (code == LROTATE_EXPR || code == RROTATE_EXPR) + low = low % TYPE_PRECISION (type); + else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR) + return build_int_cst (type, 0); + else + low = TYPE_PRECISION (type) - 1; + } + + return fold_build2 (code, type, TREE_OPERAND (arg0, 0), + build_int_cst (type, low)); + } + + /* Transform (x >> c) << c into x & (-1<> c + into x & ((unsigned)-1 >> c) for unsigned types. */ + if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR) + || (TYPE_UNSIGNED (type) + && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR)) + && host_integerp (arg1, false) + && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type) + && host_integerp (TREE_OPERAND (arg0, 1), false) + && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type)) + { + HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)); + HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1); + tree lshift; + tree arg00; + + if (low0 == low1) + { + arg00 = fold_convert (type, TREE_OPERAND (arg0, 0)); + + lshift = build_int_cst (type, -1); + lshift = int_const_binop (code, lshift, arg1, 0); + + return fold_build2 (BIT_AND_EXPR, type, arg00, lshift); + } + } + /* Rewrite an LROTATE_EXPR by a constant into an RROTATE_EXPR by a new constant. */ if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST) @@ -8739,11 +8660,11 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && !TREE_SIDE_EFFECTS (arg1)) { tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1); - if (tem) + if (tem && !operand_equal_p (tem, arg0, 0)) return fold_build2 (code, type, tem, arg1); tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0); - if (tem) + if (tem && !operand_equal_p (tem, arg1, 0)) return fold_build2 (code, type, arg0, tem); } @@ -8900,7 +8821,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) object against zero, then we know the result. */ if ((code == EQ_EXPR || code == NE_EXPR) && TREE_CODE (arg0) == ADDR_EXPR - && DECL_P (TREE_OPERAND (arg0, 0)) + && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0)) && ! DECL_WEAK (TREE_OPERAND (arg0, 0)) && integer_zerop (arg1)) return constant_boolean_node (code != EQ_EXPR, type); @@ -8910,20 +8831,34 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) have access to attributes for externs), then we know the result. */ if ((code == EQ_EXPR || code == NE_EXPR) && TREE_CODE (arg0) == ADDR_EXPR - && DECL_P (TREE_OPERAND (arg0, 0)) + && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0)) && ! DECL_WEAK (TREE_OPERAND (arg0, 0)) && ! lookup_attribute ("alias", DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0))) && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0)) && TREE_CODE (arg1) == ADDR_EXPR - && DECL_P (TREE_OPERAND (arg1, 0)) + && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0)) && ! DECL_WEAK (TREE_OPERAND (arg1, 0)) && ! lookup_attribute ("alias", DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0))) && ! 
DECL_EXTERNAL (TREE_OPERAND (arg1, 0))) - return constant_boolean_node (operand_equal_p (arg0, arg1, 0) - ? code == EQ_EXPR : code != EQ_EXPR, - type); + { + /* We know that we're looking at the address of two + non-weak, unaliased, static _DECL nodes. + + It is both wasteful and incorrect to call operand_equal_p + to compare the two ADDR_EXPR nodes. It is wasteful in that + all we need to do is test pointer equality for the arguments + to the two ADDR_EXPR nodes. It is incorrect to use + operand_equal_p as that function is NOT equivalent to a + C equality test. It can in fact return false for two + objects which would test as equal using the C equality + operator. */ + bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0); + return constant_boolean_node (equal + ? code == EQ_EXPR : code != EQ_EXPR, + type); + } /* If this is a comparison of two exprs that look like an ARRAY_REF of the same object, then we can fold this to a @@ -8936,6 +8871,12 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && extract_array_ref (arg1, &base1, &offset1) && operand_equal_p (base0, base1, 0)) { + if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0))) + && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0))))) + offset0 = NULL_TREE; + if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1))) + && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1))))) + offset1 = NULL_TREE; if (offset0 == NULL_TREE && offset1 == NULL_TREE) { @@ -9023,6 +8964,30 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) } } + /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */ + if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) + && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST + && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)) + && !TYPE_UNSIGNED (TREE_TYPE (arg1)) + && !(flag_wrapv || flag_trapv)) + && (TREE_CODE (arg1) == INTEGER_CST + && !TREE_OVERFLOW (arg1))) + { + tree const1 = TREE_OPERAND (arg0, 1); + tree const2 = arg1; + tree variable = TREE_OPERAND (arg0, 0); + tree lhs; + int lhs_add; + lhs_add = TREE_CODE (arg0) != PLUS_EXPR; + + lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR, + TREE_TYPE (arg1), const2, const1); + if (TREE_CODE (lhs) == TREE_CODE (arg1) + && (TREE_CODE (lhs) != INTEGER_CST + || !TREE_OVERFLOW (lhs))) + return fold_build2 (code, type, variable, lhs); + } + if (FLOAT_TYPE_P (TREE_TYPE (arg0))) { tree targ0 = strip_float_extensions (arg0); @@ -9197,12 +9162,16 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) switch (code) { case GE_EXPR: - arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0); - return fold_build2 (GT_EXPR, type, arg0, arg1); + arg1 = const_binop (MINUS_EXPR, arg1, + build_int_cst (TREE_TYPE (arg1), 1), 0); + return fold_build2 (GT_EXPR, type, arg0, + fold_convert (TREE_TYPE (arg0), arg1)); case LT_EXPR: - arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0); - return fold_build2 (LE_EXPR, type, arg0, arg1); + arg1 = const_binop (MINUS_EXPR, arg1, + build_int_cst (TREE_TYPE (arg1), 1), 0); + return fold_build2 (LE_EXPR, type, arg0, + fold_convert (TREE_TYPE (arg0), arg1)); default: break; @@ -9350,10 +9319,9 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) tree st0, st1; st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0)); st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1)); - return fold - (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR, - type, fold_convert (st0, arg0), - fold_convert (st1, integer_zero_node))); + return fold_build2 (code == LE_EXPR ? 
GE_EXPR: LT_EXPR, + type, fold_convert (st0, arg0), + build_int_cst (st1, 0)); } } } @@ -9764,34 +9732,6 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) } } - /* If this is a comparison of complex values and either or both sides - are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the - comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. - This may prevent needless evaluations. */ - if ((code == EQ_EXPR || code == NE_EXPR) - && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE - && (TREE_CODE (arg0) == COMPLEX_EXPR - || TREE_CODE (arg1) == COMPLEX_EXPR - || TREE_CODE (arg0) == COMPLEX_CST - || TREE_CODE (arg1) == COMPLEX_CST)) - { - tree subtype = TREE_TYPE (TREE_TYPE (arg0)); - tree real0, imag0, real1, imag1; - - arg0 = save_expr (arg0); - arg1 = save_expr (arg1); - real0 = fold_build1 (REALPART_EXPR, subtype, arg0); - imag0 = fold_build1 (IMAGPART_EXPR, subtype, arg0); - real1 = fold_build1 (REALPART_EXPR, subtype, arg1); - imag1 = fold_build1 (IMAGPART_EXPR, subtype, arg1); - - return fold_build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR - : TRUTH_ORIF_EXPR), - type, - fold_build2 (code, type, real0, real1), - fold_build2 (code, type, imag0, imag1)); - } - /* Optimize comparisons of strlen vs zero to a compare of the first character of the string vs zero. To wit, strlen(ptr) == 0 => *ptr == 0 @@ -9811,11 +9751,11 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && (arglist = TREE_OPERAND (arg0, 1)) && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE && ! TREE_CHAIN (arglist)) - return fold_build2 (code, type, - build1 (INDIRECT_REF, char_type_node, - TREE_VALUE (arglist)), - fold_convert (char_type_node, - integer_zero_node)); + { + tree iref = build_fold_indirect_ref (TREE_VALUE (arglist)); + return fold_build2 (code, type, iref, + build_int_cst (TREE_TYPE (iref), 0)); + } } /* We can fold X/C1 op C2 where C1 and C2 are integer constants @@ -9834,10 +9774,12 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) } if ((code == EQ_EXPR || code == NE_EXPR) - && !TREE_SIDE_EFFECTS (arg0) && integer_zerop (arg1) && tree_expr_nonzero_p (arg0)) - return constant_boolean_node (code==NE_EXPR, type); + { + tree res = constant_boolean_node (code==NE_EXPR, type); + return omit_one_operand (type, res, arg0); + } t1 = fold_relational_const (code, type, arg0, arg1); return t1 == NULL_TREE ? NULL_TREE : t1; @@ -9950,7 +9892,7 @@ contains_label_1 (tree *tp, } } -/* Checks wheter the sub-tree ST contains a label LABEL_EXPR which is +/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is accessible from outside the sub-tree. Returns NULL_TREE if no addressable label is found. */ @@ -10002,9 +9944,11 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) if (TREE_CODE (arg0) == CONSTRUCTOR && ! 
type_contains_placeholder_p (TREE_TYPE (arg0))) { - tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0)); - if (m) - return TREE_VALUE (m); + unsigned HOST_WIDE_INT idx; + tree field, value; + FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value) + if (field == arg1) + return value; } return NULL_TREE; @@ -10180,6 +10124,32 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) } return NULL_TREE; + case BIT_FIELD_REF: + if (TREE_CODE (arg0) == VECTOR_CST + && type == TREE_TYPE (TREE_TYPE (arg0)) + && host_integerp (arg1, 1) + && host_integerp (op2, 1)) + { + unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1); + unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1); + + if (width != 0 + && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1 + && (idx % width) == 0 + && (idx = idx / width) + < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))) + { + tree elements = TREE_VECTOR_CST_ELTS (arg0); + while (idx-- > 0 && elements) + elements = TREE_CHAIN (elements); + if (elements) + return TREE_VALUE (elements); + else + return fold_convert (type, integer_zero_node); + } + } + return NULL_TREE; + default: return NULL_TREE; } /* switch (code) */ @@ -10313,12 +10283,14 @@ fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht) { void **slot; enum tree_code code; - char buf[sizeof (struct tree_decl)]; + char buf[sizeof (struct tree_decl_non_common)]; int i, len; + +recursive_label: gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree) - <= sizeof (struct tree_decl)) - && sizeof (struct tree_type) <= sizeof (struct tree_decl)); + <= sizeof (struct tree_decl_non_common)) + && sizeof (struct tree_type) <= sizeof (struct tree_decl_non_common)); if (expr == NULL) return; slot = htab_find_slot (ht, expr, INSERT); @@ -10336,11 +10308,13 @@ fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht) } else if (TREE_CODE_CLASS (code) == tcc_type && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr) - || TYPE_CACHED_VALUES_P (expr))) + || TYPE_CACHED_VALUES_P (expr) + || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr))) { /* Allow these fields to be modified. */ memcpy (buf, expr, tree_size (expr)); expr = (tree) buf; + TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0; TYPE_POINTER_TO (expr) = NULL; TYPE_REFERENCE_TO (expr) = NULL; if (TYPE_CACHED_VALUES_P (expr)) @@ -10352,7 +10326,8 @@ fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht) md5_process_bytes (expr, tree_size (expr), ctx); fold_checksum_tree (TREE_TYPE (expr), ctx, ht); if (TREE_CODE_CLASS (code) != tcc_type - && TREE_CODE_CLASS (code) != tcc_declaration) + && TREE_CODE_CLASS (code) != tcc_declaration + && code != TREE_LIST) fold_checksum_tree (TREE_CHAIN (expr), ctx, ht); switch (TREE_CODE_CLASS (code)) { @@ -10380,6 +10355,8 @@ fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht) case TREE_LIST: fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht); fold_checksum_tree (TREE_VALUE (expr), ctx, ht); + expr = TREE_CHAIN (expr); + goto recursive_label; break; case TREE_VEC: for (i = 0; i < TREE_VEC_LENGTH (expr); ++i) @@ -10445,13 +10422,35 @@ fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht) operand OP0. 
*/ tree -fold_build1 (enum tree_code code, tree type, tree op0) +fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL) { - tree tem = fold_unary (code, type, op0); - if (tem) - return tem; + tree tem; +#ifdef ENABLE_FOLD_CHECKING + unsigned char checksum_before[16], checksum_after[16]; + struct md5_ctx ctx; + htab_t ht; + + ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); + md5_init_ctx (&ctx); + fold_checksum_tree (op0, &ctx, ht); + md5_finish_ctx (&ctx, checksum_before); + htab_empty (ht); +#endif + + tem = fold_unary (code, type, op0); + if (!tem) + tem = build1_stat (code, type, op0 PASS_MEM_STAT); + +#ifdef ENABLE_FOLD_CHECKING + md5_init_ctx (&ctx); + fold_checksum_tree (op0, &ctx, ht); + md5_finish_ctx (&ctx, checksum_after); + htab_delete (ht); - return build1 (code, type, op0); + if (memcmp (checksum_before, checksum_after, 16)) + fold_check_failed (op0, tem); +#endif + return tem; } /* Fold a binary tree expression with code CODE of type TYPE with @@ -10460,13 +10459,52 @@ fold_build1 (enum tree_code code, tree type, tree op0) with operands OP0 and OP1. */ tree -fold_build2 (enum tree_code code, tree type, tree op0, tree op1) +fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1 + MEM_STAT_DECL) { - tree tem = fold_binary (code, type, op0, op1); - if (tem) - return tem; + tree tem; +#ifdef ENABLE_FOLD_CHECKING + unsigned char checksum_before_op0[16], + checksum_before_op1[16], + checksum_after_op0[16], + checksum_after_op1[16]; + struct md5_ctx ctx; + htab_t ht; + + ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); + md5_init_ctx (&ctx); + fold_checksum_tree (op0, &ctx, ht); + md5_finish_ctx (&ctx, checksum_before_op0); + htab_empty (ht); + + md5_init_ctx (&ctx); + fold_checksum_tree (op1, &ctx, ht); + md5_finish_ctx (&ctx, checksum_before_op1); + htab_empty (ht); +#endif - return build2 (code, type, op0, op1); + tem = fold_binary (code, type, op0, op1); + if (!tem) + tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT); + +#ifdef ENABLE_FOLD_CHECKING + md5_init_ctx (&ctx); + fold_checksum_tree (op0, &ctx, ht); + md5_finish_ctx (&ctx, checksum_after_op0); + htab_empty (ht); + + if (memcmp (checksum_before_op0, checksum_after_op0, 16)) + fold_check_failed (op0, tem); + + md5_init_ctx (&ctx); + fold_checksum_tree (op1, &ctx, ht); + md5_finish_ctx (&ctx, checksum_after_op1); + htab_delete (ht); + + if (memcmp (checksum_before_op1, checksum_after_op1, 16)) + fold_check_failed (op1, tem); +#endif + return tem; } /* Fold a ternary tree expression with code CODE of type TYPE with @@ -10475,13 +10513,67 @@ fold_build2 (enum tree_code code, tree type, tree op0, tree op1) type TYPE with operands OP0, OP1, and OP2. 
*/ tree -fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2) +fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2 + MEM_STAT_DECL) { - tree tem = fold_ternary (code, type, op0, op1, op2); - if (tem) - return tem; + tree tem; +#ifdef ENABLE_FOLD_CHECKING + unsigned char checksum_before_op0[16], + checksum_before_op1[16], + checksum_before_op2[16], + checksum_after_op0[16], + checksum_after_op1[16], + checksum_after_op2[16]; + struct md5_ctx ctx; + htab_t ht; + + ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); + md5_init_ctx (&ctx); + fold_checksum_tree (op0, &ctx, ht); + md5_finish_ctx (&ctx, checksum_before_op0); + htab_empty (ht); + + md5_init_ctx (&ctx); + fold_checksum_tree (op1, &ctx, ht); + md5_finish_ctx (&ctx, checksum_before_op1); + htab_empty (ht); + + md5_init_ctx (&ctx); + fold_checksum_tree (op2, &ctx, ht); + md5_finish_ctx (&ctx, checksum_before_op2); + htab_empty (ht); +#endif + + tem = fold_ternary (code, type, op0, op1, op2); + if (!tem) + tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT); + +#ifdef ENABLE_FOLD_CHECKING + md5_init_ctx (&ctx); + fold_checksum_tree (op0, &ctx, ht); + md5_finish_ctx (&ctx, checksum_after_op0); + htab_empty (ht); + + if (memcmp (checksum_before_op0, checksum_after_op0, 16)) + fold_check_failed (op0, tem); + + md5_init_ctx (&ctx); + fold_checksum_tree (op1, &ctx, ht); + md5_finish_ctx (&ctx, checksum_after_op1); + htab_empty (ht); - return build3 (code, type, op0, op1, op2); + if (memcmp (checksum_before_op1, checksum_after_op1, 16)) + fold_check_failed (op1, tem); + + md5_init_ctx (&ctx); + fold_checksum_tree (op2, &ctx, ht); + md5_finish_ctx (&ctx, checksum_after_op2); + htab_delete (ht); + + if (memcmp (checksum_before_op2, checksum_after_op2, 16)) + fold_check_failed (op2, tem); +#endif + return tem; } /* Perform constant folding and related simplification of initializer @@ -10630,10 +10722,17 @@ multiple_of_p (tree type, tree top, tree bottom) int tree_expr_nonnegative_p (tree t) { + if (TYPE_UNSIGNED (TREE_TYPE (t))) + return 1; + switch (TREE_CODE (t)) { case ABS_EXPR: - return 1; + /* We can't return 1 if flag_wrapv is set because + ABS_EXPR = INT_MIN. */ + if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t)))) + return 1; + break; case INTEGER_CST: return tree_int_cst_sgn (t) >= 0; @@ -10816,7 +10915,6 @@ tree_expr_nonnegative_p (tree t) CASE_BUILTIN_F (BUILT_IN_EXP2) CASE_BUILTIN_F (BUILT_IN_FABS) CASE_BUILTIN_F (BUILT_IN_FDIM) - CASE_BUILTIN_F (BUILT_IN_FREXP) CASE_BUILTIN_F (BUILT_IN_HYPOT) CASE_BUILTIN_F (BUILT_IN_POW10) CASE_BUILTIN_I (BUILT_IN_FFS) @@ -10840,6 +10938,7 @@ tree_expr_nonnegative_p (tree t) CASE_BUILTIN_F (BUILT_IN_EXPM1) CASE_BUILTIN_F (BUILT_IN_FLOOR) CASE_BUILTIN_F (BUILT_IN_FMOD) + CASE_BUILTIN_F (BUILT_IN_FREXP) CASE_BUILTIN_F (BUILT_IN_LCEIL) CASE_BUILTIN_F (BUILT_IN_LDEXP) CASE_BUILTIN_F (BUILT_IN_LFLOOR) @@ -10898,7 +10997,7 @@ tree_expr_nonnegative_p (tree t) For floating point we further ensure that T is not denormal. Similar logic is present in nonzero_address in rtlanal.h. */ -static bool +bool tree_expr_nonzero_p (tree t) { tree type = TREE_TYPE (t); @@ -10910,8 +11009,7 @@ tree_expr_nonzero_p (tree t) switch (TREE_CODE (t)) { case ABS_EXPR: - if (!TYPE_UNSIGNED (type) && !flag_wrapv) - return tree_expr_nonzero_p (TREE_OPERAND (t, 0)); + return tree_expr_nonzero_p (TREE_OPERAND (t, 0)); case INTEGER_CST: /* We used to test for !integer_zerop here. 
This does not work correctly @@ -10959,7 +11057,7 @@ tree_expr_nonzero_p (tree t) return false; /* Weak declarations may link to NULL. */ - if (DECL_P (base)) + if (VAR_OR_FUNCTION_DECL_P (base)) return !DECL_WEAK (base); /* Constants are never weak. */ @@ -11006,6 +11104,9 @@ tree_expr_nonzero_p (tree t) return tree_expr_nonzero_p (TREE_OPERAND (t, 1)) || tree_expr_nonzero_p (TREE_OPERAND (t, 0)); + case CALL_EXPR: + return alloca_call_p (t); + default: break; } @@ -11370,17 +11471,17 @@ build_fold_addr_expr (tree t) return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t))); } -/* Given a pointer value T, return a simplified version of an indirection - through T, or NULL_TREE if no simplification is possible. */ +/* Given a pointer value OP0 and a type TYPE, return a simplified version + of an indirection through OP0, or NULL_TREE if no simplification is + possible. */ -static tree -fold_indirect_ref_1 (tree t) +tree +fold_indirect_ref_1 (tree type, tree op0) { - tree type = TREE_TYPE (TREE_TYPE (t)); - tree sub = t; + tree sub = op0; tree subtype; - STRIP_TYPE_NOPS (sub); + STRIP_NOPS (sub); subtype = TREE_TYPE (sub); if (!POINTER_TYPE_P (subtype)) return NULL_TREE; @@ -11390,11 +11491,11 @@ fold_indirect_ref_1 (tree t) tree op = TREE_OPERAND (sub, 0); tree optype = TREE_TYPE (op); /* *&p => p */ - if (lang_hooks.types_compatible_p (type, optype)) + if (type == optype) return op; /* *(foo *)&fooarray => fooarray[0] */ else if (TREE_CODE (optype) == ARRAY_TYPE - && lang_hooks.types_compatible_p (type, TREE_TYPE (optype))) + && type == TREE_TYPE (optype)) { tree type_domain = TYPE_DOMAIN (optype); tree min_val = size_zero_node; @@ -11406,7 +11507,7 @@ fold_indirect_ref_1 (tree t) /* *(foo *)fooarrptr => (*fooarrptr)[0] */ if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE - && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype)))) + && type == TREE_TYPE (TREE_TYPE (subtype))) { tree type_domain; tree min_val = size_zero_node; @@ -11426,12 +11527,13 @@ fold_indirect_ref_1 (tree t) tree build_fold_indirect_ref (tree t) { - tree sub = fold_indirect_ref_1 (t); + tree type = TREE_TYPE (TREE_TYPE (t)); + tree sub = fold_indirect_ref_1 (type, t); if (sub) return sub; else - return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (t)), t); + return build1 (INDIRECT_REF, type, t); } /* Given an INDIRECT_REF T, return either T or a simplified version. */ @@ -11439,7 +11541,7 @@ build_fold_indirect_ref (tree t) tree fold_indirect_ref (tree t) { - tree sub = fold_indirect_ref_1 (TREE_OPERAND (t, 0)); + tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0)); if (sub) return sub; @@ -11605,9 +11707,7 @@ split_address_to_core_and_offset (tree exp, core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos, poffset, &mode, &unsignedp, &volatilep, false); - - if (TREE_CODE (core) == INDIRECT_REF) - core = TREE_OPERAND (core, 0); + core = build_fold_addr_expr (core); } else {
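
Several of the new folds recorded in the hunks above rest on arithmetic identities that appear only as source comments: the power-of-two modulus rewrite ("X % C" into "X & C2"), the (X | Y) ^ X -> Y & ~X simplification, the shift-combining transform ((a OP c1) OP c2 into a OP (c1+c2)), and the shift/mask transforms for (x >> c) << c and (x << c) >> c. The short, self-contained C sketch below simply checks those identities on unsigned values; it is illustrative only — the variable names and the assert-based harness are assumptions made for the example, not GCC code and not part of the commit.

/* Minimal check of the identities behind the new fold_binary cases.
   Assumes unsigned (or known-nonnegative) operands, as the patch does.  */

#include <assert.h>

int
main (void)
{
  unsigned int x = 0xDEADBEEFu;
  unsigned int y = 0x01234567u;
  unsigned int c = 5;

  /* X % C -> X & (C - 1) when C is a power of two, matching the
     TRUNC_MOD_EXPR/FLOOR_MOD_EXPR hunk.  */
  assert ((x % 8u) == (x & 7u));

  /* (X | Y) ^ X -> Y & ~X; the same identity covers all four
     symmetric BIT_IOR_EXPR ^ X hunks.  */
  assert (((x | y) ^ x) == (y & ~x));
  assert ((x ^ (y | x)) == (y & ~x));

  /* (X >> C) << C -> X & (-1 << C), and for unsigned X,
     (X << C) >> C -> X & ((unsigned) -1 >> C).  */
  assert (((x >> c) << c) == (x & (~0u << c)));
  assert (((x << c) >> c) == (x & (~0u >> c)));

  /* (A >> C1) >> C2 -> A >> (C1 + C2) while C1 + C2 stays below
     the width of the type.  */
  assert (((x >> 3) >> 4) == (x >> 7));

  return 0;
}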