X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Fsimplify-rtx.c;h=92567fe3c41b79d4bda8df41990916b5d746dc52;hb=cd6dccd365b9354fc13cdb1604ff2a71e610072b;hp=0bef69e986fc3499fa3ee7670c55328829218f83;hpb=a6629703d0608f506dfc32c20e017d6b0c6ce535;p=pf3gnuchains%2Fgcc-fork.git

diff --git a/gcc/simplify-rtx.c b/gcc/simplify-rtx.c
index 0bef69e986f..92567fe3c41 100644
--- a/gcc/simplify-rtx.c
+++ b/gcc/simplify-rtx.c
@@ -1,6 +1,6 @@
 /* RTL simplification functions for GNU compiler.
    Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
-   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -50,7 +50,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
   ((((HOST_WIDE_INT) low) < 0) ? ((HOST_WIDE_INT) -1) : ((HOST_WIDE_INT) 0))
 
 static rtx neg_const_int (enum machine_mode, rtx);
-static bool mode_signbit_p (enum machine_mode, rtx);
+static bool plus_minus_operand_p (rtx);
 static int simplify_plus_minus_op_data_cmp (const void *, const void *);
 static rtx simplify_plus_minus (enum rtx_code, enum machine_mode, rtx,
 				rtx, int);
@@ -72,7 +72,7 @@ neg_const_int (enum machine_mode mode, rtx i)
 /* Test whether expression, X, is an immediate constant that represents
    the most significant bit of machine mode MODE.  */
 
-static bool
+bool
 mode_signbit_p (enum machine_mode mode, rtx x)
 {
   unsigned HOST_WIDE_INT val;
@@ -240,38 +240,38 @@ simplify_gen_relational (enum rtx_code code, enum machine_mode mode,
   return gen_rtx_fmt_ee (code, mode, op0, op1);
 }
 
-/* Replace all occurrences of OLD in X with NEW and try to simplify the
+/* Replace all occurrences of OLD_RTX in X with NEW_RTX and try to simplify the
    resulting RTX.  Return a new RTX which is as simplified as possible.  */
 
 rtx
-simplify_replace_rtx (rtx x, rtx old, rtx new)
+simplify_replace_rtx (rtx x, rtx old_rtx, rtx new_rtx)
 {
   enum rtx_code code = GET_CODE (x);
   enum machine_mode mode = GET_MODE (x);
   enum machine_mode op_mode;
   rtx op0, op1, op2;
 
-  /* If X is OLD, return NEW.  Otherwise, if this is an expression, try
+  /* If X is OLD_RTX, return NEW_RTX.  Otherwise, if this is an expression, try
      to build a new expression substituting recursively.  If we can't do
      anything, return our input.  */
 
-  if (x == old)
-    return new;
+  if (x == old_rtx)
+    return new_rtx;
 
   switch (GET_RTX_CLASS (code))
     {
     case RTX_UNARY:
       op0 = XEXP (x, 0);
       op_mode = GET_MODE (op0);
-      op0 = simplify_replace_rtx (op0, old, new);
+      op0 = simplify_replace_rtx (op0, old_rtx, new_rtx);
       if (op0 == XEXP (x, 0))
         return x;
       return simplify_gen_unary (code, mode, op0, op_mode);
 
     case RTX_BIN_ARITH:
     case RTX_COMM_ARITH:
-      op0 = simplify_replace_rtx (XEXP (x, 0), old, new);
-      op1 = simplify_replace_rtx (XEXP (x, 1), old, new);
+      op0 = simplify_replace_rtx (XEXP (x, 0), old_rtx, new_rtx);
+      op1 = simplify_replace_rtx (XEXP (x, 1), old_rtx, new_rtx);
       if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
         return x;
       return simplify_gen_binary (code, mode, op0, op1);
@@ -281,8 +281,8 @@ simplify_replace_rtx (rtx x, rtx old, rtx new)
       op0 = XEXP (x, 0);
       op1 = XEXP (x, 1);
       op_mode = GET_MODE (op0) != VOIDmode ? GET_MODE (op0) : GET_MODE (op1);
-      op0 = simplify_replace_rtx (op0, old, new);
-      op1 = simplify_replace_rtx (op1, old, new);
+      op0 = simplify_replace_rtx (op0, old_rtx, new_rtx);
+      op1 = simplify_replace_rtx (op1, old_rtx, new_rtx);
       if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1))
         return x;
       return simplify_gen_relational (code, mode, op_mode, op0, op1);
@@ -291,9 +291,9 @@ simplify_replace_rtx (rtx x, rtx old, rtx new)
     case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
       op0 = XEXP (x, 0);
       op_mode = GET_MODE (op0);
-      op0 = simplify_replace_rtx (op0, old, new);
-      op1 = simplify_replace_rtx (XEXP (x, 1), old, new);
-      op2 = simplify_replace_rtx (XEXP (x, 2), old, new);
+      op0 = simplify_replace_rtx (op0, old_rtx, new_rtx);
+      op1 = simplify_replace_rtx (XEXP (x, 1), old_rtx, new_rtx);
+      op2 = simplify_replace_rtx (XEXP (x, 2), old_rtx, new_rtx);
       if (op0 == XEXP (x, 0) && op1 == XEXP (x, 1) && op2 == XEXP (x, 2))
         return x;
       if (op_mode == VOIDmode)
@@ -304,7 +304,7 @@ simplify_replace_rtx (rtx x, rtx old, rtx new)
       /* The only case we try to handle is a SUBREG.  */
       if (code == SUBREG)
         {
-          op0 = simplify_replace_rtx (SUBREG_REG (x), old, new);
+          op0 = simplify_replace_rtx (SUBREG_REG (x), old_rtx, new_rtx);
           if (op0 == SUBREG_REG (x))
             return x;
           op0 = simplify_gen_subreg (GET_MODE (x), op0,
@@ -317,15 +317,15 @@ simplify_replace_rtx (rtx x, rtx old, rtx new)
     case RTX_OBJ:
       if (code == MEM)
         {
-          op0 = simplify_replace_rtx (XEXP (x, 0), old, new);
+          op0 = simplify_replace_rtx (XEXP (x, 0), old_rtx, new_rtx);
           if (op0 == XEXP (x, 0))
             return x;
           return replace_equiv_address_nv (x, op0);
         }
       else if (code == LO_SUM)
         {
-          op0 = simplify_replace_rtx (XEXP (x, 0), old, new);
-          op1 = simplify_replace_rtx (XEXP (x, 1), old, new);
+          op0 = simplify_replace_rtx (XEXP (x, 0), old_rtx, new_rtx);
+          op1 = simplify_replace_rtx (XEXP (x, 1), old_rtx, new_rtx);
 
           /* (lo_sum (high x) x) -> x */
           if (GET_CODE (op0) == HIGH && rtx_equal_p (XEXP (op0, 0), op1))
@@ -337,8 +337,8 @@ simplify_replace_rtx (rtx x, rtx old, rtx new)
         }
       else if (code == REG)
         {
-          if (REG_P (old) && REGNO (x) == REGNO (old))
-            return new;
+          if (rtx_equal_p (x, old_rtx))
+            return new_rtx;
         }
       break;
 
@@ -360,16 +360,15 @@ simplify_unary_operation (enum rtx_code code, enum machine_mode mode,
 
   if (code == VEC_DUPLICATE)
     {
-      if (!VECTOR_MODE_P (mode))
-        abort ();
-      if (GET_MODE (trueop) != VOIDmode
-          && !VECTOR_MODE_P (GET_MODE (trueop))
-          && GET_MODE_INNER (mode) != GET_MODE (trueop))
-        abort ();
-      if (GET_MODE (trueop) != VOIDmode
-          && VECTOR_MODE_P (GET_MODE (trueop))
-          && GET_MODE_INNER (mode) != GET_MODE_INNER (GET_MODE (trueop)))
-        abort ();
+      gcc_assert (VECTOR_MODE_P (mode));
+      if (GET_MODE (trueop) != VOIDmode)
+        {
+          if (!VECTOR_MODE_P (GET_MODE (trueop)))
+            gcc_assert (GET_MODE_INNER (mode) == GET_MODE (trueop));
+          else
+            gcc_assert (GET_MODE_INNER (mode) == GET_MODE_INNER
+                                                 (GET_MODE (trueop)));
+        }
       if (GET_CODE (trueop) == CONST_INT || GET_CODE (trueop) == CONST_DOUBLE
           || GET_CODE (trueop) == CONST_VECTOR)
         {
@@ -387,8 +386,8 @@ simplify_unary_operation (enum rtx_code code, enum machine_mode mode,
           int in_elt_size = GET_MODE_SIZE (GET_MODE_INNER (inmode));
           unsigned in_n_elts = (GET_MODE_SIZE (inmode) / in_elt_size);
 
-          if (in_n_elts >= n_elts || n_elts % in_n_elts)
-            abort ();
+          gcc_assert (in_n_elts < n_elts);
+          gcc_assert ((n_elts % in_n_elts) == 0);
           for (i = 0; i < n_elts; i++)
             RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop, i % in_n_elts);
         }
@@ -408,9 +407,7 @@ simplify_unary_operation (enum rtx_code code, enum machine_mode mode,
           rtvec v = rtvec_alloc (n_elts);
           unsigned int i;
 
-          if
(op_n_elts != n_elts) - abort (); - + gcc_assert (op_n_elts == n_elts); for (i = 0; i < n_elts; i++) { rtx x = simplify_unary_operation (code, GET_MODE_INNER (mode), @@ -541,15 +538,13 @@ simplify_unary_operation (enum rtx_code code, enum machine_mode mode, case ZERO_EXTEND: /* When zero-extending a CONST_INT, we need to know its original mode. */ - if (op_mode == VOIDmode) - abort (); + gcc_assert (op_mode != VOIDmode); if (GET_MODE_BITSIZE (op_mode) == HOST_BITS_PER_WIDE_INT) { /* If we were really extending the mode, we would have to distinguish between zero-extension and sign-extension. */ - if (width != GET_MODE_BITSIZE (op_mode)) - abort (); + gcc_assert (width == GET_MODE_BITSIZE (op_mode)); val = arg0; } else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT) @@ -566,8 +561,7 @@ simplify_unary_operation (enum rtx_code code, enum machine_mode mode, /* If we were really extending the mode, we would have to distinguish between zero-extension and sign-extension. */ - if (width != GET_MODE_BITSIZE (op_mode)) - abort (); + gcc_assert (width == GET_MODE_BITSIZE (op_mode)); val = arg0; } else if (GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT) @@ -590,7 +584,7 @@ simplify_unary_operation (enum rtx_code code, enum machine_mode mode, return 0; default: - abort (); + gcc_unreachable (); } val = trunc_int_for_mode (val, mode); @@ -690,8 +684,7 @@ simplify_unary_operation (enum rtx_code code, enum machine_mode mode, break; case ZERO_EXTEND: - if (op_mode == VOIDmode) - abort (); + gcc_assert (op_mode != VOIDmode); if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT) return 0; @@ -766,7 +759,7 @@ simplify_unary_operation (enum rtx_code code, enum machine_mode mode, real_from_target (&d, tmp, mode); } default: - abort (); + gcc_unreachable (); } return CONST_DOUBLE_FROM_REAL_VALUE (d, mode); } @@ -864,7 +857,7 @@ simplify_unary_operation (enum rtx_code code, enum machine_mode mode, break; default: - abort (); + gcc_unreachable (); } return immed_double_const (xl, xh, mode); } @@ -1065,7 +1058,7 @@ simplify_unary_operation (enum rtx_code code, enum machine_mode mode, && mode == Pmode && GET_MODE (op) == ptr_mode && (CONSTANT_P (op) || (GET_CODE (op) == SUBREG - && GET_CODE (SUBREG_REG (op)) == REG + && REG_P (SUBREG_REG (op)) && REG_POINTER (SUBREG_REG (op)) && GET_MODE (SUBREG_REG (op)) == Pmode))) return convert_memory_address (Pmode, op); @@ -1087,7 +1080,7 @@ simplify_unary_operation (enum rtx_code code, enum machine_mode mode, && mode == Pmode && GET_MODE (op) == ptr_mode && (CONSTANT_P (op) || (GET_CODE (op) == SUBREG - && GET_CODE (SUBREG_REG (op)) == REG + && REG_P (SUBREG_REG (op)) && REG_POINTER (SUBREG_REG (op)) && GET_MODE (SUBREG_REG (op)) == Pmode))) return convert_memory_address (Pmode, op); @@ -1175,16 +1168,12 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, rtx trueop0, trueop1; rtx tem; -#ifdef ENABLE_CHECKING /* Relational operations don't work here. We must know the mode of the operands in order to do the comparison correctly. Assuming a full word can give incorrect results. Consider comparing 128 with -128 in QImode. */ - - if (GET_RTX_CLASS (code) == RTX_COMPARE - || GET_RTX_CLASS (code) == RTX_COMM_COMPARE) - abort (); -#endif + gcc_assert (GET_RTX_CLASS (code) != RTX_COMPARE); + gcc_assert (GET_RTX_CLASS (code) != RTX_COMM_COMPARE); /* Make sure the constant is second. 
*/ if (GET_RTX_CLASS (code) == RTX_COMM_ARITH @@ -1197,23 +1186,20 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, trueop1 = avoid_constant_pool_reference (op1); if (VECTOR_MODE_P (mode) + && code != VEC_CONCAT && GET_CODE (trueop0) == CONST_VECTOR && GET_CODE (trueop1) == CONST_VECTOR) { - int elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode)); - unsigned n_elts = (GET_MODE_SIZE (mode) / elt_size); + unsigned n_elts = GET_MODE_NUNITS (mode); enum machine_mode op0mode = GET_MODE (trueop0); - int op0_elt_size = GET_MODE_SIZE (GET_MODE_INNER (op0mode)); - unsigned op0_n_elts = (GET_MODE_SIZE (op0mode) / op0_elt_size); + unsigned op0_n_elts = GET_MODE_NUNITS (op0mode); enum machine_mode op1mode = GET_MODE (trueop1); - int op1_elt_size = GET_MODE_SIZE (GET_MODE_INNER (op1mode)); - unsigned op1_n_elts = (GET_MODE_SIZE (op1mode) / op1_elt_size); + unsigned op1_n_elts = GET_MODE_NUNITS (op1mode); rtvec v = rtvec_alloc (n_elts); unsigned int i; - if (op0_n_elts != n_elts || op1_n_elts != n_elts) - abort (); - + gcc_assert (op0_n_elts == n_elts); + gcc_assert (op1_n_elts == n_elts); for (i = 0; i < n_elts; i++) { rtx x = simplify_binary_operation (code, GET_MODE_INNER (mode), @@ -1227,6 +1213,41 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, return gen_rtx_CONST_VECTOR (mode, v); } + if (VECTOR_MODE_P (mode) + && code == VEC_CONCAT + && CONSTANT_P (trueop0) && CONSTANT_P (trueop1)) + { + unsigned n_elts = GET_MODE_NUNITS (mode); + rtvec v = rtvec_alloc (n_elts); + + gcc_assert (n_elts >= 2); + if (n_elts == 2) + { + gcc_assert (GET_CODE (trueop0) != CONST_VECTOR); + gcc_assert (GET_CODE (trueop1) != CONST_VECTOR); + + RTVEC_ELT (v, 0) = trueop0; + RTVEC_ELT (v, 1) = trueop1; + } + else + { + unsigned op0_n_elts = GET_MODE_NUNITS (GET_MODE (trueop0)); + unsigned op1_n_elts = GET_MODE_NUNITS (GET_MODE (trueop1)); + unsigned i; + + gcc_assert (GET_CODE (trueop0) == CONST_VECTOR); + gcc_assert (GET_CODE (trueop1) == CONST_VECTOR); + gcc_assert (op0_n_elts + op1_n_elts == n_elts); + + for (i = 0; i < op0_n_elts; ++i) + RTVEC_ELT (v, i) = XVECEXP (trueop0, 0, i); + for (i = 0; i < op1_n_elts; ++i) + RTVEC_ELT (v, op0_n_elts+i) = XVECEXP (trueop1, 0, i); + } + + return gen_rtx_CONST_VECTOR (mode, v); + } + if (GET_MODE_CLASS (mode) == MODE_FLOAT && GET_CODE (trueop0) == CONST_DOUBLE && GET_CODE (trueop1) == CONST_DOUBLE @@ -1247,26 +1268,33 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, GET_MODE (op1)); for (i = 0; i < 4; i++) { - if (code == AND) + switch (code) + { + case AND: tmp0[i] &= tmp1[i]; - else if (code == IOR) + break; + case IOR: tmp0[i] |= tmp1[i]; - else if (code == XOR) + break; + case XOR: tmp0[i] ^= tmp1[i]; - else - abort (); + break; + default: + gcc_unreachable (); + } } real_from_target (&r, tmp0, mode); return CONST_DOUBLE_FROM_REAL_VALUE (r, mode); } else { - REAL_VALUE_TYPE f0, f1, value; + REAL_VALUE_TYPE f0, f1, value, result; + bool inexact; REAL_VALUE_FROM_CONST_DOUBLE (f0, trueop0); REAL_VALUE_FROM_CONST_DOUBLE (f1, trueop1); - f0 = real_value_truncate (mode, f0); - f1 = real_value_truncate (mode, f1); + real_convert (&f0, mode, &f0); + real_convert (&f1, mode, &f1); if (HONOR_SNANS (mode) && (REAL_VALUE_ISNAN (f0) || REAL_VALUE_ISNAN (f1))) @@ -1277,10 +1305,53 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, && (flag_trapping_math || ! 
MODE_HAS_INFINITIES (mode))) return 0; - REAL_ARITHMETIC (value, rtx_to_tree_code (code), f0, f1); + if (MODE_HAS_INFINITIES (mode) && HONOR_NANS (mode) + && flag_trapping_math + && REAL_VALUE_ISINF (f0) && REAL_VALUE_ISINF (f1)) + { + int s0 = REAL_VALUE_NEGATIVE (f0); + int s1 = REAL_VALUE_NEGATIVE (f1); + + switch (code) + { + case PLUS: + /* Inf + -Inf = NaN plus exception. */ + if (s0 != s1) + return 0; + break; + case MINUS: + /* Inf - Inf = NaN plus exception. */ + if (s0 == s1) + return 0; + break; + case DIV: + /* Inf / Inf = NaN plus exception. */ + return 0; + default: + break; + } + } + + if (code == MULT && MODE_HAS_INFINITIES (mode) && HONOR_NANS (mode) + && flag_trapping_math + && ((REAL_VALUE_ISINF (f0) && REAL_VALUES_EQUAL (f1, dconst0)) + || (REAL_VALUE_ISINF (f1) + && REAL_VALUES_EQUAL (f0, dconst0)))) + /* Inf * 0 = NaN plus exception. */ + return 0; + + inexact = real_arithmetic (&value, rtx_to_tree_code (code), + &f0, &f1); + real_convert (&result, mode, &value); - value = real_value_truncate (mode, value); - return CONST_DOUBLE_FROM_REAL_VALUE (value, mode); + /* Don't constant fold this floating point operation if the + result may dependent upon the run-time rounding mode and + flag_rounding_math is set. */ + if (flag_rounding_math + && (inexact || !real_identical (&result, &value))) + return NULL_RTX; + + return CONST_DOUBLE_FROM_REAL_VALUE (result, mode); } } @@ -1471,13 +1542,12 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, if the multiplication is written as a shift. If so, we can distribute and make a new multiply, shift, or maybe just have X (if C is 2 in the example above). But don't make - real multiply if we didn't have one before. */ + something more expensive than we had before. */ if (! FLOAT_MODE_P (mode)) { HOST_WIDE_INT coeff0 = 1, coeff1 = 1; rtx lhs = op0, rhs = op1; - int had_mult = 0; if (GET_CODE (lhs) == NEG) coeff0 = -1, lhs = XEXP (lhs, 0); @@ -1485,7 +1555,6 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, && GET_CODE (XEXP (lhs, 1)) == CONST_INT) { coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0); - had_mult = 1; } else if (GET_CODE (lhs) == ASHIFT && GET_CODE (XEXP (lhs, 1)) == CONST_INT @@ -1502,7 +1571,6 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, && GET_CODE (XEXP (rhs, 1)) == CONST_INT) { coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0); - had_mult = 1; } else if (GET_CODE (rhs) == ASHIFT && GET_CODE (XEXP (rhs, 1)) == CONST_INT @@ -1515,9 +1583,11 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, if (rtx_equal_p (lhs, rhs)) { + rtx orig = gen_rtx_PLUS (mode, op0, op1); tem = simplify_gen_binary (MULT, mode, lhs, - GEN_INT (coeff0 + coeff1)); - return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem; + GEN_INT (coeff0 + coeff1)); + return rtx_cost (tem, SET) <= rtx_cost (orig, SET) + ? tem : 0; } } @@ -1539,12 +1609,8 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, and subtle programs can break if operations are associated. 
*/ if (INTEGRAL_MODE_P (mode) - && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS - || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS - || (GET_CODE (op0) == CONST - && GET_CODE (XEXP (op0, 0)) == PLUS) - || (GET_CODE (op1) == CONST - && GET_CODE (XEXP (op1, 0)) == PLUS)) + && (plus_minus_operand_p (op0) + || plus_minus_operand_p (op1)) && (tem = simplify_plus_minus (code, mode, op0, op1, 0)) != 0) return tem; @@ -1584,7 +1650,7 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, #ifdef HAVE_cc0 if (GET_CODE (xop00) == CC0 && GET_CODE (xop10) == CC0) #else - if (GET_CODE (xop00) == REG && GET_CODE (xop10) == REG + if (REG_P (xop00) && REG_P (xop10) && GET_MODE (xop00) == GET_MODE (xop10) && REGNO (xop00) == REGNO (xop10) && GET_MODE_CLASS (GET_MODE (xop00)) == MODE_CC @@ -1626,13 +1692,12 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, if the multiplication is written as a shift. If so, we can distribute and make a new multiply, shift, or maybe just have X (if C is 2 in the example above). But don't make - real multiply if we didn't have one before. */ + something more expensive than we had before. */ if (! FLOAT_MODE_P (mode)) { HOST_WIDE_INT coeff0 = 1, coeff1 = 1; rtx lhs = op0, rhs = op1; - int had_mult = 0; if (GET_CODE (lhs) == NEG) coeff0 = -1, lhs = XEXP (lhs, 0); @@ -1640,7 +1705,6 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, && GET_CODE (XEXP (lhs, 1)) == CONST_INT) { coeff0 = INTVAL (XEXP (lhs, 1)), lhs = XEXP (lhs, 0); - had_mult = 1; } else if (GET_CODE (lhs) == ASHIFT && GET_CODE (XEXP (lhs, 1)) == CONST_INT @@ -1657,7 +1721,6 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, && GET_CODE (XEXP (rhs, 1)) == CONST_INT) { coeff1 = INTVAL (XEXP (rhs, 1)), rhs = XEXP (rhs, 0); - had_mult = 1; } else if (GET_CODE (rhs) == ASHIFT && GET_CODE (XEXP (rhs, 1)) == CONST_INT @@ -1670,9 +1733,11 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, if (rtx_equal_p (lhs, rhs)) { + rtx orig = gen_rtx_MINUS (mode, op0, op1); tem = simplify_gen_binary (MULT, mode, lhs, GEN_INT (coeff0 - coeff1)); - return (GET_CODE (tem) == MULT && ! had_mult) ? 0 : tem; + return rtx_cost (tem, SET) <= rtx_cost (orig, SET) + ? tem : 0; } } @@ -1697,12 +1762,8 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, and subtle programs can break if operations are associated. */ if (INTEGRAL_MODE_P (mode) - && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS - || GET_CODE (op1) == PLUS || GET_CODE (op1) == MINUS - || (GET_CODE (op0) == CONST - && GET_CODE (XEXP (op0, 0)) == PLUS) - || (GET_CODE (op1) == CONST - && GET_CODE (XEXP (op1, 0)) == PLUS)) + && (plus_minus_operand_p (op0) + || plus_minus_operand_p (op1)) && (tem = simplify_plus_minus (code, mode, op0, op1, 0)) != 0) return tem; @@ -1758,8 +1819,7 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, uppermost bit is set, then this isn't a power of two due to implicit sign extension. */ && (width <= HOST_BITS_PER_WIDE_INT - || val != HOST_BITS_PER_WIDE_INT - 1) - && ! rtx_equal_function_value_matters) + || val != HOST_BITS_PER_WIDE_INT - 1)) return simplify_gen_binary (ASHIFT, mode, op0, GEN_INT (val)); /* x*2 is x+x and x*(-1) is -x */ @@ -1844,9 +1904,11 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, case AND: if (trueop1 == const0_rtx && ! 
side_effects_p (op0)) return const0_rtx; + /* If we are turning off bits already known off in OP0, we need + not do an AND. */ if (GET_CODE (trueop1) == CONST_INT - && ((INTVAL (trueop1) & GET_MODE_MASK (mode)) - == GET_MODE_MASK (mode))) + && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT + && (nonzero_bits (trueop0, mode) & ~INTVAL (trueop1)) == 0) return op0; if (trueop0 == trueop1 && ! side_effects_p (op0) && GET_MODE_CLASS (mode) != MODE_CC) @@ -1857,6 +1919,69 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, && ! side_effects_p (op0) && GET_MODE_CLASS (mode) != MODE_CC) return const0_rtx; + + /* Transform (and (extend X) C) into (zero_extend (and X C)) if + there are no non-zero bits of C outside of X's mode. */ + if ((GET_CODE (op0) == SIGN_EXTEND + || GET_CODE (op0) == ZERO_EXTEND) + && GET_CODE (trueop1) == CONST_INT + && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT + && (~GET_MODE_MASK (GET_MODE (XEXP (op0, 0))) + & INTVAL (trueop1)) == 0) + { + enum machine_mode imode = GET_MODE (XEXP (op0, 0)); + tem = simplify_gen_binary (AND, imode, XEXP (op0, 0), + gen_int_mode (INTVAL (trueop1), + imode)); + return simplify_gen_unary (ZERO_EXTEND, mode, tem, imode); + } + + /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M, + ((A & N) + B) & M -> (A + B) & M + Similarly if (N & M) == 0, + ((A | N) + B) & M -> (A + B) & M + and for - instead of + and/or ^ instead of |. */ + if (GET_CODE (trueop1) == CONST_INT + && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT + && ~INTVAL (trueop1) + && (INTVAL (trueop1) & (INTVAL (trueop1) + 1)) == 0 + && (GET_CODE (op0) == PLUS || GET_CODE (op0) == MINUS)) + { + rtx pmop[2]; + int which; + + pmop[0] = XEXP (op0, 0); + pmop[1] = XEXP (op0, 1); + + for (which = 0; which < 2; which++) + { + tem = pmop[which]; + switch (GET_CODE (tem)) + { + case AND: + if (GET_CODE (XEXP (tem, 1)) == CONST_INT + && (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1)) + == INTVAL (trueop1)) + pmop[which] = XEXP (tem, 0); + break; + case IOR: + case XOR: + if (GET_CODE (XEXP (tem, 1)) == CONST_INT + && (INTVAL (XEXP (tem, 1)) & INTVAL (trueop1)) == 0) + pmop[which] = XEXP (tem, 0); + break; + default: + break; + } + } + + if (pmop[0] != XEXP (op0, 0) || pmop[1] != XEXP (op0, 1)) + { + tem = simplify_gen_binary (GET_CODE (op0), mode, + pmop[0], pmop[1]); + return simplify_gen_binary (code, mode, tem, op1); + } + } tem = simplify_associative_operation (code, mode, op0, op1); if (tem) return tem; @@ -2062,24 +2187,22 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, case VEC_SELECT: if (!VECTOR_MODE_P (mode)) { - if (!VECTOR_MODE_P (GET_MODE (trueop0)) - || (mode - != GET_MODE_INNER (GET_MODE (trueop0))) - || GET_CODE (trueop1) != PARALLEL - || XVECLEN (trueop1, 0) != 1 - || GET_CODE (XVECEXP (trueop1, 0, 0)) != CONST_INT) - abort (); + gcc_assert (VECTOR_MODE_P (GET_MODE (trueop0))); + gcc_assert (mode == GET_MODE_INNER (GET_MODE (trueop0))); + gcc_assert (GET_CODE (trueop1) == PARALLEL); + gcc_assert (XVECLEN (trueop1, 0) == 1); + gcc_assert (GET_CODE (XVECEXP (trueop1, 0, 0)) == CONST_INT); if (GET_CODE (trueop0) == CONST_VECTOR) - return CONST_VECTOR_ELT (trueop0, INTVAL (XVECEXP (trueop1, 0, 0))); + return CONST_VECTOR_ELT (trueop0, INTVAL (XVECEXP + (trueop1, 0, 0))); } else { - if (!VECTOR_MODE_P (GET_MODE (trueop0)) - || (GET_MODE_INNER (mode) - != GET_MODE_INNER (GET_MODE (trueop0))) - || GET_CODE (trueop1) != PARALLEL) - abort (); + gcc_assert (VECTOR_MODE_P (GET_MODE (trueop0))); + gcc_assert 
(GET_MODE_INNER (mode) + == GET_MODE_INNER (GET_MODE (trueop0))); + gcc_assert (GET_CODE (trueop1) == PARALLEL); if (GET_CODE (trueop0) == CONST_VECTOR) { @@ -2088,15 +2211,14 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, rtvec v = rtvec_alloc (n_elts); unsigned int i; - if (XVECLEN (trueop1, 0) != (int) n_elts) - abort (); + gcc_assert (XVECLEN (trueop1, 0) == (int) n_elts); for (i = 0; i < n_elts; i++) { rtx x = XVECEXP (trueop1, 0, i); - if (GET_CODE (x) != CONST_INT) - abort (); - RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0, INTVAL (x)); + gcc_assert (GET_CODE (x) == CONST_INT); + RTVEC_ELT (v, i) = CONST_VECTOR_ELT (trueop0, + INTVAL (x)); } return gen_rtx_CONST_VECTOR (mode, v); @@ -2112,24 +2234,21 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, ? GET_MODE (trueop1) : GET_MODE_INNER (mode)); - if (!VECTOR_MODE_P (mode) - || (GET_MODE_SIZE (op0_mode) + GET_MODE_SIZE (op1_mode) - != GET_MODE_SIZE (mode))) - abort (); - - if ((VECTOR_MODE_P (op0_mode) - && (GET_MODE_INNER (mode) - != GET_MODE_INNER (op0_mode))) - || (!VECTOR_MODE_P (op0_mode) - && GET_MODE_INNER (mode) != op0_mode)) - abort (); - - if ((VECTOR_MODE_P (op1_mode) - && (GET_MODE_INNER (mode) - != GET_MODE_INNER (op1_mode))) - || (!VECTOR_MODE_P (op1_mode) - && GET_MODE_INNER (mode) != op1_mode)) - abort (); + gcc_assert (VECTOR_MODE_P (mode)); + gcc_assert (GET_MODE_SIZE (op0_mode) + GET_MODE_SIZE (op1_mode) + == GET_MODE_SIZE (mode)); + + if (VECTOR_MODE_P (op0_mode)) + gcc_assert (GET_MODE_INNER (mode) + == GET_MODE_INNER (op0_mode)); + else + gcc_assert (GET_MODE_INNER (mode) == op0_mode); + + if (VECTOR_MODE_P (op1_mode)) + gcc_assert (GET_MODE_INNER (mode) + == GET_MODE_INNER (op1_mode)); + else + gcc_assert (GET_MODE_INNER (mode) == op1_mode); if ((GET_CODE (trueop0) == CONST_VECTOR || GET_CODE (trueop0) == CONST_INT @@ -2171,7 +2290,7 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, return 0; default: - abort (); + gcc_unreachable (); } return 0; @@ -2263,41 +2382,26 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, break; case LSHIFTRT: - /* If shift count is undefined, don't fold it; let the machine do - what it wants. But truncate it if the machine will do that. */ - if (arg1 < 0) - return 0; - - if (SHIFT_COUNT_TRUNCATED) - arg1 %= width; - - val = ((unsigned HOST_WIDE_INT) arg0) >> arg1; - break; - case ASHIFT: - if (arg1 < 0) - return 0; - - if (SHIFT_COUNT_TRUNCATED) - arg1 %= width; - - val = ((unsigned HOST_WIDE_INT) arg0) << arg1; - break; - case ASHIFTRT: - if (arg1 < 0) - return 0; - + /* Truncate the shift if SHIFT_COUNT_TRUNCATED, otherwise make sure the + value is in range. We can't return any old value for out-of-range + arguments because either the middle-end (via shift_truncation_mask) + or the back-end might be relying on target-specific knowledge. + Nor can we rely on shift_truncation_mask, since the shift might + not be part of an ashlM3, lshrM3 or ashrM3 instruction. */ if (SHIFT_COUNT_TRUNCATED) - arg1 %= width; - - val = arg0s >> arg1; + arg1 = (unsigned HOST_WIDE_INT) arg1 % width; + else if (arg1 < 0 || arg1 >= GET_MODE_BITSIZE (mode)) + return 0; - /* Bootstrap compiler may not have sign extended the right shift. - Manually extend the sign to insure bootstrap cc matches gcc. */ - if (arg0s < 0 && arg1 > 0) - val |= ((HOST_WIDE_INT) -1) << (HOST_BITS_PER_WIDE_INT - arg1); + val = (code == ASHIFT + ? 
((unsigned HOST_WIDE_INT) arg0) << arg1 + : ((unsigned HOST_WIDE_INT) arg0) >> arg1); + /* Sign-extend the result for arithmetic right shifts. */ + if (code == ASHIFTRT && arg0s < 0 && arg1 > 0) + val |= ((HOST_WIDE_INT) -1) << (width - arg1); break; case ROTATERT: @@ -2348,7 +2452,7 @@ simplify_binary_operation (enum rtx_code code, enum machine_mode mode, return 0; default: - abort (); + gcc_unreachable (); } val = trunc_int_for_mode (val, mode); @@ -2624,9 +2728,21 @@ simplify_plus_minus (enum rtx_code code, enum machine_mode mode, rtx op0, return result; } +/* Check whether an operand is suitable for calling simplify_plus_minus. */ +static bool +plus_minus_operand_p (rtx x) +{ + return GET_CODE (x) == PLUS + || GET_CODE (x) == MINUS + || (GET_CODE (x) == CONST + && GET_CODE (XEXP (x, 0)) == PLUS + && CONSTANT_P (XEXP (XEXP (x, 0), 0)) + && CONSTANT_P (XEXP (XEXP (x, 0), 1))); +} + /* Like simplify_binary_operation except used for relational operators. MODE is the mode of the result. If MODE is VOIDmode, both operands must - also be VOIDmode. + not also be VOIDmode. CMP_MODE specifies in which mode the comparison is done in, so it is the mode of the operands. If CMP_MODE is VOIDmode, it is taken from @@ -2646,19 +2762,45 @@ simplify_relational_operation (enum rtx_code code, enum machine_mode mode, tem = simplify_const_relational_operation (code, cmp_mode, op0, op1); if (tem) { -#ifdef FLOAT_STORE_FLAG_VALUE if (GET_MODE_CLASS (mode) == MODE_FLOAT) { if (tem == const0_rtx) return CONST0_RTX (mode); - else if (GET_MODE_CLASS (mode) == MODE_FLOAT) - { - REAL_VALUE_TYPE val; - val = FLOAT_STORE_FLAG_VALUE (mode); - return CONST_DOUBLE_FROM_REAL_VALUE (val, mode); - } +#ifdef FLOAT_STORE_FLAG_VALUE + { + REAL_VALUE_TYPE val; + val = FLOAT_STORE_FLAG_VALUE (mode); + return CONST_DOUBLE_FROM_REAL_VALUE (val, mode); + } +#else + return NULL_RTX; +#endif } + if (VECTOR_MODE_P (mode)) + { + if (tem == const0_rtx) + return CONST0_RTX (mode); +#ifdef VECTOR_STORE_FLAG_VALUE + { + int i, units; + rtvec v; + + rtx val = VECTOR_STORE_FLAG_VALUE (mode); + if (val == NULL_RTX) + return NULL_RTX; + if (val == const1_rtx) + return CONST1_RTX (mode); + + units = GET_MODE_NUNITS (mode); + v = rtvec_alloc (units); + for (i = 0; i < units; i++) + RTVEC_ELT (v, i) = val; + return gen_rtx_raw_CONST_VECTOR (mode, v); + } +#else + return NULL_RTX; #endif + } return tem; } @@ -2689,10 +2831,13 @@ simplify_relational_operation (enum rtx_code code, enum machine_mode mode, MODE is the mode of the result, while CMP_MODE specifies in which mode the comparison is done in, so it is the mode of the operands. 
*/ -rtx + +static rtx simplify_relational_operation_1 (enum rtx_code code, enum machine_mode mode, enum machine_mode cmp_mode, rtx op0, rtx op1) { + enum rtx_code op0code = GET_CODE (op0); + if (GET_CODE (op1) == CONST_INT) { if (INTVAL (op1) == 0 && COMPARISON_P (op0)) @@ -2708,14 +2853,29 @@ simplify_relational_operation_1 (enum rtx_code code, enum machine_mode mode, } else if (code == EQ) { - enum rtx_code new = reversed_comparison_code (op0, NULL_RTX); - if (new != UNKNOWN) - return simplify_gen_relational (new, mode, VOIDmode, + enum rtx_code new_code = reversed_comparison_code (op0, NULL_RTX); + if (new_code != UNKNOWN) + return simplify_gen_relational (new_code, mode, VOIDmode, XEXP (op0, 0), XEXP (op0, 1)); } } } + /* (eq/ne (plus x cst1) cst2) simplifies to (eq/ne x (cst2 - cst1)) */ + if ((code == EQ || code == NE) + && (op0code == PLUS || op0code == MINUS) + && CONSTANT_P (op1) + && CONSTANT_P (XEXP (op0, 1)) + && (INTEGRAL_MODE_P (cmp_mode) || flag_unsafe_math_optimizations)) + { + rtx x = XEXP (op0, 0); + rtx c = XEXP (op0, 1); + + c = simplify_gen_binary (op0code == PLUS ? MINUS : PLUS, + cmp_mode, op1, c); + return simplify_gen_relational (code, mode, cmp_mode, x, c); + } + return NULL_RTX; } @@ -2734,10 +2894,9 @@ simplify_const_relational_operation (enum rtx_code code, rtx trueop0; rtx trueop1; - if (mode == VOIDmode - && (GET_MODE (op0) != VOIDmode - || GET_MODE (op1) != VOIDmode)) - abort (); + gcc_assert (mode != VOIDmode + || (GET_MODE (op0) == VOIDmode + && GET_MODE (op1) == VOIDmode)); /* If op0 is a compare, extract the comparison arguments from it. */ if (GET_CODE (op0) == COMPARE && op1 == const0_rtx) @@ -2770,8 +2929,8 @@ simplify_const_relational_operation (enum rtx_code code, signed comparisons for languages such as Java, so test flag_wrapv. */ if (!flag_wrapv && INTEGRAL_MODE_P (mode) && trueop1 != const0_rtx - && ! ((GET_CODE (op0) == REG || GET_CODE (trueop0) == CONST_INT) - && (GET_CODE (op1) == REG || GET_CODE (trueop1) == CONST_INT)) + && ! ((REG_P (op0) || GET_CODE (trueop0) == CONST_INT) + && (REG_P (op1) || GET_CODE (trueop1) == CONST_INT)) && 0 != (tem = simplify_binary_operation (MINUS, mode, op0, op1)) /* We cannot do this for == or != if tem is a nonzero address. */ && ((code != EQ && code != NE) || ! nonzero_address_p (tem)) @@ -2893,7 +3052,7 @@ simplify_const_relational_operation (enum rtx_code code, else { /* Optimize comparisons with upper and lower bounds. 
*/ - if (INTEGRAL_MODE_P (mode) + if (SCALAR_INT_MODE_P (mode) && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) { rtx mmin, mmax; @@ -3036,7 +3195,7 @@ simplify_const_relational_operation (enum rtx_code code, case UNORDERED: return const0_rtx; default: - abort (); + gcc_unreachable (); } } @@ -3093,7 +3252,7 @@ simplify_ternary_operation (enum rtx_code code, enum machine_mode mode, != ((HOST_WIDE_INT) (-1) << (width - 1)))) val &= ((HOST_WIDE_INT) 1 << width) - 1; - return GEN_INT (val); + return gen_int_mode (val, mode); } break; @@ -3175,10 +3334,9 @@ simplify_ternary_operation (enum rtx_code code, enum machine_mode mode, break; case VEC_MERGE: - if (GET_MODE (op0) != mode - || GET_MODE (op1) != mode - || !VECTOR_MODE_P (mode)) - abort (); + gcc_assert (GET_MODE (op0) == mode); + gcc_assert (GET_MODE (op1) == mode); + gcc_assert (VECTOR_MODE_P (mode)); op2 = avoid_constant_pool_reference (op2); if (GET_CODE (op2) == CONST_INT) { @@ -3209,7 +3367,7 @@ simplify_ternary_operation (enum rtx_code code, enum machine_mode mode, break; default: - abort (); + gcc_unreachable (); } return 0; @@ -3249,6 +3407,10 @@ simplify_immed_subreg (enum machine_mode outermode, rtx op, if (GET_MODE_CLASS (outermode) == MODE_CC && GET_CODE (op) == CONST_INT) return op; + /* We have no way to represent a complex constant at the rtl level. */ + if (COMPLEX_MODE_P (outermode)) + return NULL_RTX; + /* Unpack the value. */ if (GET_CODE (op) == CONST_VECTOR) @@ -3263,11 +3425,10 @@ simplify_immed_subreg (enum machine_mode outermode, rtx op, elems = &op; elem_bitsize = max_bitsize; } - - if (BITS_PER_UNIT % value_bit != 0) - abort (); /* Too complicated; reducing value_bit may help. */ - if (elem_bitsize % BITS_PER_UNIT != 0) - abort (); /* I don't know how to handle endianness of sub-units. */ + /* If this asserts, it is too complicated; reducing value_bit may help. */ + gcc_assert (BITS_PER_UNIT % value_bit == 0); + /* I don't know how to handle endianness of sub-units. */ + gcc_assert (elem_bitsize % BITS_PER_UNIT == 0); for (elem = 0; elem < num_elem; elem++) { @@ -3304,8 +3465,7 @@ simplify_immed_subreg (enum machine_mode outermode, rtx op, { /* If this triggers, someone should have generated a CONST_INT instead. */ - if (elem_bitsize <= HOST_BITS_PER_WIDE_INT) - abort (); + gcc_assert (elem_bitsize > HOST_BITS_PER_WIDE_INT); for (i = 0; i < HOST_BITS_PER_WIDE_INT; i += value_bit) *vp++ = CONST_DOUBLE_LOW (el) >> i; @@ -3320,15 +3480,14 @@ simplify_immed_subreg (enum machine_mode outermode, rtx op, for (; i < max_bitsize; i += value_bit) *vp++ = 0; } - else if (GET_MODE_CLASS (GET_MODE (el)) == MODE_FLOAT) + else { long tmp[max_bitsize / 32]; int bitsize = GET_MODE_BITSIZE (GET_MODE (el)); - - if (bitsize > elem_bitsize) - abort (); - if (bitsize % value_bit != 0) - abort (); + + gcc_assert (GET_MODE_CLASS (GET_MODE (el)) == MODE_FLOAT); + gcc_assert (bitsize <= elem_bitsize); + gcc_assert (bitsize % value_bit == 0); real_to_target (tmp, CONST_DOUBLE_REAL_VALUE (el), GET_MODE (el)); @@ -3352,12 +3511,10 @@ simplify_immed_subreg (enum machine_mode outermode, rtx op, for (; i < elem_bitsize; i += value_bit) *vp++ = 0; } - else - abort (); break; default: - abort (); + gcc_unreachable (); } } @@ -3377,8 +3534,7 @@ simplify_immed_subreg (enum machine_mode outermode, rtx op, /* BYTE should still be inside OP. (Note that BYTE is unsigned, so if it's become negative it will instead be very large.) 
*/ - if (byte >= GET_MODE_SIZE (innermode)) - abort (); + gcc_assert (byte < GET_MODE_SIZE (innermode)); /* Convert from bytes to chunks of size value_bit. */ value_start = byte * (BITS_PER_UNIT / value_bit); @@ -3402,10 +3558,8 @@ simplify_immed_subreg (enum machine_mode outermode, rtx op, outer_class = GET_MODE_CLASS (outer_submode); elem_bitsize = GET_MODE_BITSIZE (outer_submode); - if (elem_bitsize % value_bit != 0) - abort (); - if (elem_bitsize + value_start * value_bit > max_bitsize) - abort (); + gcc_assert (elem_bitsize % value_bit == 0); + gcc_assert (elem_bitsize + value_start * value_bit <= max_bitsize); for (elem = 0; elem < num_elem; elem++) { @@ -3475,7 +3629,7 @@ simplify_immed_subreg (enum machine_mode outermode, rtx op, break; default: - abort (); + gcc_unreachable (); } } if (VECTOR_MODE_P (outermode)) @@ -3491,17 +3645,16 @@ simplify_subreg (enum machine_mode outermode, rtx op, enum machine_mode innermode, unsigned int byte) { /* Little bit of sanity checking. */ - if (innermode == VOIDmode || outermode == VOIDmode - || innermode == BLKmode || outermode == BLKmode) - abort (); + gcc_assert (innermode != VOIDmode); + gcc_assert (outermode != VOIDmode); + gcc_assert (innermode != BLKmode); + gcc_assert (outermode != BLKmode); - if (GET_MODE (op) != innermode - && GET_MODE (op) != VOIDmode) - abort (); + gcc_assert (GET_MODE (op) == innermode + || GET_MODE (op) == VOIDmode); - if (byte % GET_MODE_SIZE (outermode) - || byte >= GET_MODE_SIZE (innermode)) - abort (); + gcc_assert ((byte % GET_MODE_SIZE (outermode)) == 0); + gcc_assert (byte < GET_MODE_SIZE (innermode)); if (outermode == innermode && !byte) return op; @@ -3517,7 +3670,7 @@ simplify_subreg (enum machine_mode outermode, rtx op, { enum machine_mode innermostmode = GET_MODE (SUBREG_REG (op)); int final_offset = byte + SUBREG_BYTE (op); - rtx new; + rtx newx; if (outermode == innermostmode && byte == 0 && SUBREG_BYTE (op) == 0) @@ -3574,12 +3727,14 @@ simplify_subreg (enum machine_mode outermode, rtx op, } /* Recurse for further possible simplifications. */ - new = simplify_subreg (outermode, SUBREG_REG (op), - GET_MODE (SUBREG_REG (op)), - final_offset); - if (new) - return new; - return gen_rtx_SUBREG (outermode, SUBREG_REG (op), final_offset); + newx = simplify_subreg (outermode, SUBREG_REG (op), innermostmode, + final_offset); + if (newx) + return newx; + if (validate_subreg (outermode, innermostmode, + SUBREG_REG (op), final_offset)) + return gen_rtx_SUBREG (outermode, SUBREG_REG (op), final_offset); + return NULL_RTX; } /* SUBREG of a hard register => just change the register number @@ -3588,8 +3743,6 @@ simplify_subreg (enum machine_mode outermode, rtx op, frame, or argument pointer, leave this as a SUBREG. */ if (REG_P (op) - && (! REG_FUNCTION_VALUE_P (op) - || ! rtx_equal_function_value_matters) && REGNO (op) < FIRST_PSEUDO_REGISTER #ifdef CANNOT_CHANGE_MODE_CLASS && ! (REG_CANNOT_CHANGE_MODE_P (REGNO (op), innermode, outermode) @@ -3609,14 +3762,15 @@ simplify_subreg (enum machine_mode outermode, rtx op, && subreg_offset_representable_p (REGNO (op), innermode, byte, outermode)) { - rtx tem = gen_rtx_SUBREG (outermode, op, byte); - int final_regno = subreg_hard_regno (tem, 0); + unsigned int regno = REGNO (op); + unsigned int final_regno + = regno + subreg_regno_offset (regno, innermode, byte, outermode); /* ??? We do allow it if the current REG is not valid for its mode. This is a kludge to work around how float/complex arguments are passed on 32-bit SPARC and should be fixed. 
*/ if (HARD_REGNO_MODE_OK (final_regno, outermode) - || ! HARD_REGNO_MODE_OK (REGNO (op), innermode)) + || ! HARD_REGNO_MODE_OK (regno, innermode)) { rtx x = gen_rtx_REG_offset (op, outermode, final_regno, byte); @@ -3636,7 +3790,7 @@ simplify_subreg (enum machine_mode outermode, rtx op, SUBREG with it. Don't do this if the MEM has a mode-dependent address or if we would be widening it. */ - if (GET_CODE (op) == MEM + if (MEM_P (op) && ! mode_dependent_address_p (XEXP (op, 0)) /* Allow splitting of volatile memory references in case we don't have instruction to move the whole thing. */ @@ -3649,18 +3803,21 @@ simplify_subreg (enum machine_mode outermode, rtx op, of real and imaginary part. */ if (GET_CODE (op) == CONCAT) { - int is_realpart = byte < (unsigned int) GET_MODE_UNIT_SIZE (innermode); - rtx part = is_realpart ? XEXP (op, 0) : XEXP (op, 1); - unsigned int final_offset; - rtx res; + unsigned int inner_size, final_offset; + rtx part, res; + + inner_size = GET_MODE_UNIT_SIZE (innermode); + part = byte < inner_size ? XEXP (op, 0) : XEXP (op, 1); + final_offset = byte % inner_size; + if (final_offset + GET_MODE_SIZE (outermode) > inner_size) + return NULL_RTX; - final_offset = byte % (GET_MODE_UNIT_SIZE (innermode)); res = simplify_subreg (outermode, part, GET_MODE (part), final_offset); if (res) return res; - /* We can at least simplify it by referring directly to the - relevant part. */ - return gen_rtx_SUBREG (outermode, part, final_offset); + if (validate_subreg (outermode, GET_MODE (part), part, final_offset)) + return gen_rtx_SUBREG (outermode, part, final_offset); + return NULL_RTX; } /* Optimize SUBREG truncations of zero and sign extended values. */ @@ -3698,6 +3855,55 @@ simplify_subreg (enum machine_mode outermode, rtx op, return CONST0_RTX (outermode); } + /* Simplify (subreg:QI (lshiftrt:SI (sign_extend:SI (x:QI)) C), 0) into + to (ashiftrt:QI (x:QI) C), where C is a suitable small constant and + the outer subreg is effectively a truncation to the original mode. */ + if ((GET_CODE (op) == LSHIFTRT + || GET_CODE (op) == ASHIFTRT) + && SCALAR_INT_MODE_P (outermode) + /* Ensure that OUTERMODE is at least twice as wide as the INNERMODE + to avoid the possibility that an outer LSHIFTRT shifts by more + than the sign extension's sign_bit_copies and introduces zeros + into the high bits of the result. */ + && (2 * GET_MODE_BITSIZE (outermode)) <= GET_MODE_BITSIZE (innermode) + && GET_CODE (XEXP (op, 1)) == CONST_INT + && GET_CODE (XEXP (op, 0)) == SIGN_EXTEND + && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode + && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode) + && subreg_lsb_1 (outermode, innermode, byte) == 0) + return simplify_gen_binary (ASHIFTRT, outermode, + XEXP (XEXP (op, 0), 0), XEXP (op, 1)); + + /* Likewise (subreg:QI (lshiftrt:SI (zero_extend:SI (x:QI)) C), 0) into + to (lshiftrt:QI (x:QI) C), where C is a suitable small constant and + the outer subreg is effectively a truncation to the original mode. 
*/ + if ((GET_CODE (op) == LSHIFTRT + || GET_CODE (op) == ASHIFTRT) + && SCALAR_INT_MODE_P (outermode) + && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode) + && GET_CODE (XEXP (op, 1)) == CONST_INT + && GET_CODE (XEXP (op, 0)) == ZERO_EXTEND + && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode + && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode) + && subreg_lsb_1 (outermode, innermode, byte) == 0) + return simplify_gen_binary (LSHIFTRT, outermode, + XEXP (XEXP (op, 0), 0), XEXP (op, 1)); + + /* Likewise (subreg:QI (ashift:SI (zero_extend:SI (x:QI)) C), 0) into + to (ashift:QI (x:QI) C), where C is a suitable small constant and + the outer subreg is effectively a truncation to the original mode. */ + if (GET_CODE (op) == ASHIFT + && SCALAR_INT_MODE_P (outermode) + && GET_MODE_BITSIZE (outermode) < GET_MODE_BITSIZE (innermode) + && GET_CODE (XEXP (op, 1)) == CONST_INT + && (GET_CODE (XEXP (op, 0)) == ZERO_EXTEND + || GET_CODE (XEXP (op, 0)) == SIGN_EXTEND) + && GET_MODE (XEXP (XEXP (op, 0), 0)) == outermode + && INTVAL (XEXP (op, 1)) < GET_MODE_BITSIZE (outermode) + && subreg_lsb_1 (outermode, innermode, byte) == 0) + return simplify_gen_binary (ASHIFT, outermode, + XEXP (XEXP (op, 0), 0), XEXP (op, 1)); + return NULL_RTX; } @@ -3707,32 +3913,23 @@ rtx simplify_gen_subreg (enum machine_mode outermode, rtx op, enum machine_mode innermode, unsigned int byte) { - rtx new; - /* Little bit of sanity checking. */ - if (innermode == VOIDmode || outermode == VOIDmode - || innermode == BLKmode || outermode == BLKmode) - abort (); + rtx newx; - if (GET_MODE (op) != innermode - && GET_MODE (op) != VOIDmode) - abort (); + newx = simplify_subreg (outermode, op, innermode, byte); + if (newx) + return newx; - if (byte % GET_MODE_SIZE (outermode) - || byte >= GET_MODE_SIZE (innermode)) - abort (); - - if (GET_CODE (op) == QUEUED) + if (GET_CODE (op) == SUBREG + || GET_CODE (op) == CONCAT + || GET_MODE (op) == VOIDmode) return NULL_RTX; - new = simplify_subreg (outermode, op, innermode, byte); - if (new) - return new; + if (validate_subreg (outermode, innermode, op, byte)) + return gen_rtx_SUBREG (outermode, op, byte); - if (GET_CODE (op) == SUBREG || GET_MODE (op) == VOIDmode) - return NULL_RTX; - - return gen_rtx_SUBREG (outermode, op, byte); + return NULL_RTX; } + /* Simplify X, an rtx expression. Return the simplified expression or NULL if no simplifications